public override async Task ImportData(Stream stream, SmugglerOptions options)
{
    SmugglerJintHelper.Initialize(options ?? SmugglerOptions);

    var batchSize = options != null ? options.BatchSize : SmugglerOptions.BatchSize;

    using (store = CreateStore())
    {
        Task disposeTask = null;

        try
        {
            operation = store.BulkInsert(options: new BulkInsertOptions
            {
                BatchSize = batchSize,
                CheckForUpdates = true
            });

            operation.Report += text => ShowProgress(text);

            await base.ImportData(stream, options);
        }
        finally
        {
            disposeTask = operation.DisposeAsync();
        }

        if (disposeTask != null)
        {
            await disposeTask;
        }
    }
}
public void bulk_insert_throws_when_server_is_down()
{
    using (var server = GetNewServer(port: 8079))
    using (var store = new DocumentStore
    {
        Url = "http://localhost:8079",
        DefaultDatabase = "test"
    }.Initialize())
    {
        Exception exp = null;
        for (var run = 0; run < 4; run++)
        {
            try
            {
                using (var bulkInsert = store.BulkInsert())
                {
                    for (var j = 0; j < 10000; j++)
                    {
                        bulkInsert.Store(new Sample());

                        if (j == 5000 && run == 1)
                        {
                            server.Dispose();
                            Thread.Sleep(1000);
                        }
                    }
                }
            }
            catch (Exception e)
            {
                exp = e;
            }
            finally
            {
                switch (run)
                {
                    case 0:
                        Assert.Equal(null, exp);
                        break;
                    case 1:
                        Assert.NotNull(exp.Message);
                        break;
                    case 2:
                        Assert.Equal("Could not get token for bulk insert", exp.Message);
                        Assert.Equal("An error occurred while sending the request.", exp.InnerException.Message);
                        Assert.Equal("Unable to connect to the remote server", exp.InnerException.InnerException.Message);
                        break;
                    case 3:
                        Assert.Equal("Could not get token for bulk insert", exp.Message);
                        Assert.Equal("An error occurred while sending the request.", exp.InnerException.Message);
                        Assert.Equal("Unable to connect to the remote server", exp.InnerException.InnerException.Message);
                        break;
                    default:
                        throw new ArgumentOutOfRangeException();
                }
            }
        }
    }
}
public static void BulkInsert(DocumentStore store, int numOfItems)
{
    using (var bulkInsert = store.BulkInsert())
    {
        for (int i = 0; i < numOfItems; i++)
        {
            // The original referenced an undeclared `id++`; the loop counter is used
            // here so the snippet is self-contained and each document gets its own key.
            bulkInsert.Store(new User
            {
                FirstName = String.Format("First Name - {0}", i),
                LastName = String.Format("Last Name - {0}", i)
            }, String.Format("users/{0}", i));
        }
    }
}
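// A minimal sketch of the User entity the helper above stores. The property names
// are taken from the Store call; everything else (such as the Id property RavenDB
// conventionally maps the document key onto) is an assumption:
public class User
{
    public string Id { get; set; }        // filled from the explicit "users/{0}" key (assumed convention)
    public string FirstName { get; set; }
    public string LastName { get; set; }
}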
/// <summary>
/// Bulk Insert 1000 Albums using the BulkInsert API
/// </summary>
/// <param name="documentStore"></param>
private static void DoBulkInsert(DocumentStore documentStore)
{
    using (BulkInsertOperation bulkInsert = documentStore.BulkInsert())
    {
        for (int i = 0; i < 1000; i++)
        {
            bulkInsert.Store(new Album
            {
                Title = "Title #" + i,
                Price = 5.99
            });
        }
    }
}
public void ShouldHaveTheCorrectDatabaseName_WhenTheDatabaseIsSpecifiedInTheUrlOnly()
{
    using (var server = GetNewServer())
    {
        // Create the database first; otherwise the bulk insert will throw because of a 404 response
        using (var store = new DocumentStore { Url = "http://localhost:8079", DefaultDatabase = "my-db-name" }.Initialize())
        {
        }

        using (var store = new DocumentStore { Url = "http://localhost:8079/databases/my-db-name" }.Initialize())
        using (var bulkInsert = store.BulkInsert())
        {
            Assert.Equal("http://localhost:8079/databases/my-db-name", ((AsyncServerClient)bulkInsert.DatabaseCommands).Url);
        }
    }
}
static string API_KEY = ""; // Needed!

#endregion Fields

#region Methods

public static void SyncUserVoiceSiteToRavenDb(Site site, string ravenDbServer, string database)
{
    Console.WriteLine("SyncUserVoiceSite started for UserVoiceSite {0} to RavenDbServer {1}", site.Name, ravenDbServer + "@" + database);

    Stopwatch watch = new Stopwatch();
    watch.Start();

    var client = new UserVoice.Client(site.Subdomain, "", apiSecret: "");

    // For closed suggestions we need filter=closed (apparently the only filter I can access?);
    // otherwise just use suggestions.json as the endpoint.
    var suggestions = client.GetCollection("/api/v1/forums/" + site.Id + "/suggestions.json?filter=closed");
    var totalNumber = suggestions.Count;
    Console.WriteLine("Total suggestions: " + totalNumber);

    using (var documentStore = new DocumentStore { Url = ravenDbServer, DefaultDatabase = database })
    {
        documentStore.Initialize();

        using (var bulkInsert = documentStore.BulkInsert(options: new BulkInsertOptions
        {
            CheckForUpdates = true,
            BatchSize = 100
        }))
        {
            int i = 0;
            foreach (var suggestionJson in suggestions)
            {
                int percentage = 0;
                if (i > 0)
                {
                    percentage = i * 100 / totalNumber;
                }
                Console.WriteLine("Completed {0}% (Counter: {1})", percentage, i);

                var suggestionObject = suggestionJson.ToObject<Suggestion>();
                suggestionObject.Site = site;
                bulkInsert.Store(suggestionObject);
                i++;
            }
        }
    }

    watch.Stop();
    Console.WriteLine("Job for UserVoiceSite {0} finished. Elapsed time: {1}:{2}", site.Name, watch.Elapsed.Minutes, watch.Elapsed.Seconds);
}
private static void Main(string[] args)
{
    var ds = new DocumentStore
    {
        Url = "http://localhost:8080",
        DefaultDatabase = "mr"
    }.Initialize();

    using (var bulk = ds.BulkInsert())
    {
        for (int i = 0; i < 1000 * 1000; i++)
        {
            bulk.Store(new Item { Number = 1 });
        }
    }
}
public HowToSubscribeToBulkInsertChanges()
{
    using (var store = new DocumentStore())
    {
        #region bulk_insert_changes_2
        using (BulkInsertOperation bulkInsert = store.BulkInsert())
        {
            IDisposable subscription = store
                .Changes()
                .ForBulkInsert(bulkInsert.OperationId)
                .Subscribe(change =>
                {
                    switch (change.Type)
                    {
                        case DocumentChangeTypes.BulkInsertStarted:
                            // do something
                            break;
                        case DocumentChangeTypes.BulkInsertEnded:
                            // do something
                            break;
                        case DocumentChangeTypes.BulkInsertError:
                            // do something
                            break;
                    }
                });

            try
            {
                for (int i = 0; i < 1000 * 1000; i++)
                {
                    bulkInsert.Store(new Employee
                    {
                        FirstName = "FirstName #" + i,
                        LastName = "LastName #" + i
                    });
                }
            }
            finally
            {
                if (subscription != null)
                    subscription.Dispose();
            }
        }
        #endregion
    }
}
static void Main(string[] args) { Console.WriteLine("Starting scan.."); ravenDBConnector = new RavenDBConnector(); Console.WriteLine("Iniating db connection.."); ravenDBStore = ravenDBConnector.InitDBConnection(); Console.WriteLine("Instantiating db session"); session = ravenDBStore.OpenSession(); Console.WriteLine("Getting the Bulk Insert object"); bulkInsert = ravenDBStore.BulkInsert(); string rootStartDrive = @"C:\anddev\"; Console.WriteLine("Drive for scanning is " + rootStartDrive); DirectoryInfo directoryInfo = new DirectoryInfo(rootStartDrive); AnalyzeDirectory(directoryInfo); Console.WriteLine("Scanned {0} folders and {1} files under {2}", folderCount, fileCount,rootStartDrive); Console.ReadLine(); ravenDBConnector.DisposeConnection(); }
public void StartsWithChangesThrowsWithBulkInsert()
{
    using (GetNewServer())
    using (var store = new DocumentStore { Url = "http://localhost:8079" }.Initialize())
    {
        Exception e = null;
        store.Changes().ForDocumentsStartingWith("something")
            .Subscribe(notification => { }, exception => e = exception);

        using (var session = store.BulkInsert())
        {
            session.Store(new Company(), "else/1");
        }

        Assert.Null(e);
    }
}
public BulkInserts()
{
    using (var store = new DocumentStore())
    {
        #region bulk_inserts_4
        using (BulkInsertOperation bulkInsert = store.BulkInsert())
        {
            for (int i = 0; i < 1000 * 1000; i++)
            {
                bulkInsert.Store(new Employee
                {
                    FirstName = "FirstName #" + i,
                    LastName = "LastName #" + i
                });
            }
        }
        #endregion
    }
}
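// BulkInsertOperation also exposes a Report event that surfaces progress text while
// the batches are flushed; the smuggler and freedb snippets further down rely on it.
// A minimal sketch of wiring it up, assuming a store and Employee entity as in the
// snippet above:
using (BulkInsertOperation bulkInsert = store.BulkInsert())
{
    bulkInsert.Report += text => Console.WriteLine(text); // progress messages reported by the operation

    for (int i = 0; i < 1000 * 1000; i++)
    {
        bulkInsert.Store(new Employee
        {
            FirstName = "FirstName #" + i,
            LastName = "LastName #" + i
        });
    }
}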
private void LoadWorkItems(DocumentStore store, SqlConnection connection, IEnumerable<ClosureReasonItem> allReasons, IEnumerable<HandlingDepartment> allHandlingDepartments, IEnumerable<ProductGroupItem> allProductGroups, IEnumerable<User> allUsers)
{
    IEnumerable<WorkItem> atomWorkItems;

    // load work items
    using (var workItemsBulkInsert = store.BulkInsert())
    {
        atomWorkItems = connection.Query<RavenFusion.Models.AtomSide.WorkItem>(@"SELECT * FROM WORKITEM ORDER BY ID DESC");

        var closureReasonDictionary = allReasons.ToDictionary(closureReason => closureReason.Id);
        var handlingDepartmentsDictionary = allHandlingDepartments.ToDictionary(x => x.id);
        var productGroupsDictionary = allProductGroups.ToDictionary(x => x.Id);
        var usersDictionary = allUsers.ToDictionary(x => x.id);

        foreach (var curWorkItem in atomWorkItems)
        {
            GenerateWorkItem(connection, curWorkItem, closureReasonDictionary, handlingDepartmentsDictionary, productGroupsDictionary, workItemsBulkInsert, usersDictionary);
        }
    }
}
static void Main()
{
    using (var store = new DocumentStore
    {
        Url = "http://localhost:8080",
        DefaultDatabase = "freedb",
    }.Initialize(true))
    {
        var sp = Stopwatch.StartNew();

        using (var insert = store.BulkInsert())
        {
            insert.Report += Console.WriteLine;
            ParseDisks(insert);
        }

        while (store.DatabaseCommands.GetStatistics().StaleIndexes.Length != 0)
            Thread.Sleep(500);

        Console.WriteLine();
        Console.WriteLine("Done in {0}", sp.Elapsed);
    }
    Console.ReadLine();
}
private static async Task<Etag> ExportDocuments(DocumentStore exportStore, DocumentStore importStore, SmugglerDatabaseOptions databaseOptions, ServerSupportedFeatures exportStoreSupportedFeatures, int exportBatchSize, int importBatchSize)
{
    var now = SystemTime.UtcNow;

    var lastEtag = databaseOptions.StartDocsEtag;
    var totalCount = 0;
    var lastReport = SystemTime.UtcNow;
    var reportInterval = TimeSpan.FromSeconds(2);
    ShowProgress("Exporting Documents");

    var bulkInsertOperation = importStore.BulkInsert(null, new BulkInsertOptions
    {
        BatchSize = importBatchSize,
        OverwriteExisting = true,
    });
    bulkInsertOperation.Report += text => ShowProgress(text);

    var jintHelper = new SmugglerJintHelper();
    jintHelper.Initialize(databaseOptions);

    try
    {
        while (true)
        {
            try
            {
                if (exportStoreSupportedFeatures.IsDocsStreamingSupported)
                {
                    ShowProgress("Streaming documents from " + lastEtag);
                    using (var documentsEnumerator = await exportStore.AsyncDatabaseCommands.StreamDocsAsync(lastEtag))
                    {
                        while (await documentsEnumerator.MoveNextAsync())
                        {
                            var document = documentsEnumerator.Current;
                            var metadata = document.Value<RavenJObject>("@metadata");
                            var id = metadata.Value<string>("@id");
                            var etag = Etag.Parse(metadata.Value<string>("@etag"));
                            lastEtag = etag;

                            if (!databaseOptions.MatchFilters(document))
                                continue;
                            if (databaseOptions.ShouldExcludeExpired && databaseOptions.ExcludeExpired(document, now))
                                continue;
                            if (databaseOptions.StripReplicationInformation)
                                document["@metadata"] = StripReplicationInformationFromMetadata(document["@metadata"] as RavenJObject);
                            if (databaseOptions.ShouldDisableVersioningBundle)
                                document["@metadata"] = SmugglerHelper.DisableVersioning(document["@metadata"] as RavenJObject);

                            document["@metadata"] = SmugglerHelper.HandleConflictDocuments(document["@metadata"] as RavenJObject);

                            if (!string.IsNullOrEmpty(databaseOptions.TransformScript))
                            {
                                document = jintHelper.Transform(databaseOptions.TransformScript, document);
                                if (document == null)
                                    continue;
                                metadata = document.Value<RavenJObject>("@metadata");
                            }

                            document.Remove("@metadata");

                            try
                            {
                                bulkInsertOperation.Store(document, metadata, id);
                            }
                            catch (Exception e)
                            {
                                if (databaseOptions.IgnoreErrorsAndContinue == false)
                                    throw;
                                ShowProgress("IMPORT of a document {0} failed. Message: {1}", document, e.Message);
                            }

                            totalCount++;
                            if (totalCount % 1000 == 0 || SystemTime.UtcNow - lastReport > reportInterval)
                            {
                                ShowProgress("Exported {0} documents", totalCount);
                                lastReport = SystemTime.UtcNow;
                            }
                        }
                    }
                }
                else
                {
                    int retries = RetriesCount;
                    var originalRequestTimeout = exportStore.JsonRequestFactory.RequestTimeout;
                    var timeout = databaseOptions.Timeout.Seconds;
                    if (timeout < 30)
                        timeout = 30;
                    try
                    {
                        var operationMetadata = new OperationMetadata(exportStore.Url, exportStore.Credentials, exportStore.ApiKey);

                        while (true)
                        {
                            try
                            {
                                ShowProgress("Get documents from " + lastEtag);
                                var documents = await ((AsyncServerClient)exportStore.AsyncDatabaseCommands).GetDocumentsInternalAsync(null, lastEtag, exportBatchSize, operationMetadata);
                                foreach (var jToken in documents)
                                {
                                    var document = (RavenJObject)jToken;
                                    var metadata = document.Value<RavenJObject>("@metadata");
                                    var id = metadata.Value<string>("@id");
                                    var etag = Etag.Parse(metadata.Value<string>("@etag"));
                                    lastEtag = etag;

                                    if (!databaseOptions.MatchFilters(document))
                                        continue;
                                    if (databaseOptions.ShouldExcludeExpired && databaseOptions.ExcludeExpired(document, now))
                                        continue;
                                    if (databaseOptions.StripReplicationInformation)
                                        document["@metadata"] = StripReplicationInformationFromMetadata(document["@metadata"] as RavenJObject);
                                    if (databaseOptions.ShouldDisableVersioningBundle)
                                        document["@metadata"] = SmugglerHelper.DisableVersioning(document["@metadata"] as RavenJObject);

                                    document["@metadata"] = SmugglerHelper.HandleConflictDocuments(document["@metadata"] as RavenJObject);

                                    document.Remove("@metadata");
                                    metadata.Remove("@id");
                                    metadata.Remove("@etag");

                                    try
                                    {
                                        bulkInsertOperation.Store(document, metadata, id);
                                    }
                                    catch (Exception e)
                                    {
                                        if (databaseOptions.IgnoreErrorsAndContinue == false)
                                            throw;
                                        ShowProgress("IMPORT of a document {0} failed. Message: {1}", document, e.Message);
                                    }

                                    totalCount++;
                                    if (totalCount % 1000 == 0 || SystemTime.UtcNow - lastReport > reportInterval)
                                    {
                                        ShowProgress("Exported {0} documents", totalCount);
                                        lastReport = SystemTime.UtcNow;
                                    }
                                }
                                break;
                            }
                            catch (Exception e)
                            {
                                if (retries-- == 0 && databaseOptions.IgnoreErrorsAndContinue)
                                    return Etag.Empty;
                                if (databaseOptions.IgnoreErrorsAndContinue == false)
                                    throw;
                                exportStore.JsonRequestFactory.RequestTimeout = TimeSpan.FromSeconds(timeout *= 2);
                                importStore.JsonRequestFactory.RequestTimeout = TimeSpan.FromSeconds(timeout *= 2);
                                ShowProgress("Error reading from database, remaining attempts {0}, will retry. Error: {1}", retries, e);
                            }
                        }
                    }
                    finally
                    {
                        exportStore.JsonRequestFactory.RequestTimeout = originalRequestTimeout;
                    }
                }

                // In case we filtered out all the results, the from-etag hasn't advanced to the latest,
                // but we still need to continue until we finish all the docs.
                var databaseStatistics = await exportStore.AsyncDatabaseCommands.GetStatisticsAsync();
                var lastEtagComparable = new ComparableByteArray(lastEtag);
                if (lastEtagComparable.CompareTo(databaseStatistics.LastDocEtag) < 0)
                {
                    lastEtag = EtagUtil.Increment(lastEtag, exportBatchSize);
                    ShowProgress("Got no results but didn't get to the last doc etag, trying from: {0}", lastEtag);
                    continue;
                }

                // Load HiLo documents for selected collections
                databaseOptions.Filters.ForEach(filter =>
                {
                    if (string.Equals(filter.Path, "@metadata.Raven-Entity-Name", StringComparison.OrdinalIgnoreCase))
                    {
                        filter.Values.ForEach(collectionName =>
                        {
                            var doc = exportStore.DatabaseCommands.Get("Raven/Hilo/" + collectionName);
                            if (doc == null)
                                return;
                            doc.Metadata["@id"] = doc.Key;
                            bulkInsertOperation.Store(doc.DataAsJson, doc.Metadata, doc.Key);
                            totalCount++;
                        });
                    }
                });

                ShowProgress("Done with reading documents, total: {0}, lastEtag: {1}", totalCount, lastEtag);
                return lastEtag;
            }
            catch (Exception e)
            {
                ShowProgress("Got Exception during smuggler between. Exception: {0}. ", e.Message);
                ShowProgress("Done with reading documents, total: {0}, lastEtag: {1}", totalCount, lastEtag);
                throw new SmugglerExportException(e.Message, e)
                {
                    LastEtag = lastEtag,
                };
            }
        }
    }
    finally
    {
        bulkInsertOperation.Dispose();
    }
}
public void Go()
{
    Console.WriteLine("Starting at {0}", DateTime.Now.ToLongTimeString());

    foreach (var user in Users)
    {
        var manager = Users[_rand.Next(Users.Count() - 1)];
        user.AddManager(manager);
        AddSicknesses(user, manager);
        AddLeaves(user, manager);
        AddConcerns(user, manager);
        AddPraises(user, manager);
        AddNotifications(user, manager);
    }
    AddHigs();

    var orgStore = new DocumentStore { ConnectionStringName = "RavenServer" };
    orgStore.Initialize();

    using (var bulkInsert = orgStore.BulkInsert())
    {
        foreach (var user in Users)
        {
            bulkInsert.Store(user);
        }
    }
    Console.WriteLine("Bulk inserted {0} users at {1}", Users.Count, DateTime.Now.ToLongTimeString());

    using (var bulkInsert = orgStore.BulkInsert())
    {
        foreach (var praise in Praises)
        {
            bulkInsert.Store(praise);
        }
    }
    Console.WriteLine("Bulk inserted {0} praises at {1}", Praises.Count, DateTime.Now.ToLongTimeString());

    using (var bulkInsert = orgStore.BulkInsert())
    {
        foreach (var concern in Concerns)
        {
            bulkInsert.Store(concern);
        }
    }
    Console.WriteLine("Bulk inserted {0} concerns at {1}", Concerns.Count, DateTime.Now.ToLongTimeString());

    using (var bulkInsert = orgStore.BulkInsert())
    {
        foreach (var leave in Leaves)
        {
            bulkInsert.Store(leave);
        }
    }
    Console.WriteLine("Bulk inserted {0} leaves at {1}", Leaves.Count, DateTime.Now.ToLongTimeString());

    using (var bulkInsert = orgStore.BulkInsert())
    {
        foreach (var sickness in Sicknesses)
        {
            bulkInsert.Store(sickness);
        }
    }
    Console.WriteLine("Bulk inserted {0} sicknesses at {1}", Sicknesses.Count, DateTime.Now.ToLongTimeString());

    using (var bulkInsert = orgStore.BulkInsert())
    {
        foreach (var notification in Notifications)
        {
            bulkInsert.Store(notification);
        }
    }
    Console.WriteLine("Bulk inserted {0} notifications at {1}", Notifications.Count, DateTime.Now.ToLongTimeString());

    Console.WriteLine("About to batch bulk insert {0} Higs at {1}", Higs.Count, DateTime.Now.ToLongTimeString());
    var moreHigs = true;
    while (moreHigs)
    {
        int num;
        if (Higs.Count() > 10000)
        {
            num = 10000;
        }
        else
        {
            num = Higs.Count();
            moreHigs = false;
        }

        if (num > 0)
        {
            using (var bulkInsert = orgStore.BulkInsert())
            {
                foreach (var hig in Higs.GetRange(0, num))
                {
                    bulkInsert.Store(hig);
                }
            }
            Higs.RemoveRange(0, num);
            Console.WriteLine("Bulk inserted {0} higs - {1} higs left at {2}", num, Higs.Count, DateTime.Now.ToLongTimeString());
        }
    }
}
private static async Task<Etag> ExportDocuments(DocumentStore exportStore, DocumentStore importStore, SmugglerDatabaseOptions databaseOptions, ServerSupportedFeatures exportStoreSupportedFeatures, int exportBatchSize, int importBatchSize)
{
    var now = SystemTime.UtcNow;

    var lastEtag = databaseOptions.StartDocsEtag;
    var totalCount = 0;
    var lastReport = SystemTime.UtcNow;
    var reportInterval = TimeSpan.FromSeconds(2);
    ShowProgress("Exporting Documents");

    var bulkInsertOperation = importStore.BulkInsert(null, new BulkInsertOptions
    {
        BatchSize = importBatchSize,
        OverwriteExisting = true,
    });
    bulkInsertOperation.Report += text => ShowProgress(text);

    var jintHelper = new SmugglerJintHelper();
    jintHelper.Initialize(databaseOptions);

    var isLastLoop = false;
    try
    {
        while (true)
        {
            try
            {
                var beforeCount = totalCount;
                if (exportStoreSupportedFeatures.IsDocsStreamingSupported)
                {
                    ShowProgress("Streaming documents from " + lastEtag);
                    var res = await TransferStreamedDocuments(exportStore, databaseOptions, now, jintHelper, bulkInsertOperation, reportInterval, totalCount, lastEtag, lastReport);
                    totalCount = res.Item1;
                    lastEtag = res.Item2;
                    lastReport = res.Item3;
                }
                else
                {
                    int retries = RetriesCount;
                    var originalRequestTimeout = exportStore.JsonRequestFactory.RequestTimeout;
                    var timeout = databaseOptions.Timeout.Seconds;
                    if (timeout < 30)
                        timeout = 30;
                    try
                    {
                        var operationMetadata = new OperationMetadata(exportStore.Url, exportStore.Credentials, exportStore.ApiKey);

                        while (true)
                        {
                            try
                            {
                                ShowProgress("Get documents from " + lastEtag);
                                var res = await TransferDocumentsWithoutStreaming(exportStore, databaseOptions, exportBatchSize, operationMetadata, now, bulkInsertOperation, reportInterval, totalCount, lastEtag, lastReport);
                                totalCount = res.Item1;
                                lastEtag = res.Item2;
                                lastReport = res.Item3;
                                break;
                            }
                            catch (Exception e)
                            {
                                if (retries-- == 0 && databaseOptions.IgnoreErrorsAndContinue)
                                    return Etag.Empty;
                                if (databaseOptions.IgnoreErrorsAndContinue == false)
                                    throw;
                                exportStore.JsonRequestFactory.RequestTimeout = TimeSpan.FromSeconds(timeout *= 2);
                                importStore.JsonRequestFactory.RequestTimeout = TimeSpan.FromSeconds(timeout *= 2);
                                ShowProgress("Error reading from database, remaining attempts {0}, will retry. Error: {1}", retries, e);
                            }
                        }
                    }
                    finally
                    {
                        exportStore.JsonRequestFactory.RequestTimeout = originalRequestTimeout;
                    }
                }

                // In case we filtered out all the results, the from-etag hasn't advanced to the latest,
                // but we still need to continue until we finish all the docs.
                var databaseStatistics = await exportStore.AsyncDatabaseCommands.GetStatisticsAsync();
                var lastEtagComparable = new ComparableByteArray(lastEtag);
                if (lastEtagComparable.CompareTo(databaseStatistics.LastDocEtag) <= 0 && !isLastLoop)
                {
                    if (totalCount == beforeCount)
                    {
                        isLastLoop = true;
                        ShowProgress("Got no new results, trying one more loop from: {0}", lastEtag);
                    }
                    else
                        ShowProgress("Finished streaming batch, but haven't reached the end (last reached etag = {0})", lastEtag);
                    continue;
                }

                // Load HiLo documents for selected collections
                databaseOptions.Filters.ForEach(filter =>
                {
                    if (string.Equals(filter.Path, "@metadata.Raven-Entity-Name", StringComparison.OrdinalIgnoreCase))
                    {
                        filter.Values.ForEach(collectionName =>
                        {
                            var doc = exportStore.DatabaseCommands.Get("Raven/Hilo/" + collectionName);
                            if (doc == null)
                                return;
                            doc.Metadata["@id"] = doc.Key;
                            bulkInsertOperation.Store(doc.DataAsJson, doc.Metadata, doc.Key);
                            totalCount++;
                        });
                    }
                });

                ShowProgress("Done with reading documents, total: {0}, lastEtag: {1}", totalCount, lastEtag);
                return lastEtag;
            }
            catch (Exception e)
            {
                ShowProgress("Got Exception during smuggler between. Exception: {0}. ", e.Message);
                ShowProgress("Done with reading documents, total: {0}, lastEtag: {1}", totalCount, lastEtag);
                throw new SmugglerExportException(e.Message, e)
                {
                    LastEtag = lastEtag,
                };
            }
        }
    }
    finally
    {
        bulkInsertOperation.Dispose();
    }
}
public override async Task ImportData(SmugglerImportOptions importOptions, SmugglerOptions options, Stream stream)
{
    SetSmugglerOptions(options);

    SmugglerJintHelper.Initialize(options);

    using (store = CreateStore(importOptions.To))
    {
        Task disposeTask = null;

        try
        {
            operation = store.BulkInsert(options: new BulkInsertOptions
            {
                BatchSize = options.BatchSize,
                OverwriteExisting = true
            });

            operation.Report += text => ShowProgress(text);

            await base.ImportData(importOptions, options, stream);
        }
        finally
        {
            disposeTask = operation.DisposeAsync();
        }

        if (disposeTask != null)
        {
            await disposeTask;
        }
    }
}
private static async Task<Etag> ExportDocuments(DocumentStore exportStore, DocumentStore importStore, SmugglerDatabaseOptions databaseOptions, ServerSupportedFeatures exportStoreSupportedFeatures, int exportBatchSize, int importBatchSize)
{
    var now = SystemTime.UtcNow;

    var lastEtag = databaseOptions.StartDocsEtag;
    var totalCount = 0;
    var lastReport = SystemTime.UtcNow;
    var reportInterval = TimeSpan.FromSeconds(2);
    ShowProgress("Exporting Documents");

    var bulkInsertOperation = importStore.BulkInsert(null, new BulkInsertOptions
    {
        BatchSize = importBatchSize,
        OverwriteExisting = true,
    });
    bulkInsertOperation.Report += text => ShowProgress(text);

    try
    {
        while (true)
        {
            if (exportStoreSupportedFeatures.IsDocsStreamingSupported)
            {
                ShowProgress("Streaming documents from " + lastEtag);
                using (var documentsEnumerator = await exportStore.AsyncDatabaseCommands.StreamDocsAsync(lastEtag))
                {
                    while (await documentsEnumerator.MoveNextAsync())
                    {
                        var document = documentsEnumerator.Current;

                        if (!databaseOptions.MatchFilters(document))
                            continue;
                        if (databaseOptions.ShouldExcludeExpired && databaseOptions.ExcludeExpired(document, now))
                            continue;

                        var metadata = document.Value<RavenJObject>("@metadata");
                        var id = metadata.Value<string>("@id");
                        var etag = Etag.Parse(metadata.Value<string>("@etag"));
                        document.Remove("@metadata");
                        bulkInsertOperation.Store(document, metadata, id);
                        totalCount++;

                        if (totalCount % 1000 == 0 || SystemTime.UtcNow - lastReport > reportInterval)
                        {
                            ShowProgress("Exported {0} documents", totalCount);
                            lastReport = SystemTime.UtcNow;
                        }
                        lastEtag = etag;
                    }
                }
            }
            else
            {
                int retries = RetriesCount;
                var originalRequestTimeout = exportStore.JsonRequestFactory.RequestTimeout;
                var timeout = databaseOptions.Timeout.Seconds;
                if (timeout < 30)
                    timeout = 30;
                try
                {
                    var operationMetadata = new OperationMetadata(exportStore.Url, exportStore.Credentials, exportStore.ApiKey);

                    while (true)
                    {
                        try
                        {
                            ShowProgress("Get documents from " + lastEtag);
                            var documents = await ((AsyncServerClient)exportStore.AsyncDatabaseCommands).GetDocumentsInternalAsync(null, lastEtag, exportBatchSize, operationMetadata);
                            foreach (RavenJObject document in documents)
                            {
                                var metadata = document.Value<RavenJObject>("@metadata");
                                var id = metadata.Value<string>("@id");
                                var etag = Etag.Parse(metadata.Value<string>("@etag"));
                                document.Remove("@metadata");
                                metadata.Remove("@id");
                                metadata.Remove("@etag");

                                if (!databaseOptions.MatchFilters(document))
                                    continue;
                                if (databaseOptions.ShouldExcludeExpired && databaseOptions.ExcludeExpired(document, now))
                                    continue;

                                bulkInsertOperation.Store(document, metadata, id);
                                totalCount++;

                                if (totalCount % 1000 == 0 || SystemTime.UtcNow - lastReport > reportInterval)
                                {
                                    ShowProgress("Exported {0} documents", totalCount);
                                    lastReport = SystemTime.UtcNow;
                                }
                                lastEtag = etag;
                            }
                            break;
                        }
                        catch (Exception e)
                        {
                            if (retries-- == 0)
                                throw;
                            exportStore.JsonRequestFactory.RequestTimeout = TimeSpan.FromSeconds(timeout *= 2);
                            importStore.JsonRequestFactory.RequestTimeout = TimeSpan.FromSeconds(timeout *= 2);
                            ShowProgress("Error reading from database, remaining attempts {0}, will retry. Error: {1}", retries, e);
                        }
                    }
                }
                finally
                {
                    exportStore.JsonRequestFactory.RequestTimeout = originalRequestTimeout;
                }
            }

            // In case we filtered out all the results, the from-etag hasn't advanced to the latest,
            // but we still need to continue until we finish all the docs.
            var databaseStatistics = await exportStore.AsyncDatabaseCommands.GetStatisticsAsync();
            var lastEtagComparable = new ComparableByteArray(lastEtag);
            if (lastEtagComparable.CompareTo(databaseStatistics.LastDocEtag) < 0)
            {
                lastEtag = EtagUtil.Increment(lastEtag, exportBatchSize);
                ShowProgress("Got no results but didn't get to the last doc etag, trying from: {0}", lastEtag);
                continue;
            }

            ShowProgress("Done with reading documents, total: {0}", totalCount);
            return lastEtag;
        }
    }
    finally
    {
        bulkInsertOperation.Dispose();
    }
}
private static void StoreTrialData(string databaseName, int numberOfTrials)
{
    var watch = new Stopwatch();

    LogMessage("----------------------------------------------------------------------------------------------------------");
    LogMessage(string.Format("StoreTrialData({0}, {1})", databaseName, numberOfTrials));
    LogMessage(string.Format("Generating {0} records for insertion", numberOfTrials));

    watch.Start();
    var trialBatch = new List<TrialData>();
    var random = new Random();
    for (var i = 1; i <= numberOfTrials; i++)
    {
        var parameters = new List<Parameter>
        {
            new Parameter { Name = "GBP.TotalReturnIndex", TrialNumber = 1, Value = random.NextDouble() },
            new Parameter { Name = "GBP.TotalReturnIndex", TrialNumber = 2, Value = random.NextDouble() },
            new Parameter { Name = "GBP.TotalReturnIndex", TrialNumber = 3, Value = random.NextDouble() }
        };

        var trialData = new TrialData
        {
            StressNumber = i,
            Parameters = parameters.ToArray()
        };
        trialBatch.Add(trialData);
    }
    watch.Stop();
    LogMessage(string.Format("Data generated in {0}:{1}:{2}.{3}", watch.Elapsed.Hours, watch.Elapsed.Minutes, watch.Elapsed.Seconds, watch.Elapsed.Milliseconds));

    LogMessage(string.Format("Connecting to RavenDB instance: {0}", serverUrl));
    IDocumentStore documentStore = new DocumentStore { Url = serverUrl };
    LogMessage("Initializing document store");
    documentStore.Initialize();

    watch.Reset();
    watch.Start();
    LogMessage("Starting bulk insert");

    var bulkInsertOptions = new BulkInsertOptions
    {
        BatchSize = 10000,
        CheckForUpdates = false,
        CheckReferencesInIndexes = false
    };

    using (var bulkInsert = documentStore.BulkInsert(databaseName, bulkInsertOptions))
    {
        foreach (var item in trialBatch)
            bulkInsert.Store(item);
    }
    watch.Stop();
    LogMessage(string.Format("Inserted {0} records in {1}:{2}:{3}.{4}", numberOfTrials, watch.Elapsed.Hours, watch.Elapsed.Minutes, watch.Elapsed.Seconds, watch.Elapsed.Milliseconds));
    LogMessage("----------------------------------------------------------------------------------------------------------");
}
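// Note: a bulk insert normally refuses to overwrite documents whose keys already
// exist, so re-running a load against a populated database can fail. The snippets
// above handle this through BulkInsertOptions (CheckForUpdates = true in older
// client versions, OverwriteExisting = true in newer ones; which flag is available
// depends on the client version, so treat the name as an assumption). A minimal
// re-runnable sketch of the insert above:
using (var bulkInsert = documentStore.BulkInsert(databaseName, new BulkInsertOptions
{
    BatchSize = 10000,
    OverwriteExisting = true // replace any document whose id already exists
}))
{
    foreach (var item in trialBatch)
        bulkInsert.Store(item);
}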