/// <summary>
/// Runs a bulk-import scenario against the given collection using a dedicated
/// <see cref="MongoBulkExecutor"/> built from the configured endpoint/key.
/// Batch count and batch size are read from app settings ("NumberOfBatches",
/// "NumberOfDocumentsPerBatch"); documents come from <c>Util.GetSOHData</c>.
/// A running total (docs, writes/s, RU/s, elapsed) is printed after each batch.
/// </summary>
/// <param name="collectionName">Name of the target collection.</param>
private async Task RunBulkScenario(string collectionName)
{
    var connectionPolicy = new ConnectionPolicy
    {
        ConnectionMode = ConnectionMode.Direct,
        ConnectionProtocol = Protocol.Tcp,
    };
    connectionPolicy.PreferredLocations.Add(LocationNames.WestUS2);

    MongoBulkExecutor mongoBulkExecutor = new MongoBulkExecutor(
        new Uri(EndpointUrl),
        AuthorizationKey,
        DatabaseName,
        collectionName,
        connectionPolicy);
    await mongoBulkExecutor.InitializeAsync();

    BulkImportResponse bulkImportResponse = null;
    long totalNumberOfDocumentsInserted = 0;
    double totalRequestUnitsConsumed = 0;
    double totalTimeTakenSec = 0;

    var tokenSource = new CancellationTokenSource();
    var token = tokenSource.Token;

    int numberOfBatches = int.Parse(ConfigurationManager.AppSettings["NumberOfBatches"]);
    long numberOfDocumentsPerBatch = long.Parse(ConfigurationManager.AppSettings["NumberOfDocumentsPerBatch"]);

    for (int i = 0; i < numberOfBatches; i++)
    {
        // Copy the loop variable: C# 'for' variables are shared across iterations,
        // so an async closure must not capture 'i' directly.
        int batchIndex = i;

        // Generate JSON-serialized documents to import for this batch.
        Trace.TraceInformation(String.Format("Generating {0} documents to import for batch {1}", numberOfDocumentsPerBatch, batchIndex));
        List<string> documentsToImportInBatch = Util.GetSOHData(batchIndex, numberOfDocumentsPerBatch);

        // Invoke bulk import API on a worker task; retry until the whole batch landed.
        var tasks = new List<Task>();
        tasks.Add(Task.Run(async () =>
        {
            Trace.TraceInformation(String.Format("Executing bulk import for batch {0}", batchIndex));
            do
            {
                try
                {
                    bulkImportResponse = await mongoBulkExecutor.BulkImportAsync(
                        documents: documentsToImportInBatch,
                        enableUpsert: false);
                }
                catch (DocumentClientException de)
                {
                    Trace.TraceError("Document client exception: {0}", de);
                    break;
                }
                catch (Exception e)
                {
                    Trace.TraceError("Exception: {0}", e);
                    break;
                }
            } while (bulkImportResponse.NumberOfDocumentsImported < documentsToImportInBatch.Count);

            // BUG FIX: if the very first attempt threw, we break out with a null
            // response; the original code dereferenced it unconditionally here
            // and crashed with a NullReferenceException.
            if (bulkImportResponse == null)
            {
                return;
            }

            totalNumberOfDocumentsInserted += bulkImportResponse.NumberOfDocumentsImported;
            totalRequestUnitsConsumed += bulkImportResponse.TotalRequestUnitsConsumed;
            totalTimeTakenSec += bulkImportResponse.TotalTimeTaken.TotalSeconds;

            // Summarize the running total so far.
            Console.WriteLine("--------------------------------------------------------------------- ");
            Console.WriteLine(String.Format("Inserted {0} docs @ {1} writes/s, {2} RU/s in {3} sec",
                totalNumberOfDocumentsInserted,
                Math.Round(totalNumberOfDocumentsInserted / totalTimeTakenSec),
                Math.Round(totalRequestUnitsConsumed / totalTimeTakenSec),
                totalTimeTakenSec));
            Console.WriteLine("--------------------------------------------------------------------- ");
        }, token));

        await Task.WhenAll(tasks);
    }

    // Moved out of the loop: the original traced this once per batch.
    Trace.WriteLine("\nPress any key to exit.");
}
/// <summary>
/// Driver function for bulk import. Optionally recreates the database/collection
/// (per "ShouldCleanupOnStart"), generates random documents with a simple
/// partition key, imports them in batches via <see cref="MongoBulkExecutor"/>,
/// prints per-batch and overall throughput summaries, and optionally deletes
/// the database on completion (per "ShouldCleanupOnFinish").
/// </summary>
/// <returns>A task that completes when the import run finishes.</returns>
private async Task RunBulkImportAsync()
{
    // Cleanup on start if set in config.
    DocumentCollection dataCollection = null;
    try
    {
        if (bool.Parse(ConfigurationManager.AppSettings["ShouldCleanupOnStart"]))
        {
            Database database = Utils.GetDatabaseIfExists(client, DatabaseName);
            if (database != null)
            {
                await client.DeleteDatabaseAsync(database.SelfLink);
            }

            Trace.TraceInformation("Creating database {0}", DatabaseName);
            database = await client.CreateDatabaseAsync(new Database { Id = DatabaseName });

            Trace.TraceInformation(String.Format("Creating collection {0} with {1} RU/s", CollectionName, CollectionThroughput));
            dataCollection = await Utils.CreatePartitionedCollectionAsync(client, DatabaseName, CollectionName, CollectionThroughput);
        }
        else
        {
            dataCollection = Utils.GetCollectionIfExists(client, DatabaseName, CollectionName);
            if (dataCollection == null)
            {
                throw new Exception("The data collection does not exist");
            }
        }
    }
    catch (Exception de)
    {
        Trace.TraceError("Unable to initialize, exception message: {0}", de.Message);
        throw;
    }

    // Prepare for bulk import: documents use a simple partition key derived
    // from the collection's first partition-key path (leading '/' stripped).
    string partitionKeyProperty = dataCollection.PartitionKey.Paths[0].Replace("/", "");
    long numberOfDocumentsToGenerate = long.Parse(ConfigurationManager.AppSettings["NumberOfDocumentsToImport"]);
    int numberOfBatches = int.Parse(ConfigurationManager.AppSettings["NumberOfBatches"]);
    long numberOfDocumentsPerBatch = (long)Math.Floor(((double)numberOfDocumentsToGenerate) / numberOfBatches);

    // Set retry options high for initialization (default values).
    client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds = 30;
    client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 9;

    var bulkExecutor = new MongoBulkExecutor(client, dataCollection);
    await bulkExecutor.InitializeAsync();

    // Set retries to 0 to pass throttle-handling control to the bulk executor.
    client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds = 0;
    client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 0;

    BulkImportResponse bulkImportResponse = null;
    long totalNumberOfDocumentsInserted = 0;
    double totalRequestUnitsConsumed = 0;
    double totalTimeTakenSec = 0;

    var tokenSource = new CancellationTokenSource();
    var token = tokenSource.Token;

    for (int i = 0; i < numberOfBatches; i++)
    {
        // Copy the loop variable: C# 'for' variables are shared across iterations,
        // so an async closure must not capture 'i' directly.
        int batchIndex = i;

        // Generate documents to import for this batch; ids embed the partition
        // key value plus a GUID to stay unique across retries.
        List<BsonDocument> documentsToImportInBatch = new List<BsonDocument>();
        long prefix = batchIndex * numberOfDocumentsPerBatch;
        Console.WriteLine(String.Format("Generating {0} documents to import for batch {1}", numberOfDocumentsPerBatch, batchIndex));
        for (int j = 0; j < numberOfDocumentsPerBatch; j++)
        {
            string partitionKeyValue = (prefix + j).ToString();
            string id = partitionKeyValue + Guid.NewGuid().ToString();
            documentsToImportInBatch.Add(Utils.GenerateRandomDocumentString(id, partitionKeyProperty, partitionKeyValue));
        }

        // Invoke bulk import API on a worker task; retry until the whole batch landed.
        var tasks = new List<Task>();
        tasks.Add(Task.Run(async () =>
        {
            Console.WriteLine(String.Format("Executing bulk import for batch {0}", batchIndex));
            do
            {
                try
                {
                    bulkImportResponse = await bulkExecutor.MongoBulkImportAsync(
                        documents: documentsToImportInBatch,
                        enableUpsert: true,
                        disableAutomaticIdGeneration: false,
                        maxConcurrencyPerPartitionKeyRange: null,
                        maxInMemorySortingBatchSize: null,
                        cancellationToken: token);
                }
                catch (DocumentClientException de)
                {
                    Console.WriteLine("Document client exception: {0}", de);
                    break;
                }
                catch (Exception e)
                {
                    Console.WriteLine("Exception: {0}", e);
                    break;
                }
            } while (bulkImportResponse.NumberOfDocumentsImported < documentsToImportInBatch.Count);

            // BUG FIX: if the very first attempt threw, we break out with a null
            // response; the original code dereferenced it unconditionally below
            // and crashed with a NullReferenceException.
            if (bulkImportResponse == null)
            {
                return;
            }

            Console.WriteLine(String.Format("\nSummary for batch {0}:", batchIndex));
            Console.WriteLine("--------------------------------------------------------------------- ");
            Console.WriteLine(String.Format("Inserted {0} docs @ {1} writes/s, {2} RU/s in {3} sec",
                bulkImportResponse.NumberOfDocumentsImported,
                Math.Round(bulkImportResponse.NumberOfDocumentsImported / bulkImportResponse.TotalTimeTaken.TotalSeconds),
                Math.Round(bulkImportResponse.TotalRequestUnitsConsumed / bulkImportResponse.TotalTimeTaken.TotalSeconds),
                bulkImportResponse.TotalTimeTaken.TotalSeconds));
            Console.WriteLine(String.Format("Average RU consumption per document: {0}",
                (bulkImportResponse.TotalRequestUnitsConsumed / bulkImportResponse.NumberOfDocumentsImported)));
            Console.WriteLine("---------------------------------------------------------------------\n ");

            totalNumberOfDocumentsInserted += bulkImportResponse.NumberOfDocumentsImported;
            totalRequestUnitsConsumed += bulkImportResponse.TotalRequestUnitsConsumed;
            totalTimeTakenSec += bulkImportResponse.TotalTimeTaken.TotalSeconds;
        }, token));

        await Task.WhenAll(tasks);
    }

    Console.WriteLine("Overall summary:");
    Console.WriteLine("--------------------------------------------------------------------- ");
    Console.WriteLine(String.Format("Inserted {0} docs @ {1} writes/s, {2} RU/s in {3} sec",
        totalNumberOfDocumentsInserted,
        Math.Round(totalNumberOfDocumentsInserted / totalTimeTakenSec),
        Math.Round(totalRequestUnitsConsumed / totalTimeTakenSec),
        totalTimeTakenSec));
    Console.WriteLine(String.Format("Average RU consumption per document: {0}",
        (totalRequestUnitsConsumed / totalNumberOfDocumentsInserted)));
    Console.WriteLine("--------------------------------------------------------------------- ");

    // Cleanup on finish if set in config.
    if (bool.Parse(ConfigurationManager.AppSettings["ShouldCleanupOnFinish"]))
    {
        Console.WriteLine("Deleting Database {0}", DatabaseName);
        await client.DeleteDatabaseAsync(UriFactory.CreateDatabaseUri(DatabaseName));
    }

    Console.WriteLine("\nPress any key to exit.");
    Console.ReadKey();
}