/// <inheritdoc/>
        async Task<IDocumentLoader> IDocuments.CreateBulkLoader()
        {
            var executor = new BulkExecutor(CloneClient(), Container);
            await executor.InitializeAsync();

            return new BulkImporter(executor, _serializer, _logger);
        }
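A hedged usage sketch for the loader above; the documents service variable, the IDocumentLoader interface, and its LoadAsync method are assumptions inferred from the names in this snippet, not confirmed API:

        // Hypothetical call site (names assumed, see note above).
        IDocumentLoader loader = await documents.CreateBulkLoader();
        await loader.LoadAsync(itemsToImport);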
Example 2
        public static async Task BulkImportDocuments(List<string> documentsToImportInBatch)
        {
            string             EndpointUrl      = Environment.GetEnvironmentVariable("EndpointUrl");
            string             AuthorizationKey = Environment.GetEnvironmentVariable("AuthorizationKey");
            DocumentClient     _client          = new DocumentClient(new Uri(EndpointUrl), AuthorizationKey);
            DocumentCollection dataCollection   = Utils.GetCollectionIfExists(_client, "db", "coll");
            IBulkExecutor      bulkExecutor     = new BulkExecutor(_client, dataCollection);
            await bulkExecutor.InitializeAsync();

            BulkImportResponse bulkImportResponse = null;
            var tokenSource = new CancellationTokenSource();
            var token       = tokenSource.Token;

            try
            {
                bulkImportResponse = await bulkExecutor.BulkImportAsync(
                    documents: documentsToImportInBatch,
                    enableUpsert: true,
                    disableAutomaticIdGeneration: true,
                    maxConcurrencyPerPartitionKeyRange: null,
                    maxInMemorySortingBatchSize: null,
                    cancellationToken: token);
            }
            catch (DocumentClientException de)
            {
                Console.WriteLine("Document _client exception: {0}", de);
                throw;
            }
            catch (Exception e)
            {
                Console.WriteLine("Exception: {0}", e);
                throw;
            }
            //return bulkImportResponse;
        }
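A minimal call site for BulkImportDocuments above, assuming the EndpointUrl and AuthorizationKey environment variables are set and the db/coll collection exists:

        var docs = new List<string>
        {
            "{ \"id\": \"1\", \"pk\": \"alpha\" }",
            "{ \"id\": \"2\", \"pk\": \"beta\" }"
        };
        await BulkImportDocuments(docs);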
Example 3
        public async Task<(DocumentDbBatchOptions, object)> BeforeAsync(IDbConnection connection,
                                                                         IDataDescriptor descriptor, IDbTransaction transaction = null, int? commandTimeout = null)
        {
            var client = connection.GetClient();

            client = new DocumentClient(client.ServiceEndpoint, client.AuthKey, Defaults.JsonSettings,
                                        client.ConnectionPolicy,
                                        client.ConsistencyLevel);

            client.ConnectionPolicy.ConnectionMode     = ConnectionMode.Direct;
            client.ConnectionPolicy.ConnectionProtocol = Protocol.Tcp;
            client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 30;
            client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 9;

            var databaseId = connection.GetDatabaseId();

            var documentCollection = client.CreateDocumentCollectionQuery(UriFactory.CreateDatabaseUri(databaseId))
                                     .Where(c => c.Id == _options.Value.CollectionId)
                                     .AsEnumerable().FirstOrDefault();

            IBulkExecutor bulkExecutor = new BulkExecutor(client, documentCollection);
            await bulkExecutor.InitializeAsync();

            client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 0;
            client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 0;

            return (_batchOptions.Value, bulkExecutor);
        }
Example 4
        private Program(
            DocumentClient client,
            ImportConfiguration importConfig)
        {
            this._client       = client;
            this._importConfig = importConfig;

            this._col = new AsyncLazy<DocumentCollection>(async ()
                                                           => await InitDbAndCollectionAsync().ConfigureAwait(false));

            this._bulkExecutor = new AsyncLazy<BulkExecutor>(async () =>
            {
                // Set retry options high for initialization (default values).
                _client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 30;
                _client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 9;

                var bulkExecutor = new BulkExecutor(_client, await _col.Value);
                await bulkExecutor.InitializeAsync();

                // Set retries to 0 to pass control to bulk executor.
                _client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 0;
                _client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 0;

                return bulkExecutor;
            });
        }
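AsyncLazy<T> is not a BCL type; a minimal implementation consistent with the await _col.Value usage above (the classic Lazy<Task<T>> wrapper, shown as a sketch):

        public sealed class AsyncLazy<T> : Lazy<Task<T>>
        {
            // Run the factory on the thread pool so the first awaiter does not
            // execute it inline while holding the Lazy initialization lock.
            public AsyncLazy(Func<Task<T>> factory)
                : base(() => Task.Run(factory))
            {
            }
        }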
        public async Task ApplyMigrationAsync(DocumentClient client, string databaseName, string collectionName, object migration)
        {
            if (!await executions.CanExecuteAsync(client, databaseName, collectionName, migration.GetType().Name))
            {
                return;
            }
            var bulkMigration = migration as IBulkImportMigration;
            if (bulkMigration == null)
            {
                return; // not a bulk-import migration; GetDocuments() below would throw
            }

            // Set retry options high during initialization (default values).
            client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 30;
            client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 9;

            var collection = await client.ReadDocumentCollectionAsync(UriFactory.CreateDocumentCollectionUri(databaseName, collectionName));

            IBulkExecutor bulkExecutor = new BulkExecutor(client, collection);
            await bulkExecutor.InitializeAsync();

            // Set retries to 0 to pass complete control to bulk executor.
            client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 0;
            client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 0;
            await bulkExecutor.BulkImportAsync(
                documents: await bulkMigration.GetDocuments(),
                enableUpsert: true,
                disableAutomaticIdGeneration: true,
                maxConcurrencyPerPartitionKeyRange: null,
                maxInMemorySortingBatchSize: null,
                cancellationToken: new System.Threading.CancellationToken());

            await executions.RegisterExecutedAsync(client, databaseName, collectionName, migration.GetType().Name);
        }
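A hedged sketch of an IBulkImportMigration implementation; the interface shape (a GetDocuments() method awaited above) is inferred from usage, and the document content is hypothetical:

        public class SeedSettingsMigration : IBulkImportMigration
        {
            public Task<IEnumerable<string>> GetDocuments()
            {
                // JSON documents this migration upserts (hypothetical content).
                IEnumerable<string> docs = new[]
                {
                    "{ \"id\": \"settings\", \"theme\": \"dark\" }"
                };
                return Task.FromResult(docs);
            }
        }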
        public static async Task Initialize()
        {
            client = new DocumentClient(new Uri(ConfigurationManager.AppSettings["endpoint"]), ConfigurationManager.AppSettings["authKey"]);
            await CreateDatabaseIfNotExistsAsync();

            var docCollection = await CreateCollectionIfNotExistsAsync();

            BulkExecutor = new BulkExecutor(client, docCollection);
            await BulkExecutor.InitializeAsync();
        }
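CreateDatabaseIfNotExistsAsync and CreateCollectionIfNotExistsAsync are not shown above; plausible helpers built from SDK calls seen elsewhere on this page (the "database" and "collection" app-setting keys are assumptions):

        private static async Task CreateDatabaseIfNotExistsAsync()
        {
            await client.CreateDatabaseIfNotExistsAsync(
                new Database { Id = ConfigurationManager.AppSettings["database"] });
        }

        private static async Task<DocumentCollection> CreateCollectionIfNotExistsAsync()
        {
            var response = await client.CreateDocumentCollectionIfNotExistsAsync(
                UriFactory.CreateDatabaseUri(ConfigurationManager.AppSettings["database"]),
                new DocumentCollection { Id = ConfigurationManager.AppSettings["collection"] });
            return response.Resource;
        }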
Example 7
        private async Task<BulkExecutor> BuildClientAsync(string collectionName)
        {
            var collection   = GetCollectionIfExists(client, DatabaseName, collectionName);
            var bulkExecutor = new BulkExecutor(client, collection);
            await bulkExecutor.InitializeAsync();

            client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 10;
            client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 5;
            return bulkExecutor;
        }
Example 8
        private async Task LoadTestData()
        {
            ConnectionPolicy connectionPolicy = new ConnectionPolicy
            {
                ConnectionMode     = ConnectionMode.Direct,
                ConnectionProtocol = Protocol.Tcp
            };

            client = new DocumentClient(new Uri(EndpointUrl), PrimaryKey, connectionPolicy);

            string dbName         = "ParkingLedger";
            string collectionName = "VehicleAccesses";

            await this.client.CreateDatabaseIfNotExistsAsync(new Database { Id = dbName });

            var collection = await this.client.CreateDocumentCollectionIfNotExistsAsync(UriFactory.CreateDatabaseUri(dbName), new DocumentCollection { Id = collectionName });

            // manual update
            //var list = CreateAccessesList();
            //Parallel.ForEach(list, (x) =>
            //{
            //    RegisterVehicleAccess(dbName, collectionName, x);
            //});


            // Set retry options high during initialization (default values).
            client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 30;
            client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 9;

            IBulkExecutor bulkExecutor = new BulkExecutor(client, collection);
            await bulkExecutor.InitializeAsync();

            // Set retries to 0 to pass complete control to bulk executor.
            client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 0;
            client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 0;

            var list          = CreateAccessesList();
            var listOfStrings = list.Select(item => JsonConvert.SerializeObject(item)).ToList();

            BulkImportResponse bulkImportResponse = await bulkExecutor.BulkImportAsync(
                documents: listOfStrings,
                enableUpsert: true,
                disableAutomaticIdGeneration: true,
                maxConcurrencyPerPartitionKeyRange: null,
                maxInMemorySortingBatchSize: null);

            Console.WriteLine("Bulk import completed:");
            Console.WriteLine($"\tImported: { bulkImportResponse.NumberOfDocumentsImported}");
            Console.WriteLine($"\tErrors: { bulkImportResponse.BadInputDocuments.Count}");
            Console.WriteLine($"\tRequestUnits: { bulkImportResponse.TotalRequestUnitsConsumed}");
            Console.WriteLine($"\tTime taken: { bulkImportResponse.TotalTimeTaken}");
        }
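A side note on the example above: BulkImportAsync accepts any enumerable of objects, so serializing each item to a JSON string first is optional; passing the POCO list directly should also work (a sketch under the same assumptions as the snippet above):

            var response = await bulkExecutor.BulkImportAsync(
                documents: list,   // the POCOs from CreateAccessesList()
                enableUpsert: true,
                disableAutomaticIdGeneration: true);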
        public async Task TestIfDocumentsAreUpserted()
        {
            Mock<IBulkExecutor> mockBulkExecutor = new Mock<IBulkExecutor>();
            Mock<ILogger> mockLog = new Mock<ILogger>();
          
            AsyncCollector<Document> postMortemCol = new AsyncCollector<Document>();

            DocumentClient client = new DocumentClient(new Uri(configuration["EndPoint"]), configuration["AuthKey"]);

            DocumentCollection container = client.CreateDocumentCollectionQuery(UriFactory.CreateDatabaseUri(configuration["TargetDatabase"]))
                .Where(c => c.Id == configuration["TargetCollection"]).AsEnumerable().FirstOrDefault();

            IBulkExecutor bulkExecutor = new BulkExecutor(client, container);
            await bulkExecutor.InitializeAsync();

            IEnumerable<string> bulkDocs = Utilities.GenerateDocumentsWithRandomIdAndPk(5000);
            BulkImportResponse bulkImportResponse = await bulkExecutor.BulkImportAsync(bulkDocs, false);

            List<Document> fakeBadDocsBatch = new List<Document>();
            Document doc = new Document();
            doc.Id = "0f4adabc-d461-495f-bdd3-4f8877ae7f3f";

            for (int i = 0; i < 10; i++)
            {
                fakeBadDocsBatch.Add(doc);
            }

            ReadOnlyCollection<Document> readOnlyDocs = fakeBadDocsBatch.AsReadOnly();

            mockBulkExecutor.Setup(bulkExecutorFake => bulkExecutorFake.InitializeAsync())
                .Verifiable();

            mockBulkExecutor.Setup(bulkExecutorFake => bulkExecutorFake.BulkImportAsync(It.IsAny<ReadOnlyCollection<Document>>(), true, true, null, It.IsAny<int>(), It.IsAny<CancellationToken>()))
                .Returns(() => Task.FromResult(bulkImportResponse))

                //Add docs to the badInputDocuments list to test whether the post-mortem queue is employed
                .Callback(() => bulkImportResponse.BadInputDocuments.AddRange(fakeBadDocsBatch));

            DocumentFeedMigrator migrator = new DocumentFeedMigrator(mockBulkExecutor.Object);
            await migrator.Run(postMortemCol, readOnlyDocs, mockLog.Object);
          
            Assert.AreEqual(10, postMortemCol.Count());

            mockBulkExecutor.Verify(
                bulkExecutorFake => bulkExecutorFake.BulkImportAsync(
                    It.IsAny<ReadOnlyCollection<Document>>(),
                    true,
                    true,
                    null,
                    It.IsAny<int>(),
                    It.IsAny<CancellationToken>()),
                Times.Exactly(1));
        }
        private async Task WriteDocuments(ConcurrentDictionary<string, List<Document>> docsByType, CancellationToken cancellation = default)
        {
            using (this.StartOperation(_telemetry))
            {
                _logger.LogInformation($"Started writing documents to {_target.Db}/{_target.Collection}");

                // Set retry options high during initialization (default values).
                var client = _targetClient.Client;
                client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 30;
                client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 9;

                IBulkExecutor bulkExecutor = new BulkExecutor(client, _targetClient.Collection);
                await bulkExecutor.InitializeAsync();

                // Set retries to 0 to pass complete control to bulk executor.
                client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 0;
                client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 0;

                long totalDocumentsImported = 0;

                var block = new ActionBlock<string>(
                    async (docType) =>
                {
                    var docs     = docsByType[docType].Select(d => { d.Id = null; return d; }).ToList();
                    var response = await bulkExecutor.BulkImportAsync(
                        docs,
                        enableUpsert: false,
                        disableAutomaticIdGeneration: false, cancellationToken: cancellation);

                    _logger.LogInformation($"Wrote {response.NumberOfDocumentsImported} documents for type {docType}.");
                    _ru.TrackValue(response.TotalRequestUnitsConsumed, $"import_{docType}");
                    _latency.TrackValue(response.TotalTimeTaken.TotalMilliseconds, $"import_{docType}");
                    _docCount.TrackValue(response.NumberOfDocumentsImported, $"import_{docType}");
                    _error.TrackValue(response.BadInputDocuments?.Count ?? 0, $"import_{docType}");

                    Interlocked.Add(ref totalDocumentsImported, response.NumberOfDocumentsImported);
                },
                    new ExecutionDataflowBlockOptions()
                {
                    MaxDegreeOfParallelism = _syncSettings.MaxDegreeOfParallelism
                });

                foreach (var docType in _syncSettings.DocumentTypes)
                {
                    block.Post(docType);
                }
                block.Complete();
                await block.Completion;

                _logger.LogInformation($"Total of {totalDocumentsImported} documents written to target db.");
            }
        }
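The ActionBlock above fans imports out across document types with unbounded queueing. If memory is a concern, the block can be bounded so producers backpressure; a sketch reusing the same options type (ImportOneTypeAsync is a hypothetical wrapper around the import lambda above):

                var boundedBlock = new ActionBlock<string>(
                    docType => ImportOneTypeAsync(docType),
                    new ExecutionDataflowBlockOptions
                    {
                        MaxDegreeOfParallelism = _syncSettings.MaxDegreeOfParallelism,
                        BoundedCapacity        = 4 // assumption: small queue is enough
                    });

                // With BoundedCapacity set, use SendAsync instead of Post so the
                // producer waits for queue space rather than Post returning false.
                foreach (var docType in _syncSettings.DocumentTypes)
                {
                    await boundedBlock.SendAsync(docType);
                }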
Example 11
        private static async Task Restore(DocumentClient client, string database, string collection, string inputFile)
        {
            // ReSharper disable once ReplaceWithSingleCallToFirst
            var documentCollection = client
                                     .CreateDocumentCollectionQuery(UriFactory.CreateDatabaseUri(database))
                                     .Where(c => c.Id == collection)
                                     .AsEnumerable()
                                     .First();

            client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 30;
            client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 9;

            var bulk = new BulkExecutor(client, documentCollection);
            await bulk.InitializeAsync();

            client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 0;
            client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 0;

            var filename = Path.Combine(Directory.GetCurrentDirectory(), inputFile);

            if (!File.Exists(filename))
            {
                Console.WriteLine($"File {filename} not found.");
                return;
            }

            using (var stream = File.OpenRead(filename)) Console.WriteLine($"Attempting to import {CountLinesMaybe(stream)} records from {filename}.");

            var records  = new List<object>(10);
            var complete = 0;

            using (var stream = File.OpenRead(filename))
                using (var reader = new StreamReader(stream))
                {
                    while (!reader.EndOfStream)
                    {
                        for (var i = 0; i < 10 && !reader.EndOfStream; i++)
                        {
                            var line = await reader.ReadLineAsync();

                            var document = JsonConvert.DeserializeObject(line);
                            records.Add(document);
                        }

                        await bulk.BulkImportAsync(records, enableUpsert: true);

                        complete += records.Count;
                        Console.Write($"{complete}...");
                        records.Clear();
                    }
                }
            Console.WriteLine("Done!");
        }
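CountLinesMaybe is not shown on this page; a plausible helper consistent with how it is called above (counts newline bytes, hence the approximate name, since the last line may lack a trailing newline):

        private static int CountLinesMaybe(Stream stream)
        {
            var count = 0;
            int b;
            while ((b = stream.ReadByte()) != -1)
            {
                if (b == '\n')
                {
                    count++;
                }
            }
            return count;
        }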
Example 12
        private static async Task<IBulkExecutor> CreateBulkExecutor(DocumentCollection collection)
        {
            var client       = CreateDocumentClient();
            var bulkExecutor = new BulkExecutor(client, collection);
            await bulkExecutor.InitializeAsync();

            // Set retries to 0 (after initialization) to pass complete control to bulk executor.
            client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 0;
            client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 0;

            return bulkExecutor;
        }
        public async Task<IBulkExecutor> Create(string databaseId, string collectionId)
        {
            var client     = CreateClient();
            var collection = client.CreateDocumentCollectionQuery(UriFactory.CreateDatabaseUri(databaseId))
                             .Where(c => c.Id == collectionId)
                             .AsEnumerable()
                             .FirstOrDefault();

            var executor = new BulkExecutor(client, collection);
            await executor.InitializeAsync();

            return executor;
        }
Example 14
        private static IBulkExecutor GetBulkExecutor(DocumentClient client, DocumentCollection targetCollection)
        {
            client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 30;
            client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 30;

            IBulkExecutor bulkExecutor = new BulkExecutor(client, targetCollection);

            // Set retry options to 0 to pass congestion control to bulk executor.
            // Note: the caller must still await bulkExecutor.InitializeAsync() before first use.
            client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 0;
            client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 0;

            return bulkExecutor;
        }
Example 15
        protected override async Task InternalBulkInsertItemsAsync<T>(T[] items, CancellationToken token = new CancellationToken())
        {
            if (items == null)
            {
                throw new ArgumentNullException(nameof(items));
            }

            Logger.LogTrace("BulkInsertItemsAsync: {EntityType} {EntityCount}", typeof(T), items.Length);

            var client = _provider.Client.Value;

            try
            {
                // Set retries to 0 to pass complete control to bulk executor.
                client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 0;
                client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 0;

                IBulkExecutor bulkExecutor = new BulkExecutor(client, _provider.GetCollection());
                await bulkExecutor.InitializeAsync();

                var bulkImportResponse = await bulkExecutor.BulkImportAsync(
                    documents: items,
                    enableUpsert: true,
                    disableAutomaticIdGeneration: true,
                    maxConcurrencyPerPartitionKeyRange: null,
                    maxInMemorySortingBatchSize: null,
                    cancellationToken: token);

                Logger.LogTrace(
                    "BulkImportAsync: Imported: {NumberOfDocumentsImported} / RequestUnits: {RequestCharge} / TimeTaken {TotalTimeTaken}",
                    bulkImportResponse.NumberOfDocumentsImported,
                    bulkImportResponse.TotalRequestUnitsConsumed,
                    bulkImportResponse.TotalTimeTaken);

                if (bulkImportResponse.BadInputDocuments != null && bulkImportResponse.BadInputDocuments.Any())
                {
                    Logger.LogWarning("BulkImport Bad Documents");
                    foreach (var o in bulkImportResponse.BadInputDocuments)
                    {
                        Logger.LogWarning("BulkImport Bad Doc {@doc}", o);
                    }

                    throw new InvalidOperationException("Bulk Import Bad Documents");
                }
            }
            finally
            {
                client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 30;
                client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 9;
            }
        }
        /// <summary>
        /// Driver function for bulk import and bulk delete.
        /// </summary>
        /// <returns></returns>
        private async Task RunBulkImportAndBulkDeleteAsync()
        {
            // Import documents into the collection
            await this.RunBulkImport();

            // Fetch <PartitionKey><DocumentId> tuples to delete in bulk
            List<Tuple<string, string>> pkIdTuplesToDelete = GeneratePartitionKeyDocumentIdTuplesToBulkDelete();

            long               totalNumberOfDocumentsDeleted = 0;
            double             totalRequestUnitsConsumed     = 0;
            double             totalTimeTakenSec             = 0;
            BulkDeleteResponse bulkDeleteResponse            = null;

            BulkExecutor bulkExecutor = new BulkExecutor(this.client, this.dataCollection);
            await bulkExecutor.InitializeAsync();

            try
            {
                bulkDeleteResponse = await bulkExecutor.BulkDeleteAsync(pkIdTuplesToDelete);

                totalNumberOfDocumentsDeleted = bulkDeleteResponse.NumberOfDocumentsDeleted;
                totalRequestUnitsConsumed     = bulkDeleteResponse.TotalRequestUnitsConsumed;
                totalTimeTakenSec             = bulkDeleteResponse.TotalTimeTaken.TotalSeconds;
            }
            catch (DocumentClientException de)
            {
                Trace.TraceError("Document client exception: {0}", de);
            }
            catch (Exception e)
            {
                Trace.TraceError("Exception: {0}", e);
            }

            Trace.WriteLine("\n\n--------------------------------------------------------------------- ");
            Trace.WriteLine("Executing bulk delete:");
            Trace.WriteLine("--------------------------------------------------------------------- ");
            Trace.WriteLine("\n\nOverall summary of bulk delete:");
            Trace.WriteLine("--------------------------------------------------------------------- ");
            Trace.WriteLine(String.Format("Deleted {0} docs @ {1} writes/s, {2} RU/s in {3} sec",
                                          totalNumberOfDocumentsDeleted,
                                          Math.Round(totalNumberOfDocumentsDeleted / totalTimeTakenSec),
                                          Math.Round(totalRequestUnitsConsumed / totalTimeTakenSec),
                                          totalTimeTakenSec));
            Trace.WriteLine(String.Format("Average RU consumption per document delete: {0}",
                                          (totalRequestUnitsConsumed / totalNumberOfDocumentsDeleted)));
            Trace.WriteLine("--------------------------------------------------------------------- \n");

            //-----------------------------------------------------------------------------------------------
        }
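GeneratePartitionKeyDocumentIdTuplesToBulkDelete is not shown; a hedged sketch of the shape BulkDeleteAsync expects, assuming (as in other examples on this page) that the partition key value equals the document id:

        private static List<Tuple<string, string>> GeneratePartitionKeyDocumentIdTuplesToBulkDelete()
        {
            return Enumerable.Range(0, 1000)
                   .Select(i => new Tuple<string, string>(i.ToString(), i.ToString()))
                   .ToList();
        }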
Example 17
            /// <inheritdoc/>
            public async Task<bool> AddManyAsync(IReadOnlyList<T> items)
            {
                if (_client is DocumentClient documentClient)
                {
                    var executor = new BulkExecutor(documentClient, _documentCollection);

                    await executor.InitializeAsync();

                    var response = await executor.BulkImportAsync(items.Cast<object>());

                    return response.NumberOfDocumentsImported == items.Count;
                }

                return false;
            }
Example 18
        public async Task<IBulkExecutor> InitializeBulkExecutorAsync()
        {
            var dataCollection = await GetDocumentCollectionAsync(_databaseName, ConstantVariables.CosmosCollectionName, _collectionThroughput,
                                                                  ConstantVariables.CosmosDbCollectionPartitionKey);

            _client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 30;
            _client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 9;

            var bulkExecutor = new BulkExecutor(_client, dataCollection);
            await bulkExecutor.InitializeAsync();

            _client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 0;
            _client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 0;

            return bulkExecutor;
        }
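Nearly every snippet on this page repeats the same toggle: raise the client retry options for InitializeAsync, then zero them so the executor's own congestion control takes over. One way to factor that out, using only calls already shown above:

        private static async Task<IBulkExecutor> CreateInitializedExecutorAsync(
            DocumentClient client, DocumentCollection collection)
        {
            // High retries so initialization survives transient throttling.
            client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 30;
            client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 9;

            IBulkExecutor bulkExecutor = new BulkExecutor(client, collection);
            await bulkExecutor.InitializeAsync();

            // Zero retries afterwards to pass complete control to the bulk executor.
            client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 0;
            client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 0;

            return bulkExecutor;
        }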
        /// <summary>
        /// This method uses the Cosmos DB BulkExecutor library to bulk ingest the input list of JSON documents
        /// </summary>
        /// <param name="documentsToImport"> List of documents to bulk ingest into Cosmos DB </param>
        public async Task DataImportForMultipleTemplates(List<string> documentsToImport)
        {
            DocumentCollection collection = GetCollectionIfExists(this.DatabaseName, this.CollectionName);

            if (collection == null)
            {
                throw new Exception("The collection does not exist");
            }

            BulkExecutor bulkExecutor = new BulkExecutor(this.Client, collection);
            await bulkExecutor.InitializeAsync();

            BulkImportResponse bulkImportResponse = null;
            long   totalNumberOfDocumentsInserted = 0;
            double totalRequestUnitsConsumed      = 0;
            double totalTimeTakenSec = 0;

            try
            {
                bulkImportResponse = await bulkExecutor.BulkImportAsync(documentsToImport, false, false);
            }
            catch (DocumentClientException de)
            {
                Console.WriteLine("Document client exception while execting bulk insert. Stack trace: \n {0}", de.StackTrace);
                Console.ReadLine();
            }
            catch (Exception e)
            {
                Console.WriteLine("Exception thrown while executing bulk insert. Stack trace:\n {0}", e.StackTrace);
                Console.ReadLine();
            }

            if (bulkImportResponse == null)
            {
                return; // bulk import failed; nothing to summarize
            }

            Console.WriteLine("\nSummary for write.");
            Console.WriteLine("--------------------------------------------------------------------- ");
            Console.WriteLine(String.Format("Inserted {0} docs @ {1} writes/s, {2} RU/s in {3} sec)",
                                            bulkImportResponse.NumberOfDocumentsImported,
                                            Math.Round(bulkImportResponse.NumberOfDocumentsImported / bulkImportResponse.TotalTimeTaken.TotalSeconds),
                                            Math.Round(bulkImportResponse.TotalRequestUnitsConsumed / bulkImportResponse.TotalTimeTaken.TotalSeconds),
                                            bulkImportResponse.TotalTimeTaken.TotalSeconds));
            Console.WriteLine(String.Format("Average RU consumption per document: {0}",
                                            (bulkImportResponse.TotalRequestUnitsConsumed / bulkImportResponse.NumberOfDocumentsImported)));
            Console.WriteLine("---------------------------------------------------------------------\n ");

            totalNumberOfDocumentsInserted += bulkImportResponse.NumberOfDocumentsImported;
            totalRequestUnitsConsumed      += bulkImportResponse.TotalRequestUnitsConsumed;
            totalTimeTakenSec += bulkImportResponse.TotalTimeTaken.TotalSeconds;
        }
        public async Task DeleteAsync(IEnumerable<string> ids)
        {
            var documentCollection = await _documentClient.ReadDocumentCollectionAsync(GetCollectionUri()).ConfigureAwait(false);

            var bulkExecutor = new BulkExecutor(_documentClient as Documents.Client.DocumentClient, documentCollection);
            await bulkExecutor.InitializeAsync().ConfigureAwait(false);

            var entries = ids.Select(x => new Tuple<string, string>(x, x)).ToList();

            BulkDeleteResponse bulkDeleteResponse = null;

            do
            {
                bulkDeleteResponse = await bulkExecutor
                                     .BulkDeleteAsync(entries)
                                     .ConfigureAwait(false);
            } while (bulkDeleteResponse.NumberOfDocumentsDeleted < entries.Count && bulkDeleteResponse.NumberOfDocumentsDeleted > 0);
        }
Example 21
        private async Task<IBulkExecutor> CreateBulkExecutor(DocumentClient client, DocumentCollection collection)
        {
            try
            {
                var bulkExecutor = new BulkExecutor(client, collection);
                await bulkExecutor.InitializeAsync();

                client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 0;
                client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 0;

                return bulkExecutor;
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                throw;
            }
        }
        public async Task UpsertAsync(IEnumerable<TEntity> entities)
        {
            var documentCollection = await _documentClient.ReadDocumentCollectionAsync(GetCollectionUri()).ConfigureAwait(false);

            var bulkExecutor = new BulkExecutor(_documentClient as Documents.Client.DocumentClient, documentCollection);
            await bulkExecutor.InitializeAsync().ConfigureAwait(false);

            // Materialize once so the Select runs a single time and Count is stable.
            var entries = entities.Select(x => new DbEntry<TEntity>(x, _model.Analyzer, _jsonSerializerSettings)).ToList();

            BulkImportResponse bulkImportResponse = null;

            do
            {
                bulkImportResponse = await bulkExecutor
                                     .BulkImportAsync(
                                         entries,
                                         enableUpsert: true,
                                         disableAutomaticIdGeneration: true)
                                     .ConfigureAwait(false);
            } while (bulkImportResponse.NumberOfDocumentsImported < entries.Count);
        }
Example 23
        public async Task<int> UpsertObjects(List<JObject> list, CancellationToken cancel = default)
        {
            Client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 30;
            Client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 9;
            IBulkExecutor bulkExecutor = new BulkExecutor(Client, Collection);
            await bulkExecutor.InitializeAsync();

            Client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 0;
            Client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 0;

            var response = await bulkExecutor.BulkImportAsync(
                list,
                enableUpsert: true,
                disableAutomaticIdGeneration: false,
                cancellationToken: cancel);

            _logger.LogInformation($"Wrote {response.NumberOfDocumentsImported} documents");

            _logger.LogInformation(
                $"Total of {response.NumberOfDocumentsImported} documents written to {Collection.Id}.");

            return (int)response.NumberOfDocumentsImported;
        }
Example 24
        public async Task<IBulkExecutor> CreateAsync()
        {
            var databaseUri = UriFactory.CreateDatabaseUri(_cosmos.Value.DatabaseId);
            var collection  = new DocumentCollection {
                Id = _cosmos.Value.DatasetContainerId
            };

            collection.PartitionKey.Paths.Add("/word");
            collection.IndexingPolicy.IncludedPaths.Add(new IncludedPath {
                Path = "/*"
            });
            collection.IndexingPolicy.ExcludedPaths.Add(new ExcludedPath {
                Path = "/drawing/*"
            });

            var collectionResponse = await _client.CreateDocumentCollectionIfNotExistsAsync(databaseUri, collection);

            var bulkExecutor = new BulkExecutor(_client, collectionResponse);
            await bulkExecutor.InitializeAsync();

            _client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 0;
            _client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 0;
            return bulkExecutor;
        }
        private async Task<int> WriteDocuments(string collectionName, List<Document> docs, CancellationToken stoppingToken = default)
        {
            using (this.StartOperation(_telemetry))
            {
                var partitionKeyPaths = _sourceClient.Collection.PartitionKey.Paths?.ToArray();
                await _targetClient.SwitchCollection(collectionName, partitionKeyPaths);

                // Set retry options high during initialization (default values).
                var client = _targetClient.Client;
                client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 30;
                client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 9;

                IBulkExecutor bulkExecutor = new BulkExecutor(client, _targetClient.Collection);
                await bulkExecutor.InitializeAsync();

                // Set retries to 0 to pass complete control to bulk executor.
                client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 0;
                client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 0;

                var response = await bulkExecutor.BulkImportAsync(
                    docs,
                    enableUpsert: true,
                    disableAutomaticIdGeneration: false,
                    cancellationToken: stoppingToken);

                _logger.LogInformation($"Wrote {response.NumberOfDocumentsImported} documents");
                _ru.TrackValue(response.TotalRequestUnitsConsumed, $"import_{collectionName}");
                _latency.TrackValue(response.TotalTimeTaken.TotalMilliseconds, $"import_{collectionName}");
                _docCount.TrackValue(response.NumberOfDocumentsImported, $"import_{collectionName}");
                _error.TrackValue(response.BadInputDocuments?.Count ?? 0, $"import_{collectionName}");

                _logger.LogInformation($"Total of {response.NumberOfDocumentsImported} documents written to {collectionName}.");

                return (int)response.NumberOfDocumentsImported;
            }
        }
        /// <summary>
        /// Bulk import.
        /// </summary>
        /// <returns></returns>
        public static async Task RunBulkImportAsync(string file, string orderId)
        {
            // Read the input file

            try
            {
                DocumentCollection dataCollection = null;
                if (client == null)
                {
                    client = new DocumentClient(new Uri(ConfigurationManager.AppSettings["endpoint"]), ConfigurationManager.AppSettings["authKey"], ConnectionPolicy);
                }
                dataCollection = Utils.GetCollectionIfExists(client, DatabaseId, qrcodeTable);
                if (dataCollection == null)
                {
                    throw new Exception("The data collection does not exist");
                }

                // Prepare for bulk import.
                // Creating documents with simple partition key here.
                string partitionKeyProperty = dataCollection.PartitionKey.Paths[0].Replace("/", "");

                // Set retry options high for initialization (default values).
                client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 30;
                client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 9;

                IBulkExecutor bulkExecutor = new BulkExecutor(client, dataCollection);
                await bulkExecutor.InitializeAsync();

                // Set retries to 0 to pass control to bulk executor.
                client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 0;
                client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 0;

                BulkImportResponse bulkImportResponse = null;
                long   totalNumberOfDocumentsInserted = 0;
                double totalRequestUnitsConsumed      = 0;
                double totalTimeTakenSec = 0;

                var tokenSource = new CancellationTokenSource();
                var token       = tokenSource.Token;

                StreamReader sourceFileStream = new StreamReader(file, Encoding.Default);

                for (int i = 0; i < numberOfBatches && sourceFileStream != null; i++)
                {
                    // Generate JSON-serialized documents to import.
                    List<string> documentsToImportInBatch = new List<string>();
                    long          prefix = i * numberOfDocumentsPerBatch;
                    // Batch write: read a batch of documents from the file and add them to the bulk import list
                    documentsToImportInBatch = Utils.AddDocumentFromFile(sourceFileStream, numberOfDocumentsPerBatch, orderId, out sourceFileStream);
                    // Invoke bulk import API.
                    var tasks = new List<Task>();
                    tasks.Add(Task.Run(async () =>
                    {
                        //Trace.TraceInformation(String.Format("Executing bulk import for batch {0}", i));
                        do
                        {
                            try
                            {
                                bulkImportResponse = await bulkExecutor.BulkImportAsync(
                                    documents: documentsToImportInBatch,
                                    enableUpsert: true,
                                    disableAutomaticIdGeneration: true,
                                    maxConcurrencyPerPartitionKeyRange: null,
                                    maxInMemorySortingBatchSize: null,
                                    cancellationToken: token);
                            }
                            catch (DocumentClientException de)
                            {
                                //Trace.TraceError("Document client exception: {0}", de);
                                break;
                            }
                            catch (Exception e)
                            {
                                //Trace.TraceError("Exception: {0}", e);
                                break;
                            }
                        } while (bulkImportResponse.NumberOfDocumentsImported < documentsToImportInBatch.Count);

                        totalNumberOfDocumentsInserted += bulkImportResponse.NumberOfDocumentsImported;
                        totalRequestUnitsConsumed      += bulkImportResponse.TotalRequestUnitsConsumed;
                        totalTimeTakenSec += bulkImportResponse.TotalTimeTaken.TotalSeconds;
                        logger.InfoFormat("fileName:{0} orderId:{1} ", file, orderId);
                        logger.InfoFormat("totalNumberOfDocumentsInserted:{0} totalRequestUnitsConsumed:{1} totalTimeTakenSec:{2}", totalNumberOfDocumentsInserted, totalRequestUnitsConsumed, totalTimeTakenSec);
                    },
                                       token));

                    await Task.WhenAll(tasks);
                }
            }
            catch (Exception de)
            {
                logger.InfoFormat("Insert Error: {0}", de);
            }
        }
        public async Task CreateItemsAsync(T[] item)
        {
            ConnectionPolicy ConnectionPolicy = new ConnectionPolicy
            {
                ConnectionMode     = ConnectionMode.Direct,
                ConnectionProtocol = Protocol.Tcp
            };

            using (var client = new DocumentClient(new Uri(Endpoint), Key, ConnectionPolicy))
            {
                // Set retry options high during initialization (default values).
                client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 30;
                client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 9;

                DocumentCollection dataCollection = GetCollectionIfExists(client, DatabaseId, CollectionId);
                IBulkExecutor      bulkExecutor   = new BulkExecutor(client, dataCollection);
                await bulkExecutor.InitializeAsync();

                // Set retries to 0 to pass complete control to bulk executor.
                client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 0;
                client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 0;

                BulkImportResponse? bulkImportResponse = null;
                long   totalNumberOfDocumentsInserted = 0;
                double totalRequestUnitsConsumed      = 0;
                double totalTimeTakenSec = 0;

                long numberOfDocumentsToGenerate = item.Length;
                long numberOfDocumentsPerBatch   = 100;
                int  numberOfBatches             = (int)Math.Ceiling(((double)numberOfDocumentsToGenerate) / numberOfDocumentsPerBatch);


                var tokenSource    = new CancellationTokenSource();
                var token          = tokenSource.Token;
                var documentNumber = 0;

                for (int i = 0; i < numberOfBatches; i++)
                {
                    // Generate JSON-serialized documents to import.

                    List<string> documentsToImportInBatch = new List<string>();
                    long          prefix = i * numberOfDocumentsPerBatch;

                    Trace.TraceInformation(String.Format("Generating {0} documents to import for batch {1}", numberOfDocumentsPerBatch, i));
                    for (int j = 0; j < numberOfDocumentsPerBatch; j++)
                    {
                        if (item != null && documentNumber < item.Length && item[documentNumber] != null)
                        {
                            T       toAdd = item[documentNumber++];
                            string? s     = toAdd.ToString();
                            if (s != null)
                            {
                                documentsToImportInBatch.Add(s);
                            }
                        }
                    }

                    // Invoke bulk import API.

                    var tasks = new List<Task>
                    {
                        Task.Run(async () =>
                        {
                            Trace.TraceInformation(String.Format("Executing bulk import for batch {0}", i));
                            do
                            {
                                try
                                {
                                    bulkImportResponse = await bulkExecutor.BulkImportAsync(
                                        documents: documentsToImportInBatch,
                                        enableUpsert: false,
                                        disableAutomaticIdGeneration: true,
                                        maxConcurrencyPerPartitionKeyRange: null,
                                        maxInMemorySortingBatchSize: null,
                                        cancellationToken: token);
                                }
                                catch (DocumentClientException de)
                                {
                                    Trace.TraceError("Document client exception: {0}", de);
                                    break;
                                }
                                catch (Exception e)
                                {
                                    Trace.TraceError("Exception: {0}", e);
                                    break;
                                }
                            } while (bulkImportResponse.NumberOfDocumentsImported < documentsToImportInBatch.Count);

                            if (bulkImportResponse != null)
                            {
                                Trace.WriteLine(String.Format("\nSummary for batch {0}:", i));
                                Trace.WriteLine("--------------------------------------------------------------------- ");

                                Trace.WriteLine(String.Format("Inserted {0} docs @ {1} writes/s, {2} RU/s in {3} sec",
                                                              bulkImportResponse.NumberOfDocumentsImported,
                                                              Math.Round(bulkImportResponse.NumberOfDocumentsImported / bulkImportResponse.TotalTimeTaken.TotalSeconds),
                                                              Math.Round(bulkImportResponse.TotalRequestUnitsConsumed / bulkImportResponse.TotalTimeTaken.TotalSeconds),
                                                              bulkImportResponse.TotalTimeTaken.TotalSeconds));
                                Trace.WriteLine(String.Format("Average RU consumption per document: {0}",
                                                              (bulkImportResponse.TotalRequestUnitsConsumed / bulkImportResponse.NumberOfDocumentsImported)));
                                Trace.WriteLine("---------------------------------------------------------------------\n ");

                                totalNumberOfDocumentsInserted += bulkImportResponse.NumberOfDocumentsImported;
                                totalRequestUnitsConsumed      += bulkImportResponse.TotalRequestUnitsConsumed;
                                totalTimeTakenSec += bulkImportResponse.TotalTimeTaken.TotalSeconds;
                            }
                        },
                                 token)
                    };

                    await Task.WhenAll(tasks);
                }
            }
        }
Example 28
        private async Task ImportFileToCollectionAsync(string jsonFile, string collectionIdDestination)
        {
            //if (!Directory.Exists(folder))
            //{
            //    return;
            //}

            var options = new ParallelOptions()
            {
                MaxDegreeOfParallelism = MaxThreads
            };

            //var jsonFiles = Directory.GetFiles(folder, "*.json");

            var documentsToImportInBatch = new List<string>();

            //foreach (var jsonFile in jsonFiles)
            //{
            documentsToImportInBatch.Add(File.ReadAllText(jsonFile));
            //}

            if (cbBulkUpload.Checked)
            {
                var           dataCollection = cosmosDest.DocumentCollectionDict[collectionIdDestination];
                IBulkExecutor bulkExecutor   = new BulkExecutor(cosmosDest.Client, dataCollection);
                await bulkExecutor.InitializeAsync();

                //var bulkExecutor = new BulkImportAsync(cosmosDest.Client, dataCollection);
                //bulkExecutor.InitializeAsync().Wait();
                // Set retries to 0 to pass complete control to bulk executor.
                cosmosDest.Client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 0;
                cosmosDest.Client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 0;
                var tokenSource = new CancellationTokenSource();
                var token       = tokenSource.Token;


                //Parallel.ForEach(jsonFiles, options,
                //    jsonFile => { documentsToImportInBatch.Add(File.ReadAllText(jsonFile)); });

                //var s = "{  \"name\": \"Afzaal Ahmad Zeeshan\",  \"id\": {"+Guid.NewGuid()+"}  /} ";

                try
                {
                    await bulkExecutor.BulkImportAsync(
                        documentsToImportInBatch,
                        enableUpsert: true,
                        disableAutomaticIdGeneration: true,
                        maxConcurrencyPerPartitionKeyRange: 2000,
                        maxInMemorySortingBatchSize: null,
                        cancellationToken: token);
                }
                catch (Exception)
                {
                    throw; // rethrow preserving the original stack trace
                }

                //var s = documentsToImportInBatch[0];
                //documentsToImportInBatch.Clear();
                //documentsToImportInBatch.Add(s);
                ////.Result;
            }
            else
            {
                var collectionUri = UriFactory.CreateDocumentCollectionUri(cosmosDest.DbName, collectionIdDestination);

                Parallel.ForEach(documentsToImportInBatch, options,
                                 doc => { cosmosDest.Client.UpsertDocumentAsync(collectionUri, JObject.Parse(doc)).Wait(); });
            }
        }
Example 29
        /// <summary>
        /// Driver function for bulk import and bulk update.
        /// </summary>
        /// <returns></returns>
        private async Task RunBulkImportAndUpdateAsync()
        {
            // Cleanup on start if set in config.

            DocumentCollection dataCollection = null;

            try
            {
                if (bool.Parse(ConfigurationManager.AppSettings["ShouldCleanupOnStart"]))
                {
                    Database database = Utils.GetDatabaseIfExists(client, DatabaseName);
                    if (database != null)
                    {
                        await client.DeleteDatabaseAsync(database.SelfLink);
                    }

                    Trace.TraceInformation("Creating database {0}", DatabaseName);
                    database = await client.CreateDatabaseAsync(new Database { Id = DatabaseName });

                    Trace.TraceInformation(String.Format("Creating collection {0} with {1} RU/s", CollectionName, CollectionThroughput));
                    dataCollection = await Utils.CreatePartitionedCollectionAsync(client, DatabaseName, CollectionName, CollectionThroughput);
                }
                else
                {
                    dataCollection = Utils.GetCollectionIfExists(client, DatabaseName, CollectionName);
                    if (dataCollection == null)
                    {
                        throw new Exception("The data collection does not exist");
                    }
                }
            }
            catch (Exception de)
            {
                Trace.TraceError("Unable to initialize, exception message: {0}", de.Message);
                throw;
            }

            // Prepare for bulk import.

            // Creating documents with simple partition key here.
            string partitionKeyProperty = dataCollection.PartitionKey.Paths[0].Replace("/", "");

            long numberOfDocumentsToGenerate = long.Parse(ConfigurationManager.AppSettings["NumberOfDocumentsToUpdate"]);
            int  numberOfBatches             = int.Parse(ConfigurationManager.AppSettings["NumberOfBatches"]);
            long numberOfDocumentsPerBatch   = (long)Math.Floor(((double)numberOfDocumentsToGenerate) / numberOfBatches);

            // Set retry options high for initialization (default values).
            client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 30;
            client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 9;

            IBulkExecutor bulkExecutor = new BulkExecutor(client, dataCollection);
            await bulkExecutor.InitializeAsync();

            // Set retries to 0 to pass control to bulk executor.
            client.ConnectionPolicy.RetryOptions.MaxRetryWaitTimeInSeconds           = 0;
            client.ConnectionPolicy.RetryOptions.MaxRetryAttemptsOnThrottledRequests = 0;

            BulkImportResponse bulkImportResponse = null;
            long   totalNumberOfDocumentsInserted = 0;
            double totalRequestUnitsConsumed      = 0;
            double totalTimeTakenSec = 0;

            var tokenSource = new CancellationTokenSource();
            var token       = tokenSource.Token;

            for (int i = 0; i < numberOfBatches; i++)
            {
                // Generate JSON-serialized documents to import.

                List<string> documentsToImportInBatch = new List<string>();
                long          prefix = i * numberOfDocumentsPerBatch;

                Trace.TraceInformation(String.Format("Generating {0} documents to import for batch {1}", numberOfDocumentsPerBatch, i));
                for (int j = 0; j < numberOfDocumentsPerBatch; j++)
                {
                    string partitionKeyValue = (prefix + j).ToString();
                    string id = partitionKeyValue;

                    documentsToImportInBatch.Add(Utils.GenerateRandomDocumentString(id, partitionKeyProperty, partitionKeyValue));
                }

                // Invoke bulk import API.

                var tasks = new List<Task>();

                tasks.Add(Task.Run(async () =>
                {
                    Trace.TraceInformation(String.Format("Executing bulk import for batch {0}", i));
                    do
                    {
                        try
                        {
                            bulkImportResponse = await bulkExecutor.BulkImportAsync(
                                documents: documentsToImportInBatch,
                                enableUpsert: true,
                                disableAutomaticIdGeneration: true,
                                maxConcurrencyPerPartitionKeyRange: null,
                                maxInMemorySortingBatchSize: null,
                                cancellationToken: token);
                        }
                        catch (DocumentClientException de)
                        {
                            Trace.TraceError("Document client exception: {0}", de);
                            break;
                        }
                        catch (Exception e)
                        {
                            Trace.TraceError("Exception: {0}", e);
                            break;
                        }
                    } while (bulkImportResponse.NumberOfDocumentsImported < documentsToImportInBatch.Count);

                    // Guard against a null response (possible if the first attempt threw).
                    if (bulkImportResponse == null)
                    {
                        return;
                    }

                    Trace.WriteLine(String.Format("\nSummary for batch {0}:", i));
                    Trace.WriteLine("---------------------------------------------------------------------");
                    Trace.WriteLine(String.Format("Inserted {0} docs @ {1} writes/s, {2} RU/s in {3} sec",
                                                  bulkImportResponse.NumberOfDocumentsImported,
                                                  Math.Round(bulkImportResponse.NumberOfDocumentsImported / bulkImportResponse.TotalTimeTaken.TotalSeconds),
                                                  Math.Round(bulkImportResponse.TotalRequestUnitsConsumed / bulkImportResponse.TotalTimeTaken.TotalSeconds),
                                                  bulkImportResponse.TotalTimeTaken.TotalSeconds));
                    Trace.WriteLine(String.Format("Average RU consumption per document insert: {0}",
                                                  (bulkImportResponse.TotalRequestUnitsConsumed / bulkImportResponse.NumberOfDocumentsImported)));
                    Trace.WriteLine("---------------------------------------------------------------------\n");

                    totalNumberOfDocumentsInserted += bulkImportResponse.NumberOfDocumentsImported;
                    totalRequestUnitsConsumed      += bulkImportResponse.TotalRequestUnitsConsumed;
                    totalTimeTakenSec += bulkImportResponse.TotalTimeTaken.TotalSeconds;
                }, token));

                /*
                 * tasks.Add(Task.Run(() =>
                 * {
                 *  char ch = Console.ReadKey(true).KeyChar;
                 *  if (ch == 'c' || ch == 'C')
                 *  {
                 *      tokenSource.Cancel();
                 *      Trace.WriteLine("\nTask cancellation requested.");
                 *  }
                 * }));
                 */

                await Task.WhenAll(tasks);
            }

            Trace.WriteLine("Overall summary of bulk import:");
            Trace.WriteLine("--------------------------------------------------------------------- ");
            Trace.WriteLine(String.Format("Inserted {0} docs @ {1} writes/s, {2} RU/s in {3} sec",
                                          totalNumberOfDocumentsInserted,
                                          Math.Round(totalNumberOfDocumentsInserted / totalTimeTakenSec),
                                          Math.Round(totalRequestUnitsConsumed / totalTimeTakenSec),
                                          totalTimeTakenSec));
            Trace.WriteLine(String.Format("Average RU consumption per document insert: {0}",
                                          (totalRequestUnitsConsumed / totalNumberOfDocumentsInserted)));
            Trace.WriteLine("--------------------------------------------------------------------- \n");

            //-----------------------------------------------------------------------------------------------

            // Prepare for bulk update.

            BulkUpdateResponse bulkUpdateResponse = null;
            long totalNumberOfDocumentsUpdated    = 0;

            totalRequestUnitsConsumed = 0;
            totalTimeTakenSec         = 0;

            tokenSource = new CancellationTokenSource();
            token       = tokenSource.Token;

            // Generate update operations.
            List <UpdateOperation> updateOperations = new List <UpdateOperation>();

            // Set the name field.
            updateOperations.Add(new SetUpdateOperation <string>("Name", "UpdatedDoc"));
            // Unset the description field.
            updateOperations.Add(new UnsetUpdateOperation("description"));
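
            // Besides Set and Unset, the BulkExecutor library documents other update operation
            // types (for example increment and array push/remove); the exact class names should
            // be verified against the installed Microsoft.Azure.CosmosDB.BulkExecutor package.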

            for (int i = 0; i < numberOfBatches; i++)
            {
                // Generate update items.

                List <UpdateItem> updateItemsInBatch = new List <UpdateItem>();
                long prefix = i * numberOfDocumentsPerBatch;

                Trace.TraceInformation(String.Format("Generating {0} update items for batch {1}", numberOfDocumentsPerBatch, i));
                for (int j = 0; j < numberOfDocumentsPerBatch; j++)
                {
                    string partitionKeyValue = (prefix + j).ToString();
                    string id = partitionKeyValue;

                    updateItemsInBatch.Add(new UpdateItem(id, partitionKeyValue, updateOperations));
                }

                // Invoke bulk update API.

                var tasks = new List <Task>();

                tasks.Add(Task.Run(async () =>
                {
                    Trace.TraceInformation(String.Format("Executing bulk update for batch {0}", i));
                    do
                    {
                        try
                        {
                            bulkUpdateResponse = await bulkExecutor.BulkUpdateAsync(
                                updateItems: updateItemsInBatch,
                                maxConcurrencyPerPartitionKeyRange: null,
                                cancellationToken: token);
                        }
                        catch (DocumentClientException de)
                        {
                            Trace.TraceError("Document client exception: {0}", de);
                            break;
                        }
                        catch (Exception e)
                        {
                            Trace.TraceError("Exception: {0}", e);
                            break;
                        }
                    } while (bulkUpdateResponse.NumberOfDocumentsUpdated < updateItemsInBatch.Count);

                    // Guard against a null response (possible if the first attempt threw).
                    if (bulkUpdateResponse == null)
                    {
                        return;
                    }

                    Trace.WriteLine(String.Format("\nSummary for batch {0}:", i));
                    Trace.WriteLine("---------------------------------------------------------------------");
                    Trace.WriteLine(String.Format("Updated {0} docs @ {1} updates/s, {2} RU/s in {3} sec",
                                                  bulkUpdateResponse.NumberOfDocumentsUpdated,
                                                  Math.Round(bulkUpdateResponse.NumberOfDocumentsUpdated / bulkUpdateResponse.TotalTimeTaken.TotalSeconds),
                                                  Math.Round(bulkUpdateResponse.TotalRequestUnitsConsumed / bulkUpdateResponse.TotalTimeTaken.TotalSeconds),
                                                  bulkUpdateResponse.TotalTimeTaken.TotalSeconds));
                    Trace.WriteLine(String.Format("Average RU consumption per document update: {0}",
                                                  (bulkUpdateResponse.TotalRequestUnitsConsumed / bulkUpdateResponse.NumberOfDocumentsUpdated)));
                    Trace.WriteLine("---------------------------------------------------------------------\n");

                    totalNumberOfDocumentsUpdated += bulkUpdateResponse.NumberOfDocumentsUpdated;
                    totalRequestUnitsConsumed     += bulkUpdateResponse.TotalRequestUnitsConsumed;
                    totalTimeTakenSec             += bulkUpdateResponse.TotalTimeTaken.TotalSeconds;
                }, token));

                /*
                 * tasks.Add(Task.Run(() =>
                 * {
                 *  char ch = Console.ReadKey(true).KeyChar;
                 *  if (ch == 'c' || ch == 'C')
                 *  {
                 *      tokenSource.Cancel();
                 *      Trace.WriteLine("\nTask cancellation requested.");
                 *  }
                 * }));
                 */

                await Task.WhenAll(tasks);
            }

            Trace.WriteLine("Overall summary of bulk update:");
            Trace.WriteLine("--------------------------------------------------------------------- ");
            Trace.WriteLine(String.Format("Updated {0} docs @ {1} update/s, {2} RU/s in {3} sec",
                                          totalNumberOfDocumentsUpdated,
                                          Math.Round(totalNumberOfDocumentsUpdated / totalTimeTakenSec),
                                          Math.Round(totalRequestUnitsConsumed / totalTimeTakenSec),
                                          totalTimeTakenSec));
            Trace.WriteLine(String.Format("Average RU consumption per document update: {0}",
                                          (totalRequestUnitsConsumed / totalNumberOfDocumentsUpdated)));
            Trace.WriteLine("--------------------------------------------------------------------- \n");

            //-----------------------------------------------------------------------------------------------

            // Cleanup on finish if set in config.

            if (bool.Parse(ConfigurationManager.AppSettings["ShouldCleanupOnFinish"]))
            {
                Trace.TraceInformation("Deleting Database {0}", DatabaseName);
                await client.DeleteDatabaseAsync(UriFactory.CreateDatabaseUri(DatabaseName));
            }
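
            // For reference, "ShouldCleanupOnFinish" is read from appSettings, e.g.
            //   <add key="ShouldCleanupOnFinish" value="true" />
            // Note that bool.Parse throws if the key is missing or not a valid boolean.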

            Trace.WriteLine("\nPress any key to exit.");
            Console.ReadKey();
        }
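
        // A minimal companion sketch, not part of the original sample: deleting the imported
        // documents with the same library's BulkDeleteAsync, which accepts (partition key, id)
        // tuples. The method name BulkDeleteDocuments is ours; the BulkDeleteAsync signature
        // assumes a recent Microsoft.Azure.CosmosDB.BulkExecutor package and should be verified.
        public static async Task BulkDeleteDocuments(BulkExecutor bulkExecutor, long documentCount)
        {
            // In this sample the partition key value and the id are the same string.
            var pkIdTuplesToDelete = new List<Tuple<string, string>>();
            for (long d = 0; d < documentCount; d++)
            {
                pkIdTuplesToDelete.Add(new Tuple<string, string>(d.ToString(), d.ToString()));
            }

            BulkDeleteResponse bulkDeleteResponse = await bulkExecutor.BulkDeleteAsync(pkIdTuplesToDelete);

            Trace.WriteLine(String.Format("Deleted {0} docs consuming {1} RUs in {2} sec",
                                          bulkDeleteResponse.NumberOfDocumentsDeleted,
                                          bulkDeleteResponse.TotalRequestUnitsConsumed,
                                          bulkDeleteResponse.TotalTimeTaken.TotalSeconds));
        }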
Example #30
 // Holds the injected BulkExecutor used by this wrapper.
 private readonly BulkExecutor bulkExecutor;

 public BulkExecutorWrapper(BulkExecutor bulkExecutor)
 {
     this.bulkExecutor = bulkExecutor;
 }
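
 // Hypothetical usage, assuming an initialized BulkExecutor as in the examples above:
 //   var wrapper = new BulkExecutorWrapper(bulkExecutor);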