//Inserts orderCount orders in block-sized batches through the dataflow-pipe bulk inserter and logs total/transport time.
public async Task DataflowPipeBulkInsertBlockAsync(int seqNumber = 1, int orderCount = 4000)
{
    if (!_isInitialize)
    {
        throw new InvalidOperationException("not initialized");
    }

    #region Block Method
    (int blockCount, int minPerBlock, int maxPerBlock) blockBascInfo = BlockHelper.GetBasciBlockInfo(orderCount);
    int[] blockInfos = BlockHelper.GetBlockInfo(messageCount: orderCount,
                                                blockCount: blockBascInfo.blockCount,
                                                minPerBlock: blockBascInfo.minPerBlock,
                                                maxPerBlock: blockBascInfo.maxPerBlock);
    #endregion

    int boundedCapacity = blockInfos.Sum(b => b);
    Debug.Assert(orderCount == boundedCapacity);

    var logger = _loggerFactory.CreateLogger($"DataflowPipeBulkInserter-{seqNumber}:{orderCount}");
    try
    {
        var dataflowPipeBulkInserter = this.ServiceProvider.GetRequiredService<IDataflowPipeBulkInserter<OrderDto, OrderDto>>();
        var contosoDataSourceFactory = this.ServiceProvider.GetRequiredService<IDataSourceFactory<IContosoDataSource>>();
        var orderDataSource = contosoDataSourceFactory.Current.OrderDataSource;

        _totalCount = 0;
        var transportTimeWatcher = Stopwatch.StartNew();
        TimeSpan totalTransportTime = TimeSpan.Zero;
        var executionTimeWatcher = Stopwatch.StartNew();

        logger.LogInformation($"----begin dataflow pipe bulk insert {orderCount} orders,now:{DateTime.Now.TimeOfDay}----");

        int start = 0;
        for (int i = 0; i < blockInfos.Count(); i++)
        {
            int insertOrderCount = blockInfos[i];
            var orders = OrderJsonProvider.CreateOrders(start, insertOrderCount);
            start += insertOrderCount;

            transportTimeWatcher.Restart();
            var dbOrders = await orderDataSource.DataflowPipeBulkInsertOrdersAsync(orders);
            totalTransportTime += transportTimeWatcher.Elapsed;
            transportTimeWatcher.Reset();

            if (dbOrders?.Count() > 0)
            {
                await ProcessDataflowPipeOrdersAsync(dbOrders);
            }
        }

        logger.LogInformation(
            $"----dataflow pipe bulk insert {orderCount} orders,cost time:\"{executionTimeWatcher.Elapsed}\",transport time:{totalTransportTime},count/time(sec):{Math.Ceiling(orderCount / totalTransportTime.TotalSeconds)},now:\"{DateTime.Now.TimeOfDay}\"----");
    }
    catch (Exception ex)
    {
        logger.LogError($"Error while dataflow pipe bulk insert: {ex.Message}");
    }
}
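//Hypothetical sketch: BlockHelper.GetBasciBlockInfo/GetBlockInfo above partition orderCount into
//per-batch sizes whose sum equals the total (hence the Debug.Assert). The helper itself is not shown
//in this listing; the class below is only an assumed illustration that splits a count into roughly
//equal chunks. The real BlockHelper may use a different strategy (e.g. actually enforcing
//minPerBlock/maxPerBlock).
public static class BlockHelperSketch
{
    //Split messageCount into blockCount near-equal chunks; minPerBlock/maxPerBlock mirror the real
    //signature but are not enforced in this sketch.
    public static int[] GetBlockInfo(int messageCount, int blockCount, int minPerBlock, int maxPerBlock)
    {
        var blocks = new int[blockCount];
        int baseSize = messageCount / blockCount;   //even share per block
        int remainder = messageCount % blockCount;  //leftover spread across the first blocks

        for (int i = 0; i < blockCount; i++)
        {
            blocks[i] = baseSize + (i < remainder ? 1 : 0);
        }

        //Callers assert that the blocks cover every order exactly once.
        Debug.Assert(blocks.Sum(b => b) == messageCount);
        return blocks;
    }
}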
//Block-based pipe bulk insert worker; signals the countdown event when all of its batches complete.
public async Task PipeBulkInsertBlockCoreAsync(AsyncCountdownEvent signals, int index, int orderCount)
{
    #region Block Method
    (int blockCount, int minPerBlock, int maxPerBlock) blockBascInfo = BlockHelper.GetBasciBlockInfo(orderCount);
    int[] blockInfos = BlockHelper.GetBlockInfo(messageCount: orderCount,
                                                blockCount: blockBascInfo.blockCount,
                                                minPerBlock: blockBascInfo.minPerBlock,
                                                maxPerBlock: blockBascInfo.maxPerBlock);
    #endregion

    int boundedCapacity = blockInfos.Sum(b => b);
    Debug.Assert(orderCount == boundedCapacity);

    var logger = _loggerFactory.CreateLogger($"PipeBulkInserter-{index}");
    try
    {
        var contosoDataSourceFactory = this.ServiceProvider.GetRequiredService<IDataSourceFactory<IContosoDataSource>>();
        var orderDataSource = contosoDataSourceFactory.Current.OrderDataSource;

        var transportTimeWatcher = Stopwatch.StartNew();
        TimeSpan totalTransportTime = TimeSpan.Zero;
        var executionTimeWatcher = Stopwatch.StartNew();

        logger.LogInformation($"----begin pipe bulk insert {orderCount} orders,now:{DateTime.Now.TimeOfDay}----");

        int start = 0;
        for (int i = 0; i < blockInfos.Count(); i++)
        {
            int insertOrderCount = blockInfos[i];
            var orders = OrderJsonProvider.CreateOrders(start, insertOrderCount);
            start += insertOrderCount;

            transportTimeWatcher.Restart();
            var dbOrders = await orderDataSource.PipeBulkInsertOrdersAsync(orders);
            totalTransportTime += transportTimeWatcher.Elapsed;
            transportTimeWatcher.Reset();

            //if (dbOrders?.Count() > 0)
            //{
            //    await ProcessPipeOrdersAsync(dbOrders);
            //}
        }

        logger.LogInformation(
            $"----pipe bulk insert {orderCount} orders,cost time:\"{executionTimeWatcher.Elapsed}\",transport time:{totalTransportTime},count/time(sec):{Math.Ceiling(orderCount / totalTransportTime.TotalSeconds)},now:\"{DateTime.Now.TimeOfDay}\"----");

        signals?.Signal();
    }
    catch (Exception ex)
    {
        logger.LogError($"Error while pipe bulk insert orders of {nameof(PipeBulkInsertBlockCoreAsync)}: {ex.Message}");
    }
}
public async Task DataflowBulkInsertBlockRetryTasksCoreAsync(AsyncCountdownEvent signals, int index, int count, int orderCount)
{
    var logger = _loggerFactory.CreateLogger($"DataflowBulkInserter-TaskCount:{index}");
    int totalOrderCount = count * orderCount;
    try
    {
        var dataflowBulkInserter = this.ServiceProvider.GetRequiredService<IDataflowBulkInserter<OrderDto, OrderDto>>();
        var contosoDataSourceFactory = this.ServiceProvider.GetRequiredService<IDataSourceFactory<IContosoDataSource>>();
        var orderDataSource = contosoDataSourceFactory.Current.OrderDataSource;

        //await Task.Delay(TimeSpan.FromMilliseconds(2000));
        var transportTimeWatcher = Stopwatch.StartNew();
        TimeSpan totalTransportTime = TimeSpan.Zero;
        var executionTimeWatcher = Stopwatch.StartNew();

        //logger.LogInformation($"----begin dataflow bulk insert {totalOrderCount} orders,now:{DateTime.Now.TimeOfDay}----");

        int start = 0;
        for (int i = 0; i < count; i++)
        {
            var orders = OrderJsonProvider.CreateOrders(start, orderCount);
            start += orderCount;

            transportTimeWatcher.Restart();
            var dbOrders = await orderDataSource.DataflowBulkInsertOrdersAsync(orders);
            totalTransportTime += transportTimeWatcher.Elapsed;
            transportTimeWatcher.Reset();

            if (dbOrders?.Count() > 0)
            {
                await ProcessDataflowOrdersAsync(dbOrders);
            }
        }

        //logger.LogInformation(
        //    $"----dataflow bulk insert {totalOrderCount} orders,cost time:\"{executionTimeWatcher.Elapsed}\",transport time:{totalTransportTime},count/time(sec):{Math.Ceiling(totalOrderCount / totalTransportTime.TotalSeconds)},now:\"{DateTime.Now.TimeOfDay}\"----");

        signals?.Signal();
    }
    catch (Exception ex)
    {
        logger.LogError($"Error while dataflow bulk insert orders of {nameof(DataflowBulkInsertBlockRetryTasksCoreAsync)}: {ex.Message}");
    }
}
//Loop-based pipe bulk insert worker: inserts 'count' batches of 'orderCount' orders and signals the countdown event when done.
public async Task PipeBulkInsertLoopCoreAsync(AsyncCountdownEvent signals, int index, int count, int orderCount)
{
    var logger = _loggerFactory.CreateLogger($"PipeBulkInserter-{index}");
    try
    {
        var contosoDataSourceFactory = this.ServiceProvider.GetRequiredService<IDataSourceFactory<IContosoDataSource>>();
        var orderDataSource = contosoDataSourceFactory.Current.OrderDataSource;

        var transportTimeWatcher = Stopwatch.StartNew();
        TimeSpan totalTransportTime = TimeSpan.Zero;
        var executionTimeWatcher = Stopwatch.StartNew();

        logger.LogInformation($"----begin pipe bulk insert {count * orderCount} orders,now:{DateTime.Now.TimeOfDay}----");

        int start = 0;
        for (int i = 0; i < count; i++)
        {
            var orders = OrderJsonProvider.CreateOrders(start, orderCount);
            start += orderCount;

            transportTimeWatcher.Restart();
            var dbOrders = await orderDataSource.PipeBulkInsertOrdersAsync(orders);
            totalTransportTime += transportTimeWatcher.Elapsed;
            transportTimeWatcher.Reset();

            //if (dbOrders?.Count() > 0)
            //{
            //    await ProcessPipeOrdersAsync(dbOrders);
            //}
        }

        logger.LogInformation(
            $"----pipe bulk insert {count * orderCount} orders,cost time:\"{executionTimeWatcher.Elapsed}\",transport time:{totalTransportTime},count/time(sec):{Math.Ceiling(count * orderCount / totalTransportTime.TotalSeconds)},now:\"{DateTime.Now.TimeOfDay}\"----");

        signals?.Signal();
    }
    catch (Exception ex)
    {
        logger.LogError($"Error while pipe bulk insert orders of {nameof(PipeBulkInsertLoopCoreAsync)}: {ex.Message}");
    }
}
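//Hypothetical sketch: the *CoreAsync overloads above take an AsyncCountdownEvent and call
//signals?.Signal() when their loop finishes, which suggests they are meant to be fanned out across
//several concurrent workers. The driver below is an assumed illustration (Nito.AsyncEx-style
//AsyncCountdownEvent; task count and batch sizes are illustrative only).
public async Task RunPipeBulkInsertLoopWorkersAsync(int taskCount = 4, int count = 10, int orderCount = 1000)
{
    var signals = new AsyncCountdownEvent(taskCount);

    for (int index = 0; index < taskCount; index++)
    {
        //Fire-and-forget is tolerable here: each core method catches its own exceptions,
        //but note that it only signals on the success path.
        _ = PipeBulkInsertLoopCoreAsync(signals, index, count, orderCount);
    }

    //Wait until every worker has signalled completion.
    await signals.WaitAsync();
}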
//Creates (but does not start) a timer that bulk inserts a batch of orders on every tick, up to maxRetryCount ticks.
public Task InitializeDataflowBulkInsertBlockTimerAsync(double frequency = 2000d, int maxRetryCount = 10, int orderCount = 1000)
{
    if (!_isInitialize)
    {
        throw new InvalidOperationException("not initialized");
    }

    _frequency = frequency;
    _period = TimeSpan.FromMilliseconds(frequency);
    _maxRetryCount = maxRetryCount;
    _totalOrderCount = _maxRetryCount * orderCount;

    var logger = _loggerFactory.CreateLogger($"DataflowBulkInserter-Timer:{_period.TotalMilliseconds}-OrderCount:{_totalOrderCount}");
    int currentRetryCount = 0;
    try
    {
        var contosoDataSourceFactory = this.ServiceProvider.GetRequiredService<IDataSourceFactory<IContosoDataSource>>();
        var orderDataSource = contosoDataSourceFactory.Current.OrderDataSource;

        _transportTimeWatcher = Stopwatch.StartNew();
        _totalTransportTime = TimeSpan.Zero;
        _executionTimeWatcher = Stopwatch.StartNew();

        int start = 0;

        //dueTime: the delay (in milliseconds) before the callback is first invoked;
        //         Timeout.Infinite prevents the timer from starting, zero (0) starts it immediately.
        //period:  the interval between callback invocations, in milliseconds.
        _timer = new Timer(async (state) =>
        {
            var orders = OrderJsonProvider.CreateOrders(start, orderCount);
            start += orderCount;

            _transportTimeWatcher.Restart();
            var dbOrders = await orderDataSource.DataflowBulkInsertOrdersAsync(orders);
            _totalTransportTime += _transportTimeWatcher.Elapsed;
            _transportTimeWatcher.Reset();

            //currentRetryTime = transportTimeWatcher.Elapsed;
            //if (currentRetryTime >= maxRetryTime)
            //{
            //    _timer.Change(Timeout.Infinite, Timeout.Infinite);
            //}

            currentRetryCount++;
            if (currentRetryCount >= _maxRetryCount)
            {
                logger.LogInformation(
                    $"----dataflow bulk insert {_totalOrderCount} orders,cost time:\"{_executionTimeWatcher.Elapsed}\",transport time:{_totalTransportTime},count/time(sec):{Math.Ceiling(_totalOrderCount / _totalTransportTime.TotalSeconds)},now:\"{DateTime.Now.TimeOfDay}\"----");

                _timer.Change(Timeout.Infinite, Timeout.Infinite);
                _timer.Dispose();
            }
        }, this, Timeout.Infinite, Timeout.Infinite);
    }
    catch (Exception ex)
    {
        logger.LogError($"Error while dataflow bulk insert orders of {nameof(InitializeDataflowBulkInsertBlockTimerAsync)}: {ex.Message}");
    }

    return Task.CompletedTask;
}
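//Hypothetical sketch: the timer above is created with dueTime and period both set to Timeout.Infinite,
//so it never fires until something arms it. A caller would presumably start it via Timer.Change; the
//helper name below is assumed, not part of the original code.
public void StartDataflowBulkInsertBlockTimer()
{
    //Fire the first callback immediately, then every _period thereafter.
    //Note: the callback is an async void lambda, so invocations can overlap if a bulk insert
    //takes longer than _period; 'start' and 'currentRetryCount' are not synchronized.
    _timer?.Change(dueTime: TimeSpan.Zero, period: _period);
}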
//Builds the service collection: configuration, logging, EF Core DbContexts, bulk inserters, repositories and data sources.
public async Task InitializeTask()
{
    try
    {
        _isInitialize = true;
        _mutex = new AsyncLock();
        _mutexRead = new AsyncLock();

        //1.
        Services = new ServiceCollection();
        this.Services.AddOptions();

        //2.Configuration
        //string basePath = Directory.GetCurrentDirectory() + @"\..\..\..\..";
        string basePath = Directory.GetCurrentDirectory();
        this.ConfigurationBuilder = new ConfigurationBuilder()
            .SetBasePath(basePath)
            .AddJsonFile(path: "appsettings.json", optional: true, reloadOnChange: false);
        this.Configuration = ConfigurationBuilder.Build();

        Services.Configure<PipeSettings>((pipeSettings) =>
        {
            this.Configuration.Bind("Pipe", pipeSettings);
        });
        Services.Configure<BlockSettings>((blockSettings) =>
        {
            this.Configuration.Bind("Block", blockSettings);
        });
        Services.Configure<DataProviderSettings>((dataProviderSettings) =>
        {
            this.Configuration.Bind("DataProvider", dataProviderSettings);
        });
        Services.Configure<DataSourceTypeSettings>((dataSourceTypeSettings) =>
        {
            this.Configuration.Bind("DataSource", dataSourceTypeSettings);
        });

        //3.Logging
        Services.AddLogging(builder =>
        {
            var loggingSection = this.Configuration.GetSection("Logging");
            var includeScopes = loggingSection.GetValue<bool>("IncludeScopes");
            builder.AddConfiguration(loggingSection);

            //Add a ConsoleLoggerProvider
            builder.AddConsole(consoleLoggerOptions =>
            {
                consoleLoggerOptions.IncludeScopes = includeScopes;
            });
            //Add a DebugLoggerProvider
            builder.AddDebug();
        });

        //DbContext
        Services.AddScoped<ContosoContext, SqliteContosoContext>((sp) =>
        {
            var logFactory = sp.GetRequiredService<ILoggerFactory>();
            string sqliteConnectionString = this.Configuration.GetConnectionString("ContosoSqlite");
            //var sqliteContosoContext = new SqliteContosoContext(new DbContextOptionsBuilder<ContosoContext>().UseLoggerFactory(logFactory)
            //    .UseSqlite(sqliteConnectionString).Options);
            var sqliteContosoContext = new SqliteContosoContext(new DbContextOptionsBuilder<ContosoContext>()
                .UseSqlite(sqliteConnectionString).Options);
            return sqliteContosoContext;
        });
        Services.AddScoped<ContosoContext, SqlServerContosoContext>((sp) =>
        {
            var logFactory = sp.GetRequiredService<ILoggerFactory>();
            string sqlServerConnectString = this.Configuration.GetConnectionString("ContosoSqlServer");
            //var sqlServerContosoContext = new SqlServerContosoContext(new DbContextOptionsBuilder<ContosoContext>().UseLoggerFactory(logFactory)
            //    .UseSqlServer(sqlServerConnectString).Options);
            var sqlServerContosoContext = new SqlServerContosoContext(new DbContextOptionsBuilder<ContosoContext>()
                .UseSqlServer(sqlServerConnectString).Options);
            return sqlServerContosoContext;
        });

        Services.AddScoped<IDbContextFactory<ContosoContext>, DbContextFactory>();
        Services.AddScoped(typeof(IDataflowBulkInserter<,>), typeof(DataflowBulkInserter<,>));
        Services.AddScoped(typeof(IDataflowPipeBulkInserter<,>), typeof(DataflowPipeBulkInserter<,>));
        Services.AddScoped(typeof(IPipeBulkInserter<,>), typeof(PipeBulkInserter<,>));
        //Services.AddScoped<IDataflowBulkInserter<Order, Order>, DataflowBulkInserter<Order, Order>>();
        //Services.AddScoped<IDataflowPipeBulkInserter<Order, Order>, DataflowPipeBulkInserter<Order, Order>>();
        //Services.AddScoped<IPipeBulkInserter<Order, Order>, PipeBulkInserter<Order, Order>>();

        //Repository
        Services.AddScoped<ISqlOrderRepository, SqlOrderRepository>();

        //Mapper
        Services.AddAutoMapper(typeof(Contoso.DataSource.AutoMapper.AutoMapperProfileConfiguration));

        //DataSource
        Services.AddScoped<ISqlServerOrderDataSource, DataSource.SqlServer.SqlServerOrderDataSource>();
        //DataSourceFactory
        Services.AddScoped<IContosoDataSource, SqlServerContosoDataSource>();
        Services.AddScoped<IDataSourceFactory<IContosoDataSource>, ContosoDataSourceFactory>();
        //Services.AddSingleton<IPipeWebApiSender<PurchaseOrderDto, int>, PipeWebApiSender<PurchaseOrderDto, int>>();

        //4.
        this.ServiceProvider = this.Services.BuildServiceProvider();

        //5.
        _loggerFactory = ServiceProvider.GetRequiredService<ILoggerFactory>();
        _logger = _loggerFactory.CreateLogger<WebApiSender>();

        var repositoryFactory = this.ServiceProvider.GetRequiredService<IDbContextFactory<ContosoContext>>();
        var dbContext = repositoryFactory.CreateDbContext();
        var dataflowBulkInserter = this.ServiceProvider.GetRequiredService<IDataflowBulkInserter<OrderDto, OrderDto>>();
        var dataflowPipeBulkInserter = this.ServiceProvider.GetRequiredService<IDataflowPipeBulkInserter<OrderDto, OrderDto>>();
        var pipeBulkInserter = this.ServiceProvider.GetRequiredService<IPipeBulkInserter<OrderDto, OrderDto>>();
        var mapper = this.ServiceProvider.GetRequiredService<IMapper>();
        var contosoDataSourceFactory = this.ServiceProvider.GetRequiredService<IDataSourceFactory<IContosoDataSource>>();
        var orderDataSource = contosoDataSourceFactory.Current.OrderDataSource;

        _cancellationTokenSource = new CancellationTokenSource();
        _durationManage = new DurationManage();

        await OrderJsonProvider.InitializeTask();
        //return Task.CompletedTask;
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
        //return Task.FromException(ex);
    }
}
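//Hypothetical sketch: a typical run would presumably initialize the harness once and then invoke one
//of the benchmark methods. "BulkInsertBenchmark" stands in for the containing class, whose name is
//not shown in this listing; the method name is assumed.
public static async Task RunSampleAsync()
{
    var benchmark = new BulkInsertBenchmark();

    //Build configuration, logging, DbContexts, bulk inserters and data sources.
    await benchmark.InitializeTask();

    //Run one scenario, e.g. a dataflow-pipe bulk insert of 4000 orders split into blocks.
    await benchmark.DataflowPipeBulkInsertBlockAsync(seqNumber: 1, orderCount: 4000);
}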
//Repeats the block-based dataflow bulk insert maxRetryCount times (or until cancellation) and logs the aggregate timing.
public async Task DataflowBulkInsertBlockRetryAsync(int seqNumber = 1, int maxRetryCount = 200, int orderCount = 1000)
{
    if (!_isInitialize)
    {
        throw new InvalidOperationException("not initialized");
    }

    #region Block Method
    (int blockCount, int minPerBlock, int maxPerBlock) blockBascInfo = BlockHelper.GetBasciBlockInfo(orderCount);
    int[] blockInfos = BlockHelper.GetBlockInfo(messageCount: orderCount,
                                                blockCount: blockBascInfo.blockCount,
                                                minPerBlock: blockBascInfo.minPerBlock,
                                                maxPerBlock: blockBascInfo.maxPerBlock);
    #endregion

    int boundedCapacity = blockInfos.Sum(b => b);
    Debug.Assert(orderCount == boundedCapacity);

    var logger = _loggerFactory.CreateLogger($"DataflowBulkInserter-{seqNumber}:{maxRetryCount * orderCount}");
    int currentRetryCount = 0;
    try
    {
        var dataflowBulkInserter = this.ServiceProvider.GetRequiredService<IDataflowBulkInserter<OrderDto, OrderDto>>();
        var contosoDataSourceFactory = this.ServiceProvider.GetRequiredService<IDataSourceFactory<IContosoDataSource>>();
        var orderDataSource = contosoDataSourceFactory.Current.OrderDataSource;

        TimeSpan period = TimeSpan.FromMilliseconds(50);
        _totalCount = 0;
        var transportTimeWatcher = Stopwatch.StartNew();
        TimeSpan totalTransportTime = TimeSpan.Zero;
        var executionTimeWatcher = Stopwatch.StartNew();

        logger.LogInformation($"----begin dataflow bulk insert {maxRetryCount * orderCount} orders,now:{DateTime.Now.TimeOfDay}----");

        while (true)
        {
            if (_cancellationTokenSource.Token.IsCancellationRequested)
            {
                _cancellationTokenSource.Token.ThrowIfCancellationRequested();
                //_logger?.LogCritical($"message was cancelled");
                break;
            }

            int start = 0;
            for (int i = 0; i < blockInfos.Count(); i++)
            {
                int insertOrderCount = blockInfos[i];
                var orders = OrderJsonProvider.CreateOrders(start, insertOrderCount);
                start += insertOrderCount;

                transportTimeWatcher.Restart();
                var dbOrders = await orderDataSource.DataflowBulkInsertOrdersAsync(orders);
                totalTransportTime += transportTimeWatcher.Elapsed;
                transportTimeWatcher.Reset();

                //if (dbOrders?.Count() > 0)
                //{
                //    await ProcessDataflowOrdersAsync(dbOrders);
                //}
            }

            //await Task.Delay(TimeSpan.FromMilliseconds(200));
            if (maxRetryCount == 99)
            {
                await Task.Delay(period);
            }

            currentRetryCount++;
            if (currentRetryCount >= maxRetryCount)
            {
                //_cancellationTokenSource.CancelAfter(TimeSpan.FromMilliseconds(200));
                break;
            }
        }

        logger.LogInformation(
            $"----dataflow bulk insert {maxRetryCount * orderCount} orders,cost time:\"{executionTimeWatcher.Elapsed}\",transport time:{totalTransportTime},count/time(sec):{Math.Ceiling(maxRetryCount * orderCount / totalTransportTime.TotalSeconds)},now:\"{DateTime.Now.TimeOfDay}\"----");
    }
    catch (Exception ex)
    {
        logger.LogError($"Error while dataflow bulk insert: {ex.Message}");
    }
}