Example #1
        [Test]
        public async Task BatchAsyncTest()
        {
            var countTo10   = AsyncEnumerableRange(10);
            var enumerables = EnumerableHelper.Batch(countTo10, 3);
            var finalList   = new List<List<int>>();

            await foreach (var enumerable2 in enumerables)
            {
                finalList.Add(await enumerable2.ToListAsync());
            }
            Assert.AreEqual(new int[][] { new[] { 0, 1, 2 }, new[] { 3, 4, 5 }, new[] { 6, 7, 8 }, new[] { 9 } }, finalList);
        }
Example #2
        [Test]
        public void BatchTest()
        {
            var countTo10   = Enumerable.Range(0, 10);
            var enumerables = EnumerableHelper.Batch(countTo10, 3);
            var finalList   = new List<List<int>>();

            foreach (var enumerable2 in enumerables)
            {
                finalList.Add(enumerable2.ToList());
            }
            Assert.AreEqual(new int[][] { new[] { 0, 1, 2 }, new[] { 3, 4, 5 }, new[] { 6, 7, 8 }, new[] { 9 } }, finalList);
        }
Example #3
        [Test]
        public void BatchThrowsWhenEnumerating2ndTimeTest()
        {
            var countTo10   = Enumerable.Range(0, 10);
            var enumerables = EnumerableHelper.Batch(countTo10, 3);

            foreach (var enumerable2 in enumerables)
            {
                var array1 = enumerable2.ToList();
                Assert.AreEqual(new int[] { 0, 1, 2 }, array1);
                Assert.Throws<InvalidOperationException>(() =>
                {
                    var array2 = enumerable2.ToList();
                });
                return;
            }
        }
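Examples #2 and #3 together pin down the observable contract of EnumerableHelper.Batch: the source is split lazily into chunks of the requested size (with a shorter final chunk), and each chunk is forward-only, so enumerating it a second time throws InvalidOperationException. The code below is not the library source, just a minimal single-pass sketch that satisfies those tests; BatchState and TakeBatch are names invented here, and a full implementation would also need to deal with batches the caller skips or only partially consumes.

        public static IEnumerable<IEnumerable<T>> Batch<T>(IEnumerable<T> source, int batchSize)
        {
            if (batchSize < 1)
                throw new ArgumentOutOfRangeException(nameof(batchSize));

            using var enumerator = source.GetEnumerator();

            // Each MoveNext here fetches the first element of the next batch;
            // all batches share this one enumerator, which is what makes the
            // whole pipeline single-pass.
            while (enumerator.MoveNext())
            {
                yield return TakeBatch(enumerator, batchSize, new BatchState());
            }
        }

        private sealed class BatchState
        {
            public bool Consumed;
        }

        private static IEnumerable<T> TakeBatch<T>(IEnumerator<T> enumerator, int batchSize, BatchState state)
        {
            // A second GetEnumerator() call re-runs this iterator from the top,
            // so the flag turns re-enumeration into the exception Example #3 expects.
            if (state.Consumed)
                throw new InvalidOperationException("A batch can only be enumerated once.");
            state.Consumed = true;

            yield return enumerator.Current;

            for (var i = 1; i < batchSize && enumerator.MoveNext(); i++)
                yield return enumerator.Current;
        }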
Example #4
        [Test]
        public async Task BatchAsyncThrowsWhenEnumerating2ndTimeTest()
        {
            var countTo10   = AsyncEnumerableRange(10);
            var enumerables = EnumerableHelper.Batch(countTo10, 3);

            await foreach (var enumerable2 in enumerables)
            {
                var array1 = await enumerable2.ToListAsync();

                Assert.AreEqual(new int[] { 0, 1, 2 }, array1);
                Assert.ThrowsAsync<InvalidOperationException>(async () =>
                {
                    var array2 = await enumerable2.ToListAsync();
                });
                return;
            }
        }
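Examples #1 and #4 show the same contract for IAsyncEnumerable<T>; AsyncEnumerableRange is presumably a test helper that yields 0..9 asynchronously. Again hypothetical rather than the library source, an async counterpart of the sketch above, reusing BatchState; the [EnumeratorCancellation] parameter is what lets a caller attach a token via WithCancellation, as Example #5 below does.

        public static async IAsyncEnumerable<IAsyncEnumerable<T>> Batch<T>(
            IAsyncEnumerable<T> source,
            int batchSize,
            [EnumeratorCancellation] CancellationToken cancellationToken = default)
        {
            await using var enumerator = source.GetAsyncEnumerator(cancellationToken);

            // Same single-pass scheme as the synchronous sketch: all batches
            // share one async enumerator.
            while (await enumerator.MoveNextAsync())
            {
                yield return TakeBatchAsync(enumerator, batchSize, new BatchState());
            }
        }

        private static async IAsyncEnumerable<T> TakeBatchAsync<T>(
            IAsyncEnumerator<T> enumerator, int batchSize, BatchState state)
        {
            if (state.Consumed)
                throw new InvalidOperationException("A batch can only be enumerated once.");
            state.Consumed = true;

            yield return enumerator.Current;

            for (var i = 1; i < batchSize && await enumerator.MoveNextAsync(); i++)
                yield return enumerator.Current;
        }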
Example #5
        private async Task<BulkCopyRowsCopied> ProviderSpecificCopyInternal<T>(
            ProviderConnections providerConnections,
            ITable<T> table,
            BulkCopyOptions options,
            IAsyncEnumerable<T> source,
            CancellationToken cancellationToken)
        {
            var dataConnection = providerConnections.DataConnection;
            var connection     = providerConnections.ProviderConnection;
            var transaction    = providerConnections.ProviderTransaction;
            var ed             = dataConnection.MappingSchema.GetEntityDescriptor(typeof(T));
            var columns        = ed.Columns.Where(c => !c.SkipOnInsert || options.KeepIdentity == true && c.IsIdentity).ToList();
            var sb             = _provider.CreateSqlBuilder(dataConnection.MappingSchema);
            var rc             = new BulkCopyRowsCopied();

            var bc = _provider.Adapter.BulkCopy!.Create(connection, transaction);

            if (options.NotifyAfter != 0 && options.RowsCopiedCallback != null)
            {
                bc.NotifyAfter = options.NotifyAfter;

                bc.MySqlRowsCopied += (sender, args) =>
                {
                    rc.RowsCopied += args.RowsCopied;
                    options.RowsCopiedCallback(rc);
                    if (rc.Abort)
                    {
                        args.Abort = true;
                    }
                };
            }

            if (options.BulkCopyTimeout.HasValue)
            {
                bc.BulkCopyTimeout = options.BulkCopyTimeout.Value;
            }

            var tableName = GetTableName(sb, options, table);

            bc.DestinationTableName = tableName;

            for (var i = 0; i < columns.Count; i++)
            {
                bc.AddColumnMapping(_provider.Adapter.BulkCopy.CreateColumnMapping(i, columns[i].ColumnName));
            }

            // Emulate the missing BatchSize property: MySql fails on big batches,
            // so users must be able to limit the batch size.
            var batches = EnumerableHelper.Batch(source, options.MaxBatchSize ?? int.MaxValue);

            await foreach (var batch in batches.WithCancellation(cancellationToken).ConfigureAwait(Common.Configuration.ContinueOnCapturedContext))
            {
                var rd = new BulkCopyReader<T>(dataConnection, columns, batch, cancellationToken);

                await TraceActionAsync(
                    dataConnection,
                    () => "INSERT BULK " + tableName + "(" + string.Join(", ", columns.Select(x => x.ColumnName)) + Environment.NewLine,
                    async () =>
                    {
                        if (bc.CanWriteToServerAsync2)
                        {
                            await bc.WriteToServerAsync2(rd, cancellationToken).ConfigureAwait(Common.Configuration.ContinueOnCapturedContext);
                        }
                        else if (bc.CanWriteToServerAsync)
                        {
                            await bc.WriteToServerAsync(rd, cancellationToken).ConfigureAwait(Common.Configuration.ContinueOnCapturedContext);
                        }
                        else
                        {
                            bc.WriteToServer(rd);
                        }

                        return rd.Count;
                    }).ConfigureAwait(Common.Configuration.ContinueOnCapturedContext);

                rc.RowsCopied += rd.Count;
            }

            if (options.NotifyAfter != 0 && options.RowsCopiedCallback != null)
            {
                options.RowsCopiedCallback(rc);
            }

            return rc;
        }
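The options consumed above map directly onto the adapter calls: MaxBatchSize drives EnumerableHelper.Batch, BulkCopyTimeout is copied into bc.BulkCopyTimeout, and NotifyAfter plus RowsCopiedCallback wire up the MySqlRowsCopied handler. A hypothetical caller's side of that contract follows; db, Customer and customers are invented for illustration, and the BulkCopy entry point is assumed to be the usual linq2db extension method.

        var options = new BulkCopyOptions
        {
            MaxBatchSize       = 5000,   // caps each chunk produced by EnumerableHelper.Batch
            BulkCopyTimeout    = 60,     // copied into bc.BulkCopyTimeout
            NotifyAfter        = 1000,   // rows between RowsCopiedCallback invocations
            RowsCopiedCallback = copied => Console.WriteLine($"{copied.RowsCopied} rows copied"),
        };

        db.GetTable<Customer>().BulkCopy(options, customers);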
Example #6
        protected override BulkCopyRowsCopied ProviderSpecificCopy<T>(
            ITable<T> table,
            BulkCopyOptions options,
            IEnumerable<T> source)
        {
            if (_provider.Adapter.BulkCopy != null && table.DataContext is DataConnection dataConnection)
            {
                var connection = _provider.TryGetProviderConnection(dataConnection.Connection, dataConnection.MappingSchema);

                var transaction = dataConnection.Transaction;
                if (connection != null && transaction != null)
                {
                    transaction = _provider.TryGetProviderTransaction(transaction, dataConnection.MappingSchema);
                }

                if (connection != null && (dataConnection.Transaction == null || transaction != null))
                {
                    var ed      = dataConnection.MappingSchema.GetEntityDescriptor(typeof(T));
                    var columns = ed.Columns.Where(c => !c.SkipOnInsert || options.KeepIdentity == true && c.IsIdentity).ToList();
                    var sb      = _provider.CreateSqlBuilder(dataConnection.MappingSchema);
                    var rc      = new BulkCopyRowsCopied();

                    var bc = _provider.Adapter.BulkCopy.Create(connection, transaction);
                    if (options.NotifyAfter != 0 && options.RowsCopiedCallback != null)
                    {
                        bc.NotifyAfter = options.NotifyAfter;

                        bc.MySqlRowsCopied += (sender, args) =>
                        {
                            rc.RowsCopied += args.RowsCopied;
                            options.RowsCopiedCallback(rc);
                            if (rc.Abort)
                            {
                                args.Abort = true;
                            }
                        };
                    }

                    if (options.BulkCopyTimeout.HasValue)
                    {
                        bc.BulkCopyTimeout = options.BulkCopyTimeout.Value;
                    }

                    var tableName = GetTableName(sb, options, table);

                    bc.DestinationTableName = tableName;

                    for (var i = 0; i < columns.Count; i++)
                    {
                        bc.AddColumnMapping(_provider.Adapter.BulkCopy.CreateColumnMapping(i, columns[i].ColumnName));
                    }

                    // Emulate the missing BatchSize property: MySql fails on big batches,
                    // so users must be able to limit the batch size.
                    foreach (var batch in EnumerableHelper.Batch(source, options.MaxBatchSize ?? int.MaxValue))
                    {
                        var rd = new BulkCopyReader(dataConnection, columns, batch);

                        TraceAction(
                            dataConnection,
                            () => "INSERT BULK " + tableName + "(" + string.Join(", ", columns.Select(x => x.ColumnName)) + Environment.NewLine,
                            () => { bc.WriteToServer(rd); return rd.Count; });

                        rc.RowsCopied += rd.Count;
                    }

                    if (options.NotifyAfter != 0 && options.RowsCopiedCallback != null)
                    {
                        options.RowsCopiedCallback(rc);
                    }

                    return rc;
                }
            }

            return MultipleRowsCopy(table, options, source);
        }
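Note that the provider-specific path is taken only when the adapter exposes a bulk-copy API and the provider connection (and, if one is open, the transaction) can be unwrapped from the DataConnection; in every other case the method falls back to the generic MultipleRowsCopy implementation.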