private BulkCopyRowsCopied ProviderSpecificCopyImpl<T>(DataConnection dataConnection, ITable<T> table, BulkCopyOptions options, IEnumerable<T> source)
            where T : notnull
        {
            var connection = _provider.TryGetProviderConnection(dataConnection, dataConnection.Connection);

            if (connection == null)
            {
                return MultipleRowsCopy(table, options, source);
            }

            var sqlBuilder = (PostgreSQLSqlBuilder)_provider.CreateSqlBuilder(table.DataContext.MappingSchema);
            var ed         = table.DataContext.MappingSchema.GetEntityDescriptor(typeof(T));
            var tableName  = GetTableName(sqlBuilder, options, table);
            var columns    = ed.Columns.Where(c => !c.SkipOnInsert || options.KeepIdentity == true && c.IsIdentity).ToArray();

            var (npgsqlTypes, dbTypes, columnTypes) = BuildTypes(_provider.Adapter, sqlBuilder, columns);

            var fields      = string.Join(", ", columns.Select(column => sqlBuilder.ConvertInline(column.ColumnName, ConvertType.NameToQueryField)));
            var copyCommand = $"COPY {tableName} ({fields}) FROM STDIN (FORMAT BINARY)";

            // the batch size is not based on any strong grounds; no official recommendations for it were found
            var batchSize = Math.Max(10, options.MaxBatchSize ?? 10000);

            var writer = _provider.Adapter.BeginBinaryImport(connection, copyCommand);

            return ProviderSpecificCopySyncImpl(dataConnection, options, source, connection, tableName, columns, columnTypes, npgsqlTypes, dbTypes, copyCommand, batchSize, writer);
        }
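
For context, the Adapter.BeginBinaryImport call above wraps Npgsql's binary COPY API. Below is a minimal sketch of the equivalent raw Npgsql calls, assuming Npgsql 4.0+ (where Complete() exists); the connection string, table and columns are hypothetical.

using Npgsql;
using NpgsqlTypes;

// Hypothetical target table: test_copy (id integer, name text).
using var connection = new NpgsqlConnection("Host=localhost;Database=test");
connection.Open();

// COPY ... FROM STDIN (FORMAT BINARY) streams rows to the server over the binary
// protocol; this is the same kind of command the generated copyCommand produces.
using (var importer = connection.BeginBinaryImport(
    "COPY test_copy (id, name) FROM STDIN (FORMAT BINARY)"))
{
    importer.StartRow();
    importer.Write(1, NpgsqlDbType.Integer);
    importer.Write("first row", NpgsqlDbType.Text);

    // Complete() commits the import; disposing without calling it discards the rows.
    importer.Complete();
}
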
Example #2
        private BulkCopyRowsCopied ProviderSpecificCopyImpl<T>(DataConnection dataConnection, ITable<T> table, BulkCopyOptions options, IEnumerable<T> source)
        {
            var connection = _provider.TryGetProviderConnection(dataConnection.Connection, dataConnection.MappingSchema);

            if (connection == null)
            {
                return MultipleRowsCopy(table, options, source);
            }

            var sqlBuilder = (BasicSqlBuilder)_provider.CreateSqlBuilder(dataConnection.MappingSchema);
            var ed         = dataConnection.MappingSchema.GetEntityDescriptor(typeof(T));
            var tableName  = GetTableName(sqlBuilder, options, table);
            var columns    = ed.Columns.Where(c => !c.SkipOnInsert || options.KeepIdentity == true && c.IsIdentity).ToArray();

            var npgsqlTypes = new NpgsqlProviderAdapter.NpgsqlDbType[columns.Length];

            for (var i = 0; i < columns.Length; i++)
            {
                var npgsqlType = _provider.GetNativeType(columns[i].DbType, true);
                if (npgsqlType == null)
                {
                    var columnType = columns[i].DataType != DataType.Undefined ? new SqlQuery.SqlDataType(columns[i]) : null;

                    if (columnType == null || columnType.Type.DataType == DataType.Undefined)
                    {
                        columnType = columns[i].MappingSchema.GetDataType(columns[i].StorageType);
                    }

                    var sb = new System.Text.StringBuilder();
                    sqlBuilder.BuildTypeName(sb, columnType);
                    npgsqlType = _provider.GetNativeType(sb.ToString(), true);
                }

                if (npgsqlType == null)
                {
                    throw new LinqToDBException($"Cannot guess PostgreSQL type for column {columns[i].ColumnName}. Specify type explicitly in column mapping.");
                }

                npgsqlTypes[i] = npgsqlType.Value;
            }

            var fields      = string.Join(", ", columns.Select(column => sqlBuilder.ConvertInline(column.ColumnName, ConvertType.NameToQueryField)));
            var copyCommand = $"COPY {tableName} ({fields}) FROM STDIN (FORMAT BINARY)";

            // the batch size is not based on any strong grounds; no official recommendations for it were found
            var batchSize = Math.Max(10, options.MaxBatchSize ?? 10000);

            var useComplete = _provider.Adapter.BinaryImporterHasComplete;
            var writer      = _provider.Adapter.BeginBinaryImport(connection, copyCommand);

            return ProviderSpecificCopySyncImpl(dataConnection, options, source, connection, tableName, columns, npgsqlTypes, copyCommand, batchSize, useComplete, writer);
        }
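
The type-resolution loop above tries the column's DbType string first, then falls back to the DataType or storage type rendered through the SQL builder, and finally throws, asking for an explicit type in the column mapping. A hedged sketch of such an explicit mapping with linq2db attributes follows; the Document entity and its columns are made up for illustration.

using LinqToDB;
using LinqToDB.Mapping;

[Table("documents")]
public class Document
{
    [Column, PrimaryKey, Identity]
    public int Id { get; set; }

    // An explicit DbType lets GetNativeType resolve the NpgsqlDbType directly
    // instead of guessing it from the storage type.
    [Column(DbType = "jsonb")]
    public string? Payload { get; set; }

    [Column(DataType = DataType.NVarChar, Length = 200)]
    public string Title { get; set; } = "";
}
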
Example #3
        protected override BulkCopyRowsCopied ProviderSpecificCopy<T>(ITable<T> table, BulkCopyOptions options, IEnumerable<T> source)
        {
            if (table.DataContext is DataConnection dataConnection)
            {
                var connection = _provider.TryGetProviderConnection(dataConnection.Connection, dataConnection.MappingSchema);

                if (connection == null)
                {
                    return MultipleRowsCopy(table, options, source);
                }

                var sqlBuilder = (BasicSqlBuilder)_provider.CreateSqlBuilder(dataConnection.MappingSchema);
                var ed         = dataConnection.MappingSchema.GetEntityDescriptor(typeof(T));
                var tableName  = GetTableName(sqlBuilder, options, table);
                var columns    = ed.Columns.Where(c => !c.SkipOnInsert || options.KeepIdentity == true && c.IsIdentity).ToArray();

                var fields      = string.Join(", ", columns.Select(column => sqlBuilder.ConvertInline(column.ColumnName, ConvertType.NameToQueryField)));
                var copyCommand = $"COPY {tableName} ({fields}) FROM STDIN (FORMAT BINARY)";

                var rowsCopied = new BulkCopyRowsCopied();
                // the batch size is not based on any strong grounds; no official recommendations for it were found
                var batchSize    = Math.Max(10, options.MaxBatchSize ?? 10000);
                var currentCount = 0;

                var key       = new { Type = typeof(T), options.KeepIdentity, ed };
                var rowWriter = (Action<MappingSchema, NpgsqlProviderAdapter.NpgsqlBinaryImporter, ColumnDescriptor[], T>)_rowWriterCache.GetOrAdd(
                    key,
                    _ => _provider.Adapter.CreateBinaryImportRowWriter<T>(_provider, sqlBuilder, columns, dataConnection.MappingSchema));

                var useComplete = _provider.Adapter.BinaryImporterHasComplete;
                var writer      = _provider.Adapter.BeginBinaryImport(connection, copyCommand);

                try
                {
                    foreach (var item in source)
                    {
                        rowWriter(dataConnection.MappingSchema, writer, columns, item);

                        currentCount++;
                        rowsCopied.RowsCopied++;

                        if (options.NotifyAfter != 0 && options.RowsCopiedCallback != null && rowsCopied.RowsCopied % options.NotifyAfter == 0)
                        {
                            options.RowsCopiedCallback(rowsCopied);

                            if (rowsCopied.Abort)
                            {
                                if (!useComplete)
                                {
                                    writer.Cancel();
                                }
                                break;
                            }
                        }

                        if (currentCount >= batchSize)
                        {
                            if (useComplete)
                            {
                                writer.Complete();
                            }

                            writer.Dispose();

                            writer       = _provider.Adapter.BeginBinaryImport(connection, copyCommand);
                            currentCount = 0;
                        }
                    }

                    if (!rowsCopied.Abort)
                    {
                        TraceAction(
                            dataConnection,
                            () => "INSERT BULK " + tableName + "(" + string.Join(", ", columns.Select(x => x.ColumnName)) + Environment.NewLine,
                            () =>
                            {
                                if (useComplete)
                                {
                                    writer.Complete();
                                }

                                return (int)rowsCopied.RowsCopied;
                            });
                    }

                    if (options.NotifyAfter != 0 && options.RowsCopiedCallback != null)
                    {
                        options.RowsCopiedCallback(rowsCopied);
                    }
                }
                catch when (!useComplete)
                {
                    writer.Cancel();
                    throw;
                }
                finally
                {
                    writer.Dispose();
                }

                return rowsCopied;
            }

            return MultipleRowsCopy(table, options, source);
        }
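
For reference, callers typically reach ProviderSpecificCopy through linq2db's BulkCopy extension with BulkCopyType.ProviderSpecific, which routes execution to the binary COPY path above. A minimal usage sketch, where the provider name, connection string, Document entity and LoadDocuments() source are assumptions:

using System;
using System.Collections.Generic;
using LinqToDB;
using LinqToDB.Data;

using var db = new DataConnection(ProviderName.PostgreSQL, "Host=localhost;Database=test");

var options = new BulkCopyOptions
{
    BulkCopyType       = BulkCopyType.ProviderSpecific, // use the binary COPY implementation
    MaxBatchSize       = 50000,
    KeepIdentity       = true,
    NotifyAfter        = 10000,
    RowsCopiedCallback = copied => Console.WriteLine($"{copied.RowsCopied} rows copied"),
};

// LoadDocuments() stands in for any IEnumerable<Document> source.
var result = db.BulkCopy(options, LoadDocuments());
Console.WriteLine($"Total rows copied: {result.RowsCopied}");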