Example #1
0
        /// <summary>
        /// Opens the CSV file at <paramref name="filename"/> and bulk-loads its
        /// contents into the target table via the provider-specific BulkInsert.
        /// </summary>
        /// <returns>Always <c>true</c>; failures surface as exceptions.</returns>
        public bool Execute(string targetSchema, string targetTable, string filename)
        {
            // Stacked usings: both the text stream and the CSV reader are
            // disposed once the bulk insert has finished.
            using (var stream = File.OpenText(filename))
            using (var reader = new CsvReader(stream)) {
                BulkInsert(targetSchema, targetTable, reader);
            }

            return true;
        }
        /// <summary>
        /// Bulk-inserts the rows of <paramref name="reader"/> into the given SQL Server
        /// table using <see cref="SqlBulkCopy"/>, creating the destination table first
        /// when it does not already exist.
        /// </summary>
        /// <param name="schemaName">Destination schema name.</param>
        /// <param name="tableName">Destination table name.</param>
        /// <param name="reader">CSV reader supplying the columns and rows to copy.</param>
        /// <returns>Always <c>true</c>; failures surface as exceptions.</returns>
        /// <exception cref="InvalidOperationException">
        /// Thrown when a reader column has no matching column in the destination table.
        /// </exception>
        public override bool BulkInsert(string schemaName, string tableName, CsvReader reader)
        {
            using (var cn = _connectionFactory()) {
                using (SqlBulkCopy copy = new SqlBulkCopy(cn)) {

                    var existingTable = GetDbSchema(schemaName, tableName);
                    var readerTable = new DbTable(tableName, schemaName);

                    for (var i = 0; i < reader.Columns.Count; i++) {
                        readerTable.Columns.Add(reader.Columns[i]);
                    }

                    // Create the table on first load, then re-read the schema so the
                    // mappings below reflect what actually exists in the database.
                    if (existingTable == null) {
                        CreateTable(readerTable);
                    }

                    var table = GetDbSchema(schemaName, tableName);

                    // Map each source ordinal to its destination ordinal by column
                    // name; a column missing from the destination is a hard error.
                    for (var i = 0; i < reader.Columns.Count; i++) {
                        var column = reader.Columns[i].ColumnName;
                        var destinationOrdinal = table.Columns.FindIndex(x => x.ColumnName == column);

                        if (destinationOrdinal == -1) {
                            var msg = string.Format("Unable to resolve column mapping, column: {0} was not found in destination table {1}",
                                column,
                                table.TableName
                            );
                            // Specific exception type instead of the bare Exception the
                            // original threw; callers catching Exception still catch this.
                            throw new InvalidOperationException(msg);
                        }
                        copy.ColumnMappings.Add(new SqlBulkCopyColumnMapping(i, destinationOrdinal));
                    }

                    copy.DestinationTableName = string.Format("[{0}].[{1}]", table.SchemaName, table.TableName);

                    copy.BatchSize = 1000;
                    // Effectively "no timeout" for very large loads.
                    copy.BulkCopyTimeout = 9999999;

                    copy.WriteToServer(reader);
                }

            }

            return true;
        }
        /// <summary>
        /// Bulk-inserts the rows of <paramref name="reader"/> into the given PostgreSQL
        /// table via binary COPY, creating the destination table first when it does not
        /// already exist. Destination columns absent from the reader are written as NULL.
        /// </summary>
        /// <param name="schemaName">Destination schema name.</param>
        /// <param name="tableName">Destination table name.</param>
        /// <param name="reader">CSV reader supplying the columns and rows to copy.</param>
        /// <returns>Always <c>true</c>; failures surface as exceptions.</returns>
        public override bool BulkInsert(string schemaName, string tableName, CsvReader reader)
        {
            using (var cn = _connectionFactory()) {

                var existingTable = GetDbSchema(schemaName, tableName);
                var readerTable = new DbTable(tableName, schemaName);

                for (var i = 0; i < reader.Columns.Count; i++) {
                    readerTable.Columns.Add(reader.Columns[i]);
                }

                if (existingTable == null) {
                    CreateTable(readerTable);
                }

                var table = GetDbSchema(schemaName, tableName);
                // Cache the Postgres type of each destination column so the per-row
                // loop below does not repeat the converter lookup.
                var pgTypes = table.Columns
                    .Select(x => PostgresqlTypeConverter.Get(x.MungType).PostgresqlDbType)
                    .ToList();

                // Not all the columns in the table may be present in the actual reader, so
                // we insert null if they are missing.
                var actualColumns = new HashSet<string>(reader.Columns.Select(c => c.ColumnName));
                using (var writer = cn.BeginBinaryImport(CopyCommand(table))) {
                    while (reader.Read()) {
                        writer.StartRow();
                        for (var i = 0; i < table.Columns.Count; i++) {
                            var col = table.Columns[i];
                            if (actualColumns.Contains(col.ColumnName)) {
                                // Read the indexer once per cell and reuse the value
                                // (the original re-read reader[...] in the Write call).
                                var val = reader[col.ColumnName];
                                if (val == DBNull.Value) {
                                    writer.WriteNull();
                                } else {
                                    writer.Write(val, pgTypes[i]);
                                }
                            } else {
                                writer.WriteNull();
                            }
                        }
                    }

                    // BUGFIX: on Npgsql 4+ disposing the binary importer without calling
                    // Complete() rolls the COPY back, silently discarding every row.
                    writer.Complete();
                }
            }
            return true;
        }
Example #4
0
        /// <summary>
        /// Demo entry point: reads a sample CSV export, prints a tab-separated
        /// header of "ColumnName:MungType" pairs, then every row's values, and
        /// waits for a key press before exiting.
        /// </summary>
        public static void Main(string[] args)
        {
            var filePath = Path.Combine(CurrentPath, "CsvFiles", "CourseAttributeExport.csv");

            using (var file = new StreamReader(File.OpenRead(filePath)))
            // BUGFIX: the CsvReader is IDisposable (it is wrapped in a using
            // elsewhere in this file) but was never disposed here.
            using (var reader = new CsvReader(file, new CsvReaderOptions() {
                FirstLineContainsHeaders = true,
                ExcelQuoted = true,
                SeparatorChar = ','
            })) {
                Console.WriteLine(string.Join("\t", reader.Columns.Select(c => $"{c.ColumnName}:{c.MungType}")));
                while (reader.Read()) {
                    Console.WriteLine(string.Join("\t", reader.Columns.Select(c => reader[c.ColumnName])));
                }
            }

            Console.ReadKey();
        }
Example #5
0
 /// <summary>
 /// Bulk-inserts the rows of <paramref name="reader"/> into the table identified by
 /// <paramref name="schemaName"/> and <paramref name="tableName"/>, using the
 /// provider-specific bulk load mechanism implemented by the derived class.
 /// </summary>
 /// <param name="schemaName">Destination schema name.</param>
 /// <param name="tableName">Destination table name.</param>
 /// <param name="reader">CSV reader supplying the columns and rows to copy.</param>
 /// <returns>Presumably <c>true</c> on success (the visible overrides always return true) — confirm against callers.</returns>
 public abstract bool BulkInsert(string schemaName, string tableName, CsvReader reader);