public override bool BulkInsert(string schemaName, string tableName, CsvReader reader)
{
    using (var cn = _connectionFactory())
    {
        var existingTable = GetDbSchema(schemaName, tableName);
        var readerTable = new DbTable(tableName, schemaName);
        for (var i = 0; i < reader.Columns.Count; i++)
        {
            var column = reader.Columns[i];
            readerTable.Columns.Add(column);
        }

        if (existingTable == null)
        {
            CreateTable(readerTable);
        }
        var table = GetDbSchema(schemaName, tableName);

        // Make a little cache of the pgTypes
        var pgTypes = table.Columns
            .Select(x => PostgresqlTypeConverter.Get(x.MungType).PostgresqlDbType)
            .ToList();

        // Not all the columns in the table may be present in the actual reader, so
        // we insert null if they are missing.
        var actualColumns = new HashSet<string>(reader.Columns.Select(c => c.ColumnName));

        using (var writer = cn.BeginBinaryImport(CopyCommand(table)))
        {
            while (reader.Read())
            {
                writer.StartRow();
                for (var i = 0; i < table.Columns.Count; i++)
                {
                    var col = table.Columns[i];
                    if (actualColumns.Contains(col.ColumnName))
                    {
                        var val = reader[col.ColumnName];
                        if (val == DBNull.Value)
                        {
                            writer.WriteNull();
                        }
                        else
                        {
                            writer.Write(val, pgTypes[i]);
                        }
                    }
                    else
                    {
                        writer.WriteNull();
                    }
                }
            }
            // NB: on Npgsql 4.0+ the import is only committed if writer.Complete()
            // is called before disposal; earlier versions commit on Dispose(),
            // which is what this code relies on.
        }
    }
    return true;
}
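The CopyCommand helper isn't shown above. A minimal sketch, assuming it only needs to emit the binary COPY ... FROM STDIN statement that Npgsql's BeginBinaryImport expects, and that DbTable exposes Schema, Name, and the Columns list used in BulkInsert, might look like this:

// Hypothetical sketch: builds the COPY statement consumed by BeginBinaryImport.
// DbTable.Schema and DbTable.Name are assumed property names, mirroring the
// (tableName, schemaName) constructor arguments used above.
private static string CopyCommand(DbTable table)
{
    var columnList = string.Join(", ",
        table.Columns.Select(c => $"\"{c.ColumnName}\""));

    // FORMAT BINARY matches the binary importer; quoting identifiers guards
    // against mixed-case or reserved-word column names.
    return $"COPY \"{table.Schema}\".\"{table.Name}\" ({columnList}) FROM STDIN (FORMAT BINARY)";
}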
public static void Main(string[] args)
{
    var filePath = Path.Combine(CurrentPath, "CsvFiles", "CourseAttributeExport.csv");
    using (var file = new StreamReader(File.OpenRead(filePath)))
    {
        var reader = new CsvReader(file, new CsvReaderOptions()
        {
            FirstLineContainsHeaders = true,
            ExcelQuoted = true,
            SeparatorChar = ','
        });

        // Header row: each column's name and its inferred type.
        Console.WriteLine(string.Join("\t", reader.Columns.Select(c => $"{c.ColumnName}:{c.MungType}")));

        // Data rows, tab-separated.
        while (reader.Read())
        {
            Console.WriteLine(string.Join("\t", reader.Columns.Select(c => reader[c.ColumnName])));
        }
    }
    Console.ReadKey();
}
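Putting the two pieces together, a hypothetical call site could stream the same file straight into Postgres. PostgresqlDbConnection and the connection string are invented names for illustration; BulkInsert and CsvReader are as defined above:

// Hypothetical wiring: the class and connection string are placeholders.
public static void ImportCsv()
{
    var filePath = Path.Combine(CurrentPath, "CsvFiles", "CourseAttributeExport.csv");
    using (var file = new StreamReader(File.OpenRead(filePath)))
    {
        var reader = new CsvReader(file, new CsvReaderOptions()
        {
            FirstLineContainsHeaders = true,
            ExcelQuoted = true,
            SeparatorChar = ','
        });

        // Create the table if needed, then binary-COPY every row from the reader.
        var db = new PostgresqlDbConnection("Host=localhost;Database=mung");
        db.BulkInsert("import", "course_attributes", reader);
    }
}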