/// <summary>
/// Streams standard input into myCopyTestTable via COPY ... FROM STDIN,
/// cancelling the copy (and verifying the cancel round-trip) on failure.
/// </summary>
private static void updateTable()
{
    // NOTE(review): hard-coded connection string with credentials — move to configuration.
    using (NpgsqlConnection conn = new NpgsqlConnection("Server=169.254.95.120;Port=5432;User Id=joe;Password=postgres;Database=postgres;"))
    {
        conn.Open();
        NpgsqlCommand command = new NpgsqlCommand("COPY myCopyTestTable FROM STDIN", conn);
        NpgsqlCopyIn cin = new NpgsqlCopyIn(command, conn, Console.OpenStandardInput()); // expecting input in server encoding!
        try
        {
            cin.Start();
        }
        catch (Exception e)
        {
            try
            {
                cin.Cancel("Undo copy");
            }
            catch (NpgsqlException e2)
            {
                // we should get an error in response to our cancel request:
                // the server acknowledges the cancel by echoing our message.
                if (!("" + e2).Contains("Undo copy"))
                {
                    throw new Exception("Failed to cancel copy: " + e2 + " upon failure: " + e);
                }
            }
            // Rethrow with 'throw;' so the original stack trace is preserved
            // (the previous 'throw e;' reset it).
            throw;
        }
        conn.Close();
    }
}
/// <summary>
/// Bulk-loads a CSV data file into the given table using COPY ... FROM STDIN.
/// The first line of the file is treated as the CSV header and forwarded as-is;
/// remaining lines are passed through RewriteNullValues before streaming.
/// </summary>
/// <param name="qualifiedTableName">Schema-qualified name of the destination table.</param>
/// <param name="fileName">Path of the data file to load (opened via OpenDataFile).</param>
public override void InsertIntoTable(string qualifiedTableName, string fileName)
{
    using (var iconnection = CreateConnection())
    {
        using (var reader = OpenDataFile(fileName))
        {
            // read the header
            var header = reader.ReadLine();
            // get the real connection
            var connection = UnwrapConnection(iconnection);
            var command = new NpgsqlCommand(string.Format("COPY {0} FROM STDIN WITH CSV HEADER NULL '@null@' ESCAPE '\\'", qualifiedTableName), connection);
            var copyIn = new NpgsqlCopyIn(command, connection);
            var lineCount = 0;
            string line = string.Empty;
            try
            {
                copyIn.Start();
                var copyInStream = copyIn.CopyStream;
                var copyInWriter = new StreamWriter(copyInStream, System.Text.Encoding.UTF8, 16384);
                // write the header
                copyInWriter.WriteLine(header);
                // stream the lines
                while ((line = reader.ReadLine()) != null)
                {
                    // lines must be cleaned up so that nulls are transformed properly into null values
                    line = RewriteNullValues(line);
                    // write the line
                    copyInWriter.WriteLine(line);
                    lineCount++;
                }
                copyInWriter.Flush();
                copyInStream.Close();
                if (Logger != null)
                {
                    Logger.Info("Loaded table: {0} : {1}", qualifiedTableName, lineCount);
                }
            }
            catch (NpgsqlException e)
            {
                // Cancel() makes the server abort the COPY by raising an error that
                // echoes our cancel message; catch that expected error here so it
                // does not escape this handler and mask the original failure.
                try
                {
                    copyIn.Cancel("undo copy");
                }
                catch (NpgsqlException cancelEx)
                {
                    if (!("" + cancelEx).Contains("undo copy"))
                    {
                        throw new Exception("Failed to cancel copy: " + cancelEx + " upon failure: " + e);
                    }
                }
                // Best-effort load: failure is logged rather than rethrown,
                // matching the previous behavior of this method.
                if (Logger != null)
                {
                    Logger.Info("Loading table: {0} : FAILURE", qualifiedTableName);
                    Logger.Info(e.Message);
                    Logger.Info(e.StackTrace);
                }
            }
        }
    }
}
/// <summary>
/// Streams the queue's rows into the destination table with COPY ... FROM STDIN,
/// writing tab-separated fields terminated by CRLF.
/// </summary>
/// <param name="queue">Source of records; drained until IsEof.</param>
protected override void RunBulkCopy(IDataQueue queue)
{
    int okRowCount = 0, failRowCount = 0;
    List<string> insertErrors = new List<string>();
    ITableStructure dst = queue.GetRowFormat;
    var conn = (NpgsqlConnection)Connection.SystemConnection;
    NpgsqlCommand command = new NpgsqlCommand(Connection.Dialect.GenerateScript(d => d.Put("^copy %f (%,i) ^from ^stdin", DestinationTable.FullName, from c in dst.Columns select c.ColumnName)), conn);
    NpgsqlCopyIn cin = new NpgsqlCopyIn(command, conn);
    try
    {
        cin.Start();
        var fw = new BinaryWriter(cin.CopyStream);
        while (!queue.IsEof)
        {
            IBedRecord rec = queue.GetRecord();
            for (int i = 0; i < rec.FieldCount; i++)
            {
                if (i > 0)
                {
                    fw.Write((byte)'\t');
                }
                rec.ReadValue(i);
                WriteField(rec, fw);
            }
            // COPY text rows are terminated by a newline; CRLF is accepted too.
            fw.Write((byte)'\r');
            fw.Write((byte)'\n');
            okRowCount++;
        }
        fw.Flush();
        cin.End();
    }
    catch (Exception err)
    {
        // Cancel() reports back through an exception of its own (the server
        // echoes the cancel message as an error); swallow it so it cannot mask
        // the original error rethrown below.
        try
        {
            cin.Cancel("canceled");
        }
        catch (Exception)
        {
            // expected cancel acknowledgement
        }
        ProgressInfo.LogMessageDetail(
            "INSERT",
            DatAdmin.LogLevel.Error,
            String.Format("{0}", Texts.Get("s_error_inserting_into_table$table", "table", DestinationTable.FullName)),
            err.ToString());
        throw;
    }
    // NOTE(review): failRowCount and insertErrors are never populated in this
    // implementation; the branch below is currently unreachable.
    if (failRowCount > 0)
    {
        ProgressInfo.LogMessageDetail(
            "INSERT",
            DatAdmin.LogLevel.Error,
            String.Format("{0}, OK:{1}, FAIL:{2}", Texts.Get("s_error_inserting_into_table$table", "table", DestinationTable.FullName), okRowCount, failRowCount),
            insertErrors.CreateDelimitedText("\r\n")
        );
    }
    else
    {
        ProgressInfo.LogMessage("INSERT", DatAdmin.LogLevel.Info, Texts.Get("s_inserted_into_table$table$rows", "table", DestinationTable.FullName, "rows", okRowCount));
    }
}
/// <summary>
/// Regression test: a failure while writing to the COPY stream must be
/// cancellable, and Cancel() must surface our cancel message as a server error.
/// </summary>
static public void FailCopyInByWriting()
{
    cs = new CountStream();
    cs.FailAt = 2;
    cs.WrapStream = new FileStream("test_copy.cs", FileMode.Open, FileAccess.Read);
    cin = new NpgsqlCopyIn("COPY copy1 FROM STDIN", conn);
    cin.Start();
    if (!cin.IsActive)
    {
        throw new Exception("Copy started inactive");
    }
    byte[] buf = new byte[8];
    int i;
    try
    {
        while ((i = cs.Read(buf, 0, buf.Length)) > 0)
        {
            cin.CopyStream.Write(buf, 0, i);
        }
    }
    catch (Exception e)
    {
        if (("" + e).Contains("Test Exception handling"))
        {
            try
            {
                cin.Cancel("Test whether copy in fails correctly");
            }
            catch (Exception e2)
            {
                if (("" + e2).Contains("Test whether copy in fails correctly"))
                {
                    Console.Out.WriteLine("Copy from writing failed as requested.");
                    return;
                }
                // Not the cancel acknowledgement we expected: rethrow with
                // 'throw;' to preserve the stack trace (was 'throw e2;').
                throw;
            }
            throw new Exception("CopyIn.Cancel() didn't throw up the expected exception");
        }
        // Unrelated failure: rethrow preserving the stack trace (was 'throw e;').
        throw;
    }
    finally
    {
        cs.Close();
        cin.End(); // should do nothing
    }
    throw new Exception("Copy from writing did not fail as requested");
}
/// <summary>
/// Bulk-inserts every row of the DataTable via COPY ... FROM STDIN using an
/// NpgsqlCopySerializer. Column order follows the DataTable's column order.
/// </summary>
/// <param name="table">Rows to insert; TableName is used as the destination table.</param>
protected void InsertDataToDbBulkMethod(DataTable table)
{
    List<string> columns_names = new List<string>();
    for (int i = 0; i < table.Columns.Count; i++)
    {
        columns_names.Add(table.Columns[i].ColumnName);
    }
    string sql = string.Format("COPY {0}({1}) FROM STDIN", table.TableName, string.Join(",", columns_names.ToArray()));
    _cmd = CreateCommand(sql);
    _cmd.CommandType = CommandType.Text;
    var serializer = new NpgsqlCopySerializer(_conn as NpgsqlConnection);
    NpgsqlCopyIn copyIn = new NpgsqlCopyIn((_cmd as NpgsqlCommand), (_conn as NpgsqlConnection), serializer.ToStream);
    try
    {
        copyIn.Start();
        foreach (DataRow dr in table.Rows)
        {
            for (int i = 0; i < table.Columns.Count; i++)
            {
                AddValueToSerializer(serializer, dr[i]);
            }
            serializer.EndRow();
        }
        // Flush once after all rows instead of per row (same bytes, fewer round-trips).
        serializer.Flush();
        copyIn.End();
        serializer.Close();
    }
    catch (Exception e)
    {
        try
        {
            copyIn.Cancel("Exception has occured!");
        }
        catch (NpgsqlException ex)
        {
            // The server acknowledges a successful cancel by raising an error
            // that echoes our cancel message; any OTHER error means the copy
            // was NOT cancelled. (The previous check was inverted.)
            if (!ex.BaseMessage.Contains("Exception has occured!"))
            {
                throw new Exception(string.Format("Copy was uncanceled. exception1: {0};exception2: {1}", e.Message, ex.Message));
            }
        }
        // Propagate the original failure instead of silently swallowing it.
        throw;
    }
}
/// <summary>
/// Bulk-copies the DataTable rows via COPY ... FROM STDIN with '|' as the
/// field delimiter; each row is serialized by SerializeData and written as UTF-8.
/// </summary>
/// <param name="items">Rows to copy.</param>
/// <param name="connection">Open Npgsql connection.</param>
/// <param name="command">Command whose CommandText initially holds the table name.</param>
private void BulkCopy(System.Data.DataTable items, NpgsqlConnection connection, NpgsqlCommand command)
{
    var dataTableString = DataTableToString(items);
    var sql = string.Format("COPY {0} (\"{1}\") FROM STDIN WITH DELIMITER '|'", command.CommandText, dataTableString);
    command.CommandText = sql;
    var copy = new NpgsqlCopyIn(command, connection);
    try
    {
        copy.Start();
        foreach (System.Data.DataRow item in items.Rows)
        {
            var data = SerializeData(item.ItemArray);
            var raw = Encoding.UTF8.GetBytes(string.Concat(data, "\n"));
            copy.CopyStream.Write(raw, 0, raw.Length);
        }
    }
    catch (Exception e)
    {
        try
        {
            copy.Cancel("Undo copy");
        }
        catch (NpgsqlException e2)
        {
            // we should get an error in response to our cancel request:
            if (!("" + e2).Contains("Undo copy"))
            {
                // Include the cancel exception itself (e2), not the NpgsqlCopyIn
                // object — the previous message concatenated 'copy', which only
                // yields the type name and loses the actual cancel error.
                throw new PriusException("Failed to cancel copy: " + e2 + " upon failure: " + e);
            }
        }
        throw;
    }
    finally
    {
        copy.CopyStream?.Close();
        copy.End();
    }
}
/// <summary>
/// Does the actual bulk inserts using COPY ... FROM STDIN WITH BINARY:
/// writes the PGCOPY binary header, one length-prefixed tuple per row,
/// and the -1 trailer word.
/// </summary>
/// <param name="table">Rows to insert; null or empty tables are skipped.</param>
/// <param name="table_name">Destination table name.</param>
/// <param name="batch_size">Currently unused; all rows are streamed in one COPY.</param>
private void BulkCopy(DataTable table, string table_name, int batch_size)
{
    if (table != null && table.Rows.Count > 0)
    {
        // the copy command.
        NpgsqlCommand command = new NpgsqlCommand(string.Format(
            "COPY {0} FROM STDIN WITH BINARY", table_name), _connection);

        // the copy in stream.
        // TODO: convert this to binary mode for speed and
        // to make sure the char ` can also be included in tags!
        NpgsqlCopyIn cin = new NpgsqlCopyIn(command, _connection);

        // copy line-by-line.
        cin.Start();
        try
        {
            System.IO.Stream target = cin.CopyStream;

            // write the PGCOPY header: 11-byte signature "PGCOPY\n\377\r\n\0",
            // then a 32-bit flags field and a 32-bit header-extension length.
            List<byte> header = new List<byte>();
            header.AddRange(System.Text.Encoding.ASCII.GetBytes("PGCOPY\n"));
            header.Add((byte)255);
            header.AddRange(System.Text.Encoding.ASCII.GetBytes("\r\n\0"));
            header.Add((byte)0); // start of 32-bit Flags field (all zero: no OIDs)
            header.Add((byte)0);
            header.Add((byte)0);
            header.Add((byte)0);
            header.Add((byte)0); // start of 32-bit header extension length (zero: no extension)
            header.Add((byte)0);
            header.Add((byte)0);
            header.Add((byte)0);
            target.Write(header.ToArray(), 0, header.Count);

            for (int row_idx = 0; row_idx < table.Rows.Count; row_idx++)
            {
                // for each row generate the binary data.
                // write the 16-bit integer count of the number of fields
                // (ReverseEndianness writes the bytes to the stream in network order).
                byte[] field_count_data = BitConverter.GetBytes((short)table.Columns.Count);
                this.ReverseEndianness(target, field_count_data);

                for (int column_idx = 0; column_idx < table.Columns.Count; column_idx++)
                {
                    // serialize the data.
                    byte[] field_data = null;
                    object value = table.Rows[row_idx][column_idx];
                    bool reverse = false;
                    if (value == null || value == DBNull.Value)
                    {
                        // leave field_data null: encoded below as length -1 (SQL NULL).
                    }
                    else if (value is long)
                    {
                        field_data = BitConverter.GetBytes((long)value);
                        reverse = true;
                    }
                    else if (value is int)
                    {
                        field_data = BitConverter.GetBytes((int)value);
                        reverse = true;
                    }
                    else if (value is double)
                    {
                        field_data = BitConverter.GetBytes((double)value);
                        reverse = true;
                    }
                    else if (value is float)
                    {
                        field_data = BitConverter.GetBytes((float)value);
                        reverse = true;
                    }
                    else if (value is decimal)
                    {
                        // NOTE(review): sent as float8 bytes — only correct if the
                        // target column is double precision, not numeric; confirm schema.
                        field_data = BitConverter.GetBytes((double)value);
                        reverse = true;
                    }
                    else if (value is DateTime)
                    {
                        // Microseconds since 2000-01-01 (PostgreSQL timestamp epoch).
                        // Ticks are 100 ns, so divide by 10. The previous
                        // (long)TotalSeconds * 1000000 truncated to whole seconds
                        // before multiplying, discarding all sub-second precision.
                        long microseconds = ((DateTime)value - new DateTime(2000, 01, 01)).Ticks / 10;
                        field_data = BitConverter.GetBytes(microseconds);
                        reverse = true;
                    }
                    else if (value is string)
                    {
                        // convert the string data into bytes postgres can understand.
                        field_data = _encoding.GetBytes(value as string);
                    }
                    else if (value is bool)
                    {
                        // convert the bool data into bytes postgres can understand.
                        field_data = new byte[1];
                        field_data[0] = ((bool)value) ? (byte)1 : (byte)0;
                    }
                    else
                    {
                        // the type of the value is unsupported!
                        throw new InvalidDataException(string.Format("Data type not supported: {0}!", value.GetType()));
                    }

                    // write the 32-bit length of the field; -1 represents NULL.
                    int length = -1;
                    if (field_data != null)
                    {
                        length = field_data.Length;
                    }
                    byte[] length_data = BitConverter.GetBytes(length);
                    this.ReverseEndianness(target, length_data);

                    // write the data.
                    if (field_data != null)
                    {
                        if (reverse)
                        {
                            // numeric values must go out big-endian.
                            this.ReverseEndianness(target, field_data);
                        }
                        else
                        {
                            // byte-oriented values (strings, bools) go out as-is.
                            target.Write(field_data, 0, field_data.Length);
                        }
                    }
                }

                if (row_idx % 100 == 0)
                {
                    // flush the data once in a while.
                    target.Flush();
                }
            }

            // write the file trailer: a 16-bit integer word containing -1
            // (0xFFFF is the same in either byte order, so no reversal needed).
            byte[] trailer = BitConverter.GetBytes((short)-1);
            target.Write(trailer, 0, trailer.Length);

            // flush the stream data and close.
            target.Flush();
            target.Close();
        }
        catch (Exception ex)
        {
            // Cancel() reports back through an exception of its own (the server
            // echoes the cancel message); swallow it so the cleanup in 'finally'
            // still runs. The original exception is intentionally not rethrown,
            // matching the previous best-effort behavior of this method.
            try
            {
                cin.Cancel(ex.Message);
            }
            catch (Exception)
            {
                // expected cancel acknowledgement
            }
        }
        finally
        {
            cin.End();
        }

        OsmSharp.Logging.Log.TraceEvent("OsmSharp.Data.PostgreSQL.Osm.Streams.PostgeSQLOsmStreamTarget",
            OsmSharp.Logging.TraceEventType.Information,
            "Inserted {0} records into {1}!", table.Rows.Count, table_name);
    }
}
/// <summary>
/// Bulk copies a set of objects to the server.
/// </summary>
/// <param name="connection">The connection to use.</param>
/// <param name="tableName">The name of the table.</param>
/// <param name="reader">The reader to read objects from.</param>
/// <param name="configure">A callback method to configure the bulk copy object.</param>
/// <param name="options">Options for initializing the bulk copy object.</param>
/// <param name="transaction">An optional transaction to participate in.</param>
public override void BulkCopy(IDbConnection connection, string tableName, IDataReader reader, Action<InsightBulkCopy> configure, InsightBulkCopyOptions options, IDbTransaction transaction)
{
    if (reader == null)
    {
        throw new ArgumentNullException("reader");
    }

    // NOTE(review): tableName is interpolated directly into the COPY statement
    // (COPY cannot be parameterized); it must come from trusted code, never user input.
    NpgsqlCopyIn bulk = new NpgsqlCopyIn(String.Format(CultureInfo.InvariantCulture, "COPY {0} FROM STDIN WITH CSV", tableName), (NpgsqlConnection)connection);
    PostgreSQLInsightBulkCopy insightBulkCopy = new PostgreSQLInsightBulkCopy(bulk);

    try
    {
        bulk.Start();
        var stream = bulk.CopyStream;
        StreamWriter writer = new StreamWriter(stream);

        int row = 0;
        while (reader.Read())
        {
            for (int i = 0; i < reader.FieldCount; i++)
            {
                if (i > 0)
                {
                    writer.Write(CsvDelimiter);
                }

                object value = reader.GetValue(i);
                if (value != DBNull.Value)
                {
                    // quote every non-null value, escaping via the CSV regex
                    writer.Write(CsvQuote);
                    writer.Write(_csvRegex.Replace(value.ToString(), CsvReplacement));
                    writer.Write(CsvQuote);
                }
            }

            writer.WriteLine();
            row++;

            if (insightBulkCopy.NotifyAfter != 0 && row % insightBulkCopy.NotifyAfter == 0)
            {
                InsightRowsCopiedEventArgs e = new InsightRowsCopiedEventArgs();
                e.RowsCopied = row;
                insightBulkCopy.OnRowsCopied(insightBulkCopy, e);

                if (e.Abort)
                {
                    bulk.Cancel("Cancelled");
                    return;
                }
            }
        }

        // must call flush before end
        // cannot call close on the stream before end
        writer.Flush();
        bulk.End();
    }
    catch (Exception e)
    {
        // Cancel() surfaces the server's cancel acknowledgement as an error of
        // its own; swallow it so 'throw;' below re-raises the ORIGINAL failure
        // rather than the cancel acknowledgement.
        try
        {
            bulk.Cancel(e.Message);
        }
        catch (NpgsqlException)
        {
            // expected cancel acknowledgement
        }
        throw;
    }
}