public void InvariantCultureNpgsqlCopySerializer()
{
    // Test for https://github.com/npgsql/Npgsql/pull/92
    // SetCulture is used to set a culture where a comma is used to separate decimal values (0,5),
    // which will cause problems if Npgsql doesn't convert correctly to use a point (0.5).
    var cmd = new NpgsqlCommand("COPY data (field_int4, field_int8, field_float4) FROM STDIN", Conn);
    var npgsqlCopySerializer = new NpgsqlCopySerializer(Conn);
    var npgsqlCopyIn = new NpgsqlCopyIn(cmd, Conn, npgsqlCopySerializer.ToStream);

    npgsqlCopyIn.Start();
    npgsqlCopySerializer.AddInt32(300000);
    npgsqlCopySerializer.AddInt64(1000000);
    npgsqlCopySerializer.AddNumber(0.5);
    npgsqlCopySerializer.EndRow();
    npgsqlCopySerializer.Flush();
    npgsqlCopyIn.End();

    NpgsqlDataReader dr = new NpgsqlCommand("select field_int4, field_int8, field_float4 from data", Conn).ExecuteReader();
    dr.Read();

    Assert.AreEqual(300000, dr[0]);
    Assert.AreEqual(1000000, dr[1]);
    Assert.AreEqual(0.5, dr[2]);
}
public override void WriteResult(object Obj)
{
    var dataStream = formatter.FormatObject(Obj) as Stream;
    if (dataStream == null)
        return;

    var tableName = getTableNameFromObject(Obj);
    var query = "COPY " + tableName + " FROM STDIN WITH CSV";

    if (writeConn.State != System.Data.ConnectionState.Open)
        writeConn.Open();

    var cmd = new NpgsqlCommand(query, writeConn as NpgsqlConnection);
    var cin = new NpgsqlCopyIn(cmd, writeConn as NpgsqlConnection);
    try
    {
        cin.Start();
        dataStream.Seek(0, SeekOrigin.Begin);
        dataStream.CopyTo(cin.CopyStream);
        cin.End();
    }
    catch
    {
        cin.Cancel("Undo copy");
        throw;
    }
    finally
    {
        dataStream.Dispose();
    }
}
// Stream success tests
public static void CopyInFromStream()
{
    cs = new CountStream();
    cs.WrapStream = new FileStream("test_copy.cs", FileMode.Open, FileAccess.Read);
    cin = new NpgsqlCopyIn(
        new NpgsqlCommand("COPY copy1 FROM STDIN DELIMITER '\b'", conn),
        conn,
        cs);
    cin.Start();
    if (cin.IsActive)
    {
        throw new Exception("Copy from stream did not complete in single pass");
    }
    InLength += cs.BytesPassed;
    InSum += cs.CheckSum;
    cs.Close();
    Console.Out.WriteLine("Copy from stream ok");
}
static long InSum = 0; // this really is a SUM of the bytes! comparison depends on that.

#endregion Fields

#region Methods

public static void CopyInByWriting()
{
    cs = new CountStream();
    cs.WrapStream = new FileStream("test_copy.cs", FileMode.Open, FileAccess.Read);
    cin = new NpgsqlCopyIn("COPY copy1 FROM STDIN DELIMITER '\b'", conn);
    cin.Start();
    if (!cin.IsActive)
    {
        throw new Exception("Copy started inactive");
    }
    byte[] buf = new byte[8];
    int i;
    while ((i = cs.Read(buf, 0, buf.Length)) > 0)
    {
        cin.CopyStream.Write(buf, 0, i);
    }
    cin.End();
    InLength += cs.BytesPassed;
    InSum += cs.CheckSum;
    cs.Close();
    Console.Out.WriteLine("Copy from writing ok");
}
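Several of these tests (CopyInFromStream, CopyInByWriting, and the failure tests further down) depend on a CountStream helper that isn't included in this section. Below is a minimal sketch, assuming it wraps an inner stream, tallies the count and byte-value sum of everything read through it, and can throw a "Test Exception handling" error on a chosen read; the original implementation may differ.

// Minimal sketch of the CountStream helper assumed by these tests (not the original source).
class CountStream : Stream
{
    public Stream WrapStream;   // the inner stream being observed
    public long BytesPassed;    // total number of bytes read through this stream
    public long CheckSum;       // running sum of all byte values (the tests compare sums)
    public int FailAt;          // throw on this read; 0 = never (assumed semantics)
    private int _reads;

    public override int Read(byte[] buffer, int offset, int count)
    {
        if (FailAt > 0 && ++_reads >= FailAt)
            throw new Exception("Test Exception handling");
        int n = WrapStream.Read(buffer, offset, count);
        BytesPassed += n;
        for (int i = 0; i < n; i++)
            CheckSum += buffer[offset + i];
        return n;
    }

    public override bool CanRead { get { return true; } }
    public override bool CanSeek { get { return false; } }
    public override bool CanWrite { get { return false; } }
    public override long Length { get { return WrapStream.Length; } }
    public override long Position
    {
        get { return WrapStream.Position; }
        set { throw new NotSupportedException(); }
    }
    public override void Flush() { WrapStream.Flush(); }
    public override long Seek(long offset, SeekOrigin origin) { throw new NotSupportedException(); }
    public override void SetLength(long value) { throw new NotSupportedException(); }
    public override void Write(byte[] buffer, int offset, int count) { throw new NotSupportedException(); }
    public override void Close() { WrapStream.Close(); base.Close(); }
}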
public void Bug188BufferNpgsqlCopySerializer()
{
    var cmd = new NpgsqlCommand("COPY data (field_int4, field_text) FROM STDIN", Conn);
    var npgsqlCopySerializer = new NpgsqlCopySerializer(Conn);
    var npgsqlCopyIn = new NpgsqlCopyIn(cmd, Conn, npgsqlCopySerializer.ToStream);

    string str = "Very long string".PadRight(NpgsqlCopySerializer.DEFAULT_BUFFER_SIZE, 'z');

    npgsqlCopyIn.Start();
    npgsqlCopySerializer.AddInt32(12345678);
    npgsqlCopySerializer.AddString(str);
    npgsqlCopySerializer.EndRow();
    npgsqlCopySerializer.Flush();
    npgsqlCopyIn.End();

    NpgsqlDataReader dr = new NpgsqlCommand("select field_int4, field_text from data", Conn).ExecuteReader();
    dr.Read();

    Assert.AreEqual(12345678, dr[0]);
    Assert.AreEqual(str, dr[1]);
}
public PostgreSQLInsightBulkCopy(NpgsqlCopyIn bulkCopy)
{
    if (bulkCopy == null)
        throw new ArgumentNullException("bulkCopy");

    _bulkCopy = bulkCopy;
    NotifyAfter = 1000;
}
/// <summary>
/// Bulk copies a set of objects to the server.
/// </summary>
/// <param name="connection">The connection to use.</param>
/// <param name="tableName">The name of the table.</param>
/// <param name="reader">The reader to read objects from.</param>
/// <param name="configure">A callback method to configure the bulk copy object.</param>
/// <param name="options">Options for initializing the bulk copy object.</param>
/// <param name="transaction">An optional transaction to participate in.</param>
public override void BulkCopy(IDbConnection connection, string tableName, IDataReader reader, Action<InsightBulkCopy> configure, InsightBulkCopyOptions options, IDbTransaction transaction)
{
    if (reader == null) throw new ArgumentNullException("reader");

    NpgsqlCopyIn bulk = new NpgsqlCopyIn(
        String.Format(CultureInfo.InvariantCulture, "COPY {0} FROM STDIN WITH CSV", tableName),
        (NpgsqlConnection)connection);
    PostgreSQLInsightBulkCopy insightBulkCopy = new PostgreSQLInsightBulkCopy(bulk);

    try
    {
        bulk.Start();

        var stream = bulk.CopyStream;
        StreamWriter writer = new StreamWriter(stream);

        int row = 0;
        while (reader.Read())
        {
            for (int i = 0; i < reader.FieldCount; i++)
            {
                if (i > 0)
                    writer.Write(CsvDelimiter);

                object value = reader.GetValue(i);
                if (value != DBNull.Value)
                {
                    writer.Write(CsvQuote);
                    writer.Write(_csvRegex.Replace(value.ToString(), CsvReplacement));
                    writer.Write(CsvQuote);
                }
            }

            writer.WriteLine();
            row++;

            if (insightBulkCopy.NotifyAfter != 0 && row % insightBulkCopy.NotifyAfter == 0)
            {
                InsightRowsCopiedEventArgs e = new InsightRowsCopiedEventArgs();
                e.RowsCopied = row;
                insightBulkCopy.OnRowsCopied(insightBulkCopy, e);
                if (e.Abort)
                {
                    bulk.Cancel("Cancelled");
                    return;
                }
            }
        }

        // must call flush before end
        // cannot call close on the stream before end
        writer.Flush();
        bulk.End();
    }
    catch (Exception e)
    {
        bulk.Cancel(e.Message);
        throw;
    }
}
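The CSV helpers referenced above (CsvDelimiter, CsvQuote, CsvReplacement, _csvRegex) aren't part of this snippet. A plausible minimal definition, assuming standard CSV escaping where embedded quotes are doubled; only the names come from the code above, the bodies are guesses.

// Assumed definitions for the CSV helpers used above (not the original source).
// Requires: using System.Text.RegularExpressions;
private const string CsvDelimiter = ",";        // separates fields in the generated CSV
private const string CsvQuote = "\"";           // every non-null value is wrapped in quotes
private const string CsvReplacement = "\"\"";   // embedded quotes are doubled per CSV rules
private static readonly Regex _csvRegex = new Regex("\"", RegexOptions.Compiled);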
// Serializer success test
public static void CopyInWithSerializer()
{
    NpgsqlCopySerializer sink = new NpgsqlCopySerializer(conn);
    String q = "COPY copy2(field_int4, field_int8, field_text, field_timestamp, field_bool) FROM STDIN";
    cin = new NpgsqlCopyIn(q, conn);
    cin.Start();
    if (!cin.IsActive)
    {
        throw new Exception("Copy started inactive");
    }
    sink.AddInt32(-13);
    sink.AddNull();
    sink.AddString("First row");
    sink.AddDateTime(new DateTime(2020, 12, 22, 23, 33, 45, 765));
    sink.AddBool(true);
    sink.EndRow();
    sink.AddNull();
    sink.AddNull();
    sink.AddString("Second row");
    sink.Close();
    Console.Out.WriteLine("Copy through serializer ok");
}
// Stream failure tests
public static void FailCopyInFromStream()
{
    cs = new CountStream();
    cs.FailAt = 2;
    cs.WrapStream = new FileStream("test_copy.cs", FileMode.Open, FileAccess.Read);
    cin = new NpgsqlCopyIn(
        new NpgsqlCommand("COPY copy1 FROM STDIN DELIMITER '\b'", conn),
        conn,
        cs);
    try
    {
        cin.Start();
    }
    catch (Exception e)
    {
        if (("" + e).Contains("Test Exception handling"))
        {
            Console.Out.WriteLine("Copy from stream failed as requested.");
            return;
        }
        else
        {
            Console.Out.WriteLine("Copy from stream failing failed: " + e);
            throw; // rethrow without resetting the stack trace
        }
    }
    finally
    {
        cs.Close();
        cin.End(); // should do nothing
    }
    Console.Out.WriteLine("Copy from stream did not fail as requested");
}
public static void FailCopyInByWriting()
{
    cs = new CountStream();
    cs.FailAt = 2;
    cs.WrapStream = new FileStream("test_copy.cs", FileMode.Open, FileAccess.Read);
    cin = new NpgsqlCopyIn("COPY copy1 FROM STDIN", conn);
    cin.Start();
    if (!cin.IsActive)
    {
        throw new Exception("Copy started inactive");
    }
    byte[] buf = new byte[8];
    int i;
    try
    {
        while ((i = cs.Read(buf, 0, buf.Length)) > 0)
        {
            cin.CopyStream.Write(buf, 0, i);
        }
    }
    catch (Exception e)
    {
        if (("" + e).Contains("Test Exception handling"))
        {
            try
            {
                cin.Cancel("Test whether copy in fails correctly");
            }
            catch (Exception e2)
            {
                if (("" + e2).Contains("Test whether copy in fails correctly"))
                {
                    Console.Out.WriteLine("Copy from writing failed as requested.");
                    return;
                }
                throw; // rethrow without resetting the stack trace
            }
            throw new Exception("CopyIn.Cancel() didn't throw the expected exception");
        }
        throw;
    }
    finally
    {
        cs.Close();
        cin.End(); // should do nothing
    }
    throw new Exception("Copy from writing did not fail as requested");
}
/// <summary>
/// Does the actual bulk inserts.
/// </summary>
/// <param name="table"></param>
/// <param name="table_name"></param>
/// <param name="batch_size"></param>
private void BulkCopy(DataTable table, string table_name, int batch_size)
{
    if (table != null && table.Rows.Count > 0)
    {
        // the copy command.
        NpgsqlCommand command = new NpgsqlCommand(string.Format(
            "COPY {0} FROM STDIN WITH BINARY", table_name), _connection);

        // the copy-in stream.
        // TODO: convert this to binary mode for speed and
        // to make sure the char ` can also be included in tags!
        NpgsqlCopyIn cin = new NpgsqlCopyIn(command, _connection);

        // copy line-by-line.
        cin.Start();
        try
        {
            System.IO.Stream target = cin.CopyStream;
            //Stream target = new FileInfo(@"C:\Users\ben.abelshausen\Desktop\node_osmsharp.copy").OpenWrite();

            // write the header: 11-byte PGCOPY signature, 32-bit flags field,
            // and 32-bit header extension length (zero: no extension).
            List<byte> header = new List<byte>();
            header.AddRange(System.Text.Encoding.ASCII.GetBytes("PGCOPY\n"));
            header.Add((byte)255);
            header.AddRange(System.Text.Encoding.ASCII.GetBytes("\r\n\0"));
            header.Add((byte)0); // start of the flags field
            header.Add((byte)0);
            header.Add((byte)0);
            header.Add((byte)0);
            header.Add((byte)0); // start of the header extension length
            header.Add((byte)0);
            header.Add((byte)0);
            header.Add((byte)0);
            target.Write(header.ToArray(), 0, header.Count);

            for (int row_idx = 0; row_idx < table.Rows.Count; row_idx++)
            { // for each row generate the binary data.
                // write the 16-bit integer count of the number of fields.
                byte[] field_count_data = BitConverter.GetBytes((short)table.Columns.Count);
                this.ReverseEndianness(target, field_count_data);
                //target.Write(field_count_data, 0, field_count_data.Length);

                for (int column_idx = 0; column_idx < table.Columns.Count; column_idx++)
                {
                    // serialize the data.
                    byte[] field_data = null;
                    object value = table.Rows[row_idx][column_idx];
                    bool reverse = false;
                    if (value == null || value == DBNull.Value)
                    { // do nothing: just leave the field_data null.
                    }
                    else if (value is long)
                    { // convert the long data into bytes postgres can understand.
                        field_data = BitConverter.GetBytes((long)value);
                        reverse = true;
                    }
                    else if (value is int)
                    { // convert the int data into bytes postgres can understand.
                        field_data = BitConverter.GetBytes((int)value);
                        reverse = true;
                    }
                    else if (value is double)
                    { // convert the double data into bytes postgres can understand.
                        field_data = BitConverter.GetBytes((double)value);
                        reverse = true;
                    }
                    else if (value is float)
                    { // convert the float data into bytes postgres can understand.
                        field_data = BitConverter.GetBytes((float)value);
                        reverse = true;
                    }
                    else if (value is decimal)
                    { // convert the decimal data into bytes postgres can understand.
                        field_data = BitConverter.GetBytes((double)value);
                        reverse = true;
                    }
                    else if (value is DateTime)
                    { // convert the date time into microseconds since 2000-01-01,
                      // the epoch postgres uses for binary timestamps. Ticks are
                      // 100ns, so ticks / 10 = microseconds; the previous
                      // (long)TotalSeconds * 1000000 dropped the sub-second part.
                        long microseconds = ((DateTime)value - (new DateTime(2000, 01, 01))).Ticks / 10;
                        //field_data = System.Text.Encoding.ASCII.GetBytes(((DateTime)value).ToString(
                        //    System.Globalization.CultureInfo.InvariantCulture));
                        field_data = BitConverter.GetBytes(microseconds);
                        reverse = true;
                    }
                    else if (value is string)
                    { // convert the string data into bytes postgres can understand.
                        field_data = System.Text.Encoding.ASCII.GetBytes(value as string);
                    }
                    else if (value is bool)
                    { // convert the bool data into bytes postgres can understand.
                        field_data = new byte[1];
                        if ((bool)value)
                        {
                            field_data[0] = (byte)1;
                        }
                        else
                        {
                            field_data[0] = (byte)0;
                        }
                    }
                    else
                    { // the type of the value is unsupported!
                        throw new InvalidDataException(string.Format("Data type not supported: {0}!", value.GetType()));
                    }

                    // write the length of the field.
                    int length = -1; // represents NULL.
                    if (field_data != null)
                    { // the length is non-zero.
                        length = field_data.Length;
                    }
                    byte[] length_data = BitConverter.GetBytes(length);
                    this.ReverseEndianness(target, length_data);

                    // write the data.
                    if (field_data != null)
                    {
                        if (reverse)
                        { // write the data in reverse.
                            this.ReverseEndianness(target, field_data);
                        }
                        else
                        { // write the data in order.
                            target.Write(field_data, 0, field_data.Length);
                        }
                    }
                }

                if (row_idx % 100 == 0)
                { // flush the data once in a while.
                    target.Flush();
                }
            }

            // write the file trailer: a 16-bit integer word containing -1.
            byte[] trailer = BitConverter.GetBytes((short)-1);
            target.Write(trailer, 0, trailer.Length);

            // flush the stream data and close.
            target.Flush();
            target.Close();
        }
        catch (Exception ex)
        {
            cin.Cancel(ex.Message);
        }
        finally
        {
            cin.End();
        }

        OsmSharp.Logging.Log.TraceEvent("OsmSharp.Data.PostgreSQL.Osm.Streams.PostgeSQLOsmStreamTarget",
            OsmSharp.Logging.TraceEventType.Information,
            "Inserted {0} records into {1}!", table.Rows.Count, table_name);
    }
}
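The ReverseEndianness helper called throughout the method above isn't shown. Judging from its call sites (the plain Write is commented out next to it), it apparently both reverses the byte order and writes the result to the stream; a minimal sketch under that assumption:

// Assumed implementation of ReverseEndianness (not the original source). PGCOPY binary
// fields are big-endian; BitConverter on little-endian machines produces the bytes in
// the opposite order, so the array is written to the stream back-to-front. A more
// robust version would consult BitConverter.IsLittleEndian before reversing.
private void ReverseEndianness(System.IO.Stream target, byte[] data)
{
    for (int i = data.Length - 1; i >= 0; i--)
    {
        target.WriteByte(data[i]);
    }
}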
public void Bug221MillisecondsFieldNotCopied()
{
    // Test for https://github.com/npgsql/Npgsql/issues/221
    // The milliseconds field is not properly copied in NpgsqlCopySerializer.cs in method AddDateTime
    var cmd = new NpgsqlCommand("COPY data (field_timestamp) FROM STDIN", Conn);
    var npgsqlCopySerializer = new NpgsqlCopySerializer(Conn);
    var npgsqlCopyIn = new NpgsqlCopyIn(cmd, Conn, npgsqlCopySerializer.ToStream);
    var testDate = DateTime.Parse("2002-02-02 09:00:23.005");

    npgsqlCopyIn.Start();
    npgsqlCopySerializer.AddDateTime(testDate);
    npgsqlCopySerializer.EndRow();
    npgsqlCopySerializer.Flush();
    npgsqlCopyIn.End();

    NpgsqlDataReader dr = new NpgsqlCommand("select field_timestamp from data", Conn).ExecuteReader();
    dr.Read();

    Assert.AreEqual(testDate, dr[0]);
}
public void Bug219NpgsqlCopyInConcurrentUsage()
{
    try
    {
        // Create temporary test tables
        ExecuteNonQuery(@"CREATE TABLE Bug219_table1 (
                            id integer,
                            name character varying(100)
                          ) WITH ( OIDS=FALSE );");
        ExecuteNonQuery(@"CREATE TABLE Bug219_table2 (
                            id integer,
                            null1 integer,
                            name character varying(100),
                            null2 integer,
                            description character varying(1000),
                            null3 integer
                          ) WITH ( OIDS=FALSE );");

        using (var connection1 = new NpgsqlConnection(ConnectionString))
        using (var connection2 = new NpgsqlConnection(ConnectionString))
        {
            connection1.Open();
            connection2.Open();

            var copy1 = new NpgsqlCopyIn("COPY Bug219_table1 FROM STDIN;", connection1);
            var copy2 = new NpgsqlCopyIn("COPY Bug219_table2 FROM STDIN;", connection2);

            copy1.Start();
            copy2.Start();

            NpgsqlCopySerializer cs1 = new NpgsqlCopySerializer(connection1);
            //NpgsqlCopySerializer cs2 = new NpgsqlCopySerializer(connection2);

            for (int index = 0; index < 10; index++)
            {
                cs1.AddInt32(index);
                cs1.AddString(string.Format("Index {0} ", index));
                cs1.EndRow();

                /*cs2.AddInt32(index);
                cs2.AddNull();
                cs2.AddString(string.Format("Index {0} ", index));
                cs2.AddNull();
                cs2.AddString("jjjjj");
                cs2.AddNull();
                cs2.EndRow();*/
            }

            cs1.Close(); //Exception
            //cs2.Close();

            copy1.End();
            copy2.End();
        }
    }
    finally
    {
        ExecuteNonQuery(@"DROP TABLE IF EXISTS Bug219_table1");
        ExecuteNonQuery(@"DROP TABLE IF EXISTS Bug219_table2");
    }
}
protected void InsertDataToDbBulkMethod(DataTable table)
{
    List<string> columns_names = new List<string>();
    for (int i = 0; i < table.Columns.Count; i++)
        columns_names.Add(table.Columns[i].ColumnName);

    string sql = string.Format("COPY {0}({1}) FROM STDIN",
        table.TableName, string.Join(",", columns_names.ToArray()));

    _cmd = CreateCommand(sql);
    _cmd.CommandType = CommandType.Text;

    var serializer = new NpgsqlCopySerializer(_conn as NpgsqlConnection);
    NpgsqlCopyIn copyIn = new NpgsqlCopyIn((_cmd as NpgsqlCommand), (_conn as NpgsqlConnection), serializer.ToStream);

    try
    {
        copyIn.Start();
        foreach (DataRow dr in table.Rows)
        {
            for (int i = 0; i < table.Columns.Count; i++)
                AddValueToSerializer(serializer, dr[i]);
            serializer.EndRow();
            serializer.Flush();
        }
        copyIn.End();
        serializer.Close();
    }
    catch (Exception e)
    {
        try
        {
            copyIn.Cancel("Exception has occurred!");
        }
        catch (NpgsqlException ex)
        {
            if (ex.BaseMessage.Contains("Exception has occurred!"))
                throw new Exception(string.Format(
                    "Copy was uncanceled. exception1: {0}; exception2: {1}", e.Message, ex.Message));
        }
    }
}
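The AddValueToSerializer helper isn't included in this snippet. A minimal sketch, assuming it dispatches on the runtime type of each DataTable cell to the corresponding NpgsqlCopySerializer Add* method; the exact type coverage is an assumption.

// Assumed shape of the AddValueToSerializer helper (not the original source):
// dispatch on the cell's runtime type to the matching serializer method.
private void AddValueToSerializer(NpgsqlCopySerializer serializer, object value)
{
    if (value == null || value == DBNull.Value)
        serializer.AddNull();
    else if (value is int)
        serializer.AddInt32((int)value);
    else if (value is long)
        serializer.AddInt64((long)value);
    else if (value is bool)
        serializer.AddBool((bool)value);
    else if (value is DateTime)
        serializer.AddDateTime((DateTime)value);
    else
        serializer.AddString(value.ToString());
}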