/// <summary>
/// Bulk-loads every line of Data.txt into the "phrases" table using
/// COPY ... FROM STDIN.
/// </summary>
/// <param name="connectionStr">PostgreSQL connection string.</param>
/// <returns>true on success; false if the open or import failed.</returns>
public static bool FillTestData(string connectionStr)
{
    using (NpgsqlConnection conn = new NpgsqlConnection(connectionStr))
    {
        try
        {
            conn.Open();
            using (var writer = conn.BeginTextImport("COPY phrases (words) FROM STDIN"))
            {
                // ReadLines streams the file rather than loading it all into memory.
                foreach (var line in File.ReadLines("Data.txt"))
                {
                    writer.WriteLine(line);
                }
            }
            // No explicit Close(): the using block disposes (and closes) the connection.
            return true;
        }
        catch (Exception)
        {
            // Deliberate best-effort contract: callers only get a success flag.
            // NOTE(review): consider logging the exception so failures are diagnosable.
            return false;
        }
    }
}
/// <summary>
/// Migration step: bulk-loads feature classification rows from a
/// tab-separated source file into public.featureclassifications
/// via COPY ... FROM STDIN.
/// </summary>
public override void Up()
{
    Execute.WithConnection((conn, tran) =>
    {
        // A dedicated Npgsql connection is needed because BeginTextImport is an
        // Npgsql-specific API not exposed through the migration's IDbConnection.
        using (var pgconn = new NpgsqlConnection(conn.ConnectionString))
        {
            pgconn.Open();
            // QUOTE E'\b' / ESCAPE '\' effectively disable CSV quoting so the raw
            // tab-separated data passes through unmodified.
            var command = @"copy public.featureclassifications (class_code, content, description) from stdin CSV DELIMITER E'\t' QUOTE E'\b' ESCAPE '\' NULL AS '' encoding 'UTF8'";
            using (var writer = pgconn.BeginTextImport(command))
            using (var file = new StreamReader(_configuration.GetValue <string>("PathToFeatureCodesFile")))
            {
                string line;
                while ((line = file.ReadLine()) != null)
                {
                    // Skip comment lines in the source file.
                    if (line.StartsWith("#"))
                    {
                        continue;
                    }
                    writer.Write(line);
                    writer.Write("\n");
                }
            }
        }
    });
}
/// <summary>
/// Streams all rows from <paramref name="reader"/> into the target table
/// through COPY ... FROM STDIN, one formatted line per source row.
/// </summary>
/// <param name="reader">Source rows; consumed forward-only.</param>
/// <param name="schemaName">Optional schema; empty means unqualified table name.</param>
/// <param name="tableName">Destination table name.</param>
/// <param name="timeout">Command timeout (currently unused by this implementation).</param>
/// <param name="token">Cancellation token, honored per row.</param>
public override async Task BulkInsert(IDataReader reader, string schemaName, string tableName, int timeout, CancellationToken token)
{
    await using (var conn = new NpgsqlConnection(Config.ConnectionString))
    {
        await conn.OpenAsync(token);
        var copyQuery = GetCopyQuery(new CopyQueryParameters
        {
            TableName = string.IsNullOrEmpty(schemaName) ? tableName : schemaName + "." + tableName,
            Delimiter = ",",
            Encoding = "UTF8",
            Escape = '\"',
            Format = "csv",
            Header = false,
            Null = "",
            Quote = '\"'
        });
        using (var writer = conn.BeginTextImport(copyQuery))
        {
            while (reader.Read())
            {
                // The original accepted the token but never checked it after opening;
                // honor cancellation between rows so long imports can be aborted.
                token.ThrowIfCancellationRequested();
                await writer.WriteLineAsync(GetLine(reader));
            }
        }
    }
}
/// <summary>
/// Migration step: bulk-loads country reference data from the geonames
/// countryInfo file into public.countries via COPY ... FROM STDIN.
/// </summary>
public override void Up()
{
    Execute.WithConnection((conn, tran) =>
    {
        // A dedicated Npgsql connection is needed because BeginTextImport is an
        // Npgsql-specific API not exposed through the migration's IDbConnection.
        using (var pgconn = new NpgsqlConnection(conn.ConnectionString))
        {
            pgconn.Open();
            // QUOTE E'\b' / ESCAPE '\' effectively disable CSV quoting so the raw
            // tab-separated data passes through unmodified.
            var command = @"copy public.countries (iso, iso3, iso_numeric, fips, name, capital, area, population, continent, tld, currency_code, currency_name, phone, postal_code_format, postal_code_regex, languages, geoname_id, neighbours, equivalent_fips_code) from stdin CSV DELIMITER E'\t' QUOTE E'\b' ESCAPE '\' NULL AS '' encoding 'UTF8'";
            using (var writer = pgconn.BeginTextImport(command))
            using (var file = new StreamReader(_configuration.GetValue <string>("PathToCountryInfoFile")))
            {
                string line;
                while ((line = file.ReadLine()) != null)
                {
                    // The countryInfo file starts with '#' comment lines.
                    if (line.StartsWith("#"))
                    {
                        continue;
                    }
                    writer.Write(line);
                    writer.Write("\n");
                }
            }
        }
    });
}
/// <summary>
/// Timer callback: drains the shared bag of Start events and bulk-inserts
/// their timestamps into dbo."Starts" via COPY ... FROM STDIN.
/// </summary>
private void OnTimeElapsedEvent(Object stateInfo)
{
    // BUG FIX: the original exchanged a *local* variable with the field and then
    // reassigned _startsBag separately, leaving a race window in which Starts
    // added by other threads could land in the bag being drained after its
    // snapshot was taken and be silently lost. Swap the field atomically instead.
    var startsToProcess = Interlocked.Exchange(ref _startsBag, new ConcurrentBag <Start>());

    using (var connection = new NpgsqlConnection(connString))
    {
        connection.Open();
        using (var writer = connection.BeginTextImport("COPY dbo.\"Starts\"(datetimeadded) FROM STDIN;"))
        {
            foreach (var start in startsToProcess)
            {
                writer.WriteLine(start.DateTimeAdded);
            }
            // Flush inside the using block so protocol errors surface here
            // rather than during Dispose.
            writer.Flush();
        }
    }
}
/// <summary>
/// Migration step: bulk-loads the full geonames allCountries dataset into
/// public.geonames via COPY ... FROM STDIN.
/// </summary>
public override void Up()
{
    Execute.WithConnection((conn, tran) =>
    {
        // A dedicated Npgsql connection is needed because BeginTextImport is an
        // Npgsql-specific API not exposed through the migration's IDbConnection.
        using (var pgconn = new NpgsqlConnection(conn.ConnectionString))
        {
            pgconn.Open();
            // QUOTE E'\b' / ESCAPE '\' effectively disable CSV quoting so the raw
            // tab-separated data passes through unmodified.
            var command = @"copy public.geonames (geoname_id, name, asciiname, alternatenames, latitude, longitude, featureclass, featurecode, countrycode, cc2, admin1code, admin2code, admin3code, admin4code, population, elevation, dem, timezone, last_updated) from stdin CSV DELIMITER E'\t' QUOTE E'\b' ESCAPE '\' NULL AS '' encoding 'UTF8'";
            using (var writer = pgconn.BeginTextImport(command))
            using (var file = new StreamReader(_configuration.GetValue <string>("PathToAllCountriesFile")))
            {
                string line;
                while ((line = file.ReadLine()) != null)
                {
                    writer.Write(line);
                    writer.Write("\n");
                }
            }
        }
    });
}
/// <summary>
/// Streams the contents of <paramref name="file"/> line by line into an
/// already-open connection's COPY ... FROM STDIN import channel.
/// </summary>
/// <param name="conn">Open Npgsql connection.</param>
/// <param name="file">Source file to read.</param>
/// <param name="copyFromCommand">The COPY ... FROM STDIN statement to run.</param>
public static async Task CopyFromFile(NpgsqlConnection conn, FileInfo file, string copyFromCommand)
{
    using (var source = new StreamReader(file.OpenRead()))
    using (var importer = conn.BeginTextImport(copyFromCommand))
    {
        string row;
        // ReadLineAsync returns null at end of stream.
        while ((row = await source.ReadLineAsync()) != null)
        {
            await importer.WriteLineAsync(row);
        }
    }
}
/// <summary>
/// Verifies that cancelling an in-progress text COPY import discards the
/// rows that were already written.
/// </summary>
public void CancelTextImport()
{
    using (var conn = new NpgsqlConnection(ConnectionString))
    {
        conn.Open();
        ExecuteNonQuery("CREATE TEMP TABLE data (field_text TEXT, field_int2 SMALLINT, field_int4 INTEGER)", conn);

        // Start a text import, push one row, then cancel instead of completing.
        var importer = (NpgsqlCopyTextWriter)conn.BeginTextImport("COPY data (field_text, field_int4) FROM STDIN");
        importer.Write("HELLO\t1\n");
        importer.Cancel();

        // A cancelled COPY must leave the table untouched.
        Assert.That(ExecuteScalar(@"SELECT COUNT(*) FROM data", conn), Is.EqualTo(0));
    }
}
/// <summary>
/// Copies the pre-formatted rows in <paramref name="strArray"/> into the
/// TempUPC table via COPY ... FROM STDIN.
/// </summary>
/// <param name="strArray">Rows already formatted for the COPY text protocol.</param>
/// <returns>Always true; failures propagate as exceptions.</returns>
public bool BulkCopyToDB(IEnumerable <string> strArray)
{
    var connString = Configuration[Constants.PostgresqlConnStr];
    using (var conn = new NpgsqlConnection(connString))
    {
        conn.Open();
        using (var importer = conn.BeginTextImport($@"copy TempUPC (""descriptionid"",""upccode"",""description"") from STDIN"))
        {
            foreach (var row in strArray)
            {
                importer.WriteLine(row);
            }
        }
    }

    return true;
}
/// <summary>
/// Opens the connection, COPYs the given CSV payload (with header row) into
/// table(columns), then closes the connection.
/// NOTE(review): table and column names are interpolated directly into the
/// COPY statement — callers must not pass untrusted identifiers.
/// </summary>
public static async Task Copy(
    this NpgsqlConnection c,
    string table,
    string[] columns,
    string csv)
{
    var copy = $"COPY {table} ({string.Join(", ", columns)}) FROM STDIN WITH csv header";
    Console.WriteLine(copy);

    await c.OpenAsync();
    using (var importer = c.BeginTextImport(copy))
    {
        await importer.WriteAsync(csv);
    }

    await c.CloseAsync();
}
/// <summary>
/// Writes <paramref name="record"/> — a flat list of column values holding
/// record.Count / numColumns rows — into the database through COPY.
/// Each emitted line is: sensorID, the row's first value, then the
/// numTableColumns-1 values starting at startIndex, tab-separated.
/// </summary>
/// <param name="con">Open Npgsql connection.</param>
/// <param name="copyInto">The COPY ... FROM STDIN statement to run.</param>
/// <param name="startIndex">Offset of the first table column within a row's slice.</param>
/// <param name="numTableColumns">Number of table columns taken from each row (excluding sensorID).</param>
/// <param name="record">Flattened values, numColumns per source row.</param>
/// <param name="numColumns">Stride of one source row within record.</param>
/// <param name="sensorID">Sensor id prefixed to every row.</param>
private static void Write(NpgsqlConnection con, string copyInto, int startIndex, int numTableColumns, List <string> record, int numColumns, int sensorID)
{
    // StringBuilder avoids the quadratic cost of the original per-row
    // string concatenation (row += ...) while producing identical output.
    var row = new System.Text.StringBuilder();
    using (var writer = con.BeginTextImport(copyInto))
    {
        for (int i = 0; i < record.Count / numColumns; i++)
        {
            row.Append(sensorID).Append('\t');
            row.Append(record[i * numColumns]).Append('\t');
            for (int j = startIndex; j < startIndex + numTableColumns - 1; j++)
            {
                row.Append(record[j + numColumns * i]).Append('\t');
            }
            // Last column ends the line instead of adding another tab.
            row.Append(record[i * numColumns + startIndex + numTableColumns - 1]).Append('\n');
            writer.Write(row.ToString());
            row.Clear();
        }
    }
}
/// <summary>
/// Bulk-copies the given rows into <paramref name="destinationTableName"/>
/// using COPY ... FROM STDIN, emitting one tab-separated line per row.
/// </summary>
/// <returns>
/// null on success or when there is nothing to copy; otherwise the caught
/// exception (after reporting it through QueryHandler.OnErrorExecute).
/// </returns>
public static Exception Copy(string destinationConnectionString, IList <Dictionary <string, object> > datas, IList <string> fieldNames, string destinationTableName)
{
    try
    {
        // Nothing to do without both rows and a column list.
        if (datas == null || !datas.Any())
        {
            return null;
        }

        if (fieldNames == null || !fieldNames.Any())
        {
            return null;
        }

        using (var destinationConnection = new NpgsqlConnection(destinationConnectionString))
        {
            destinationConnection.Open();

            var target = $"{destinationTableName}({string.Join(",", fieldNames)})";
            using (var importer = destinationConnection.BeginTextImport($"COPY {target} FROM STDIN"))
            {
                foreach (var row in datas)
                {
                    var formatted = string.Join("\t", fieldNames.Select(name => $"{row[name].GenerateFormatText()}"));
                    importer.Write($"{formatted}\n");
                }

                // Explicit end-of-data marker of the COPY text protocol.
                importer.Write("\\.\n");
            }

            destinationConnection.Close();
            return null;
        }
    }
    catch (Exception exception)
    {
        QueryHandler.OnErrorExecute(exception);
        return exception;
    }
}
/// <summary>
/// Starts each kind of COPY operation (binary/raw/text, import and export)
/// and then disposes the connection mid-copy, to exercise cleanup paths.
/// FIX: export statements now use the documented "TO STDOUT" form — the
/// original's "TO STDIN" only parsed thanks to grammar leniency.
/// </summary>
public void CloseDuringCopy()
{
    // TODO: Check no broken connections were returned to the pool
    using (var conn = new NpgsqlConnection(ConnectionString))
    {
        conn.Open();
        ExecuteNonQuery("CREATE TEMP TABLE data (field_text TEXT, field_int2 SMALLINT, field_int4 INTEGER)", conn);
        conn.BeginBinaryImport("COPY data (field_text, field_int4) FROM STDIN BINARY");
    }
    using (var conn = new NpgsqlConnection(ConnectionString))
    {
        conn.Open();
        ExecuteNonQuery("CREATE TEMP TABLE data (field_text TEXT, field_int2 SMALLINT, field_int4 INTEGER)", conn);
        conn.BeginBinaryExport("COPY data (field_text, field_int2) TO STDOUT BINARY");
    }
    using (var conn = new NpgsqlConnection(ConnectionString))
    {
        conn.Open();
        ExecuteNonQuery("CREATE TEMP TABLE data (field_text TEXT, field_int2 SMALLINT, field_int4 INTEGER)", conn);
        conn.BeginRawBinaryCopy("COPY data (field_text, field_int4) FROM STDIN BINARY");
    }
    using (var conn = new NpgsqlConnection(ConnectionString))
    {
        conn.Open();
        ExecuteNonQuery("CREATE TEMP TABLE data (field_text TEXT, field_int2 SMALLINT, field_int4 INTEGER)", conn);
        conn.BeginRawBinaryCopy("COPY data (field_text, field_int4) TO STDOUT BINARY");
    }
    using (var conn = new NpgsqlConnection(ConnectionString))
    {
        conn.Open();
        ExecuteNonQuery("CREATE TEMP TABLE data (field_text TEXT, field_int2 SMALLINT, field_int4 INTEGER)", conn);
        conn.BeginTextImport("COPY data (field_text, field_int4) FROM STDIN");
    }
    using (var conn = new NpgsqlConnection(ConnectionString))
    {
        conn.Open();
        ExecuteNonQuery("CREATE TEMP TABLE data (field_text TEXT, field_int2 SMALLINT, field_int4 INTEGER)", conn);
        conn.BeginTextExport("COPY data (field_text, field_int4) TO STDOUT");
    }
}
/// <summary>
/// Builds a COPY statement and sends it to the database for a single batch write.
/// Drains the _logs queue, emitting one tab-separated row per report.
/// </summary>
private void WriteASingleBatchWithoutTryCatch()
{
    // No log to write, return without opening a connection to the database
    if (_logs.IsEmpty)
    {
        return;
    }

    // Establish a connection
    using (var conn = new NpgsqlConnection(_connectionString))
    {
        conn.Open();

        // Note : the BINARY copy seems to not work on Postgres 9.2. Not tested with other versions.
        using (var writer = conn.BeginTextImport($"COPY {_tableName} (eventid, date, durationms, ticks) FROM STDIN"))
        {
            // Read each log in the queue and write it in the SQL statement
            while (_logs.TryDequeue(out var report))
            {
                // BUG FIX: TotalMilliseconds must be formatted with the invariant
                // culture — under e.g. a European locale the current culture emits a
                // comma decimal separator, corrupting the tab-separated COPY row.
                var durationMs = report.Duration.TotalMilliseconds.ToString(System.Globalization.CultureInfo.InvariantCulture);
                writer.Write($"{report.EventId}\t{report.StartDate.ToUniversalTime().ToString("o")}\t{durationMs}\t{report.Ticks}\n");
            }
        }
    }
}
/// <summary>
/// Bulk-writes a DataTable into the named database table via COPY.
/// </summary>
/// <param name="sourceDataTable">Source rows to write.</param>
/// <param name="targetTableName">Destination table name (quoted verbatim).</param>
/// <returns>The number of rows in the source table.</returns>
public int BulkCopy(DataTable sourceDataTable, string targetTableName)
{
    using (NpgsqlConnection connection = new NpgsqlConnection(connectionString))
    {
        connection.Open();
        var columns = sourceDataTable.Columns.Cast <DataColumn>().Select(x => x.ColumnName).ToList();
        // BUG FIX: rows are written as CSV *text* through BeginTextImport, so the
        // COPY statement must declare FORMAT CSV — the original (FORMAT BINARY)
        // made the server expect the binary protocol and reject the payload.
        string copyString = "COPY " + "\"" + targetTableName + "\"" + " (\"" + string.Join("\",\"", columns) + "\") FROM STDIN (FORMAT CSV)";
        using (var writer = connection.BeginTextImport(copyString))
        {
            foreach (DataRow dr in sourceDataTable.Rows)
            {
                var rowBuilder = new System.Text.StringBuilder();
                for (int i = 0; i < columns.Count; i++)
                {
                    if (i > 0)
                    {
                        rowBuilder.Append(',');
                    }
                    var cell = dr[columns[i]].ToString();
                    // Quote fields containing the delimiter, a quote, or a newline,
                    // doubling embedded quotes per CSV rules (the original only
                    // handled embedded commas).
                    if (cell.Contains(",") || cell.Contains("\"") || cell.Contains("\n"))
                    {
                        cell = "\"" + cell.Replace("\"", "\"\"") + "\"";
                    }
                    rowBuilder.Append(cell);
                }
                writer.WriteLine(rowBuilder.ToString());
            }
        }
        return sourceDataTable.Rows.Count;
    }
}
/// <summary>
/// Batch-updates a database table by streaming a CSV file (with header)
/// through COPY ... FROM STDIN, printing progress every 8 seconds.
/// </summary>
/// <param name="connectionString">Base connection string; Pooling=false and Keepalive are appended.</param>
/// <param name="tableName">Destination table.</param>
/// <param name="csvFilePath">Path to the CSV source file.</param>
/// <returns>true on success, false if the import failed.</returns>
/// <exception cref="Exception">Thrown when the CSV file does not exist.</exception>
private static bool BatchUpdateDbTable(string connectionString, string tableName, string csvFilePath)
{
    string copyFromCommand = $"COPY {tableName} FROM STDIN WITH (FORMAT CSV, HEADER TRUE)";
    int allRowsCount = 0;
    int rowCount = 0;
    bool result = false;

    if (!File.Exists(csvFilePath))
    {
        throw new Exception($"File {csvFilePath} not found.");
    }

    // Progress reporter; disposed together with the import. (The original
    // never disposed the timer.)
    using (Timer timer = new Timer(8000) { AutoReset = true })
    {
        timer.Elapsed += delegate
        {
            Console.WriteLine($"{rowCount} rows from {allRowsCount} complete. ({(float)rowCount/allRowsCount:p2})");
            Console.CursorTop--;
        };

        IEnumerable <string> allRows = File.ReadLines(csvFilePath).Where(rw => rw != string.Empty);
        allRowsCount = allRows.Count();

        // With very large tables (> ~250k rows) writer.Close() can outlive the
        // default keepalive window while the server commits the COPY.
        connectionString = connectionString + "; Pooling=false; Keepalive=30;";
        timer.Start();
        using (NpgsqlConnection connection = new NpgsqlConnection(connectionString))
        {
            connection.Open();
            try
            {
                // BUG FIX: the writer is now in a using block so the COPY channel is
                // released even when an exception interrupts the loop — the original
                // leaked it (and left the connection mid-COPY) on any error.
                using (TextWriter writer = connection.BeginTextImport(copyFromCommand))
                {
                    int flushCount = 0;
                    foreach (string dataRow in allRows)
                    {
                        writer.WriteLine(dataRow);
                        rowCount++;
                        // Flush periodically so buffered rows reach the server in chunks.
                        if (++flushCount >= 200000)
                        {
                            writer.Flush();
                            flushCount = 0;
                        }
                    }
                    timer.Stop();
                    Console.WriteLine($"{rowCount} rows complete. (100%)");
                    DateTime start = DateTime.Now;
                    Console.WriteLine("Commit data to server...");
                    // Explicit Close so the (possibly long) server-side commit is timed.
                    writer.Close();
                    DateTime finish = DateTime.Now;
                    Console.WriteLine($"Complete at: {finish - start}");
                }
                result = true;
            }
            catch (Exception e)
            {
                timer.Stop();
                Console.WriteLine(e.Message);
                result = false;
            }
        }
    }

    return result;
}