public void Test_LocalDate()
{
    CreateTable("date");

    var copyHelper = new PostgreSQLCopyHelper<AllNodaTypesEntity>("sample", "noda_time_test")
        .MapDate("col_noda", x => x.LocalDate);

    var expected = new LocalDate(2011, 1, 2);

    copyHelper.SaveAll(connection, new[] { new AllNodaTypesEntity { LocalDate = expected } });

    // Read the row back and verify the round-tripped value:
    var rows = GetAll();

    Assert.AreEqual(expected, (LocalDate)rows[0][0]);
}
public void Test_UpperCase_BulkInsert()
{
    // Schema / table names are given in upper case; the lookup below uses lower case.
    subject = new PostgreSQLCopyHelper<TestEntity>("SAMPLE", "UNIT_TEST")
        .MapSmallInt("col_smallint", x => x.SmallInt);

    var minEntity = new TestEntity() { SmallInt = Int16.MinValue };
    var maxEntity = new TestEntity() { SmallInt = Int16.MaxValue };

    subject.SaveAll(connection, new[] { minEntity, maxEntity });

    var result = connection.GetAll("sample", "unit_test");

    // Two rows, both non-null, holding the extreme Int16 values:
    Assert.AreEqual(2, result.Count);

    Assert.IsNotNull(result[0][0]);
    Assert.IsNotNull(result[1][0]);

    Assert.AreEqual(Int16.MinValue, (Int16)result[0][0]);
    Assert.AreEqual(Int16.MaxValue, (Int16)result[1][0]);
}
public void Test_ZonedDateTime()
{
    CreateTable("timestamptz");

    var copyHelper = new PostgreSQLCopyHelper<AllNodaTypesEntity>("sample", "noda_time_test")
        .MapTimeStampTz("col_noda", x => x.ZonedDateTime);

    var timezone = DateTimeZoneProviders.Tzdb.GetZoneOrNull("Africa/Kigali");
    var instant = Instant.FromUtc(2011, 1, 5, 22, 50, 0) + Duration.FromMilliseconds(193);

    var entity = new AllNodaTypesEntity
    {
        ZonedDateTime = new ZonedDateTime(instant, timezone)
    };

    copyHelper.SaveAll(connection, new[] { entity });

    var rows = GetAll();

    // TODO: How does Postgres <-> NodaTime convert Timezones? There is a good test here, but
    // I couldn't see through it yet:
    //
    // https://github.com/npgsql/npgsql/blob/766658172f08abb0b87a6b7f01a7ea4b49952a29/test/Npgsql.PluginTests/NodaTimeTests.cs
    //
    Assert.AreEqual(instant, (Instant)rows[0][0]);
}
public void Test_Interval()
{
    CreateTable("interval");

    var copyHelper = new PostgreSQLCopyHelper<AllNodaTypesEntity>("sample", "noda_time_test")
        .MapInterval("col_noda", x => x.Period);

    // A period spanning most of the year 2020:
    var start = new LocalDateTime(2020, 1, 23, 0, 12);
    var finish = new LocalDateTime(2020, 12, 8, 12, 44);

    var entity = new AllNodaTypesEntity
    {
        Period = Period.Between(start, finish)
    };

    copyHelper.SaveAll(connection, new[] { entity });

    // Read the row back and verify the round-tripped value:
    var rows = GetAll();

    Assert.AreEqual(entity.Period, (Period)rows[0][0]);
}
public void Test_Instant()
{
    CreateTable("timestamp");

    var copyHelper = new PostgreSQLCopyHelper<AllNodaTypesEntity>("sample", "noda_time_test")
        .MapTimeStamp("col_noda", x => x.Instant);

    var entity = new AllNodaTypesEntity
    {
        Instant = Instant.FromUtc(2011, 1, 2, 0, 0)
    };

    copyHelper.SaveAll(connection, new[] { entity });

    // Read the row back and verify the round-tripped value:
    var rows = GetAll();

    Assert.AreEqual(entity.Instant, (Instant)rows[0][0]);
}
public void Test_OffsetDateTime()
{
    CreateTable("timestamptz");

    var copyHelper = new PostgreSQLCopyHelper<AllNodaTypesEntity>("sample", "noda_time_test")
        .MapTimeStampTz("col_noda", x => x.OffsetDateTime);

    var entity = new AllNodaTypesEntity
    {
        OffsetDateTime = new OffsetDateTime(new LocalDateTime(2001, 11, 21, 0, 32), Offset.FromHours(2))
    };

    copyHelper.SaveAll(connection, new[] { entity });

    var rows = GetAll();

    // timestamptz values come back as Instant, so compare against the converted value:
    Assert.AreEqual(entity.OffsetDateTime.ToInstant(), (Instant)rows[0][0]);
}
public void Test_LocalDateTime()
{
    CreateTable("timestamp");

    var copyHelper = new PostgreSQLCopyHelper<AllNodaTypesEntity>("sample", "noda_time_test")
        .MapTimeStamp("col_noda", x => x.LocalDateTime);

    var entity = new AllNodaTypesEntity
    {
        LocalDateTime = new LocalDateTime(2011, 1, 2, 21, 0, 0)
    };

    copyHelper.SaveAll(connection, new[] { entity });

    var rows = GetAll();

    // The column is read back as an Instant; interpret the written local time as UTC
    // to compute the value we expect to see:
    var expected = entity.LocalDateTime
        .InZoneStrictly(DateTimeZone.Utc)
        .ToInstant();

    Assert.AreEqual(expected, (Instant)rows[0][0]);
}
public void Test_StringArray()
{
    CreateTable("text");

    var copyHelper = new PostgreSQLCopyHelper<StringArrayEntity>("sample", "unit_test")
        .MapArray("col_array", x => x.Array);

    var entity = new StringArrayEntity() { Array = new[] { "A", "B" } };

    var recordsSaved = copyHelper.SaveAll(connection, new[] { entity });

    var result = GetAll();

    // Exactly one row was inserted and reported:
    Assert.AreEqual(1, result.Count);
    Assert.AreEqual(1, recordsSaved);

    // The stored array must be present and element-wise identical:
    Assert.IsNotNull(result[0][0]);

    var storedArray = (String[])result[0][0];

    Assert.AreEqual("A", storedArray[0]);
    Assert.AreEqual("B", storedArray[1]);
}
/// <summary>
/// Reads the IMDB movie titles CSV (skipping the header row) and bulk-inserts
/// all rows into public.movies. Failures are reported to the console
/// (best-effort, matching the original behavior).
/// </summary>
private static void loadMovies()
{
    List<Movie> values = File
        .ReadAllLines(@"C:\Users\Vanya\Desktop\2 семестр\Data retrieval\IR-2019-master\data\IMDB Movie Titles.csv")
        .Skip(1) // skip the CSV header row
        .Select(Movie.FromCsv)
        .ToList();

    var copyHelper = new PostgreSQLCopyHelper<Movie>("public", "movies")
        .MapInteger("id", x => x.Id)
        .MapInteger("year", x => x.Year)
        .MapText("name", x => x.Name);

    using (var connection = new NpgsqlConnection(connString))
    {
        try
        {
            connection.Open();
            copyHelper.SaveAll(connection, values);

            // List.Count property instead of the LINQ Count() extension;
            // the redundant Close() was dropped — Dispose() closes the connection.
            Console.Write(values.Count + " Records Inserted!");
        }
        catch (Exception er)
        {
            Console.Write(er.Message);
        }
    }
}
/// <summary>
/// Writes a NULL ZonedDateTime and verifies the column really is NULL.
/// </summary>
public void Test_ZonedDateTime()
{
    CreateTable("timestamptz");

    var subject = new PostgreSQLCopyHelper<NullableNodaTypesEntity>("sample", "noda_time_test")
        .MapTimeStampTz("col_noda", x => x.ZonedDateTime);

    var entity = new NullableNodaTypesEntity
    {
        ZonedDateTime = null
    };

    subject.SaveAll(connection, new[] { entity });

    var rows = GetAll();

    // The value written was null, so the column must come back as NULL.
    // The previous cast to the Instant struct would have thrown on a NULL
    // column before the assertion could run.
    Assert.That(rows[0][0], Is.Null.Or.EqualTo(DBNull.Value));
}
private void BatchInsert(IEnumerable<Account> accounts)
{
    // Column mappings are declared in the table's column order; the order of
    // Map* calls defines the order of the columns in the COPY stream.
    var copyHelper = new PostgreSQLCopyHelper<Account>("public", "accounts")
        .MapBigInt("id", x => x.Id)
        .MapText("sname", x => x.SName)
        .MapText("fname", x => x.FName)
        .MapText("country", x => x.Country)
        .MapText("city", x => x.City)
        .MapText("phone", x => x.Phone)
        .MapText("email", x => x.EMail)
        .MapInteger("sex", x => (int?)x.Sex)
        .MapTimeStamp("birth", x => x.Birth)
        .MapTimeStamp("joined", x => x.Joined)
        .MapText("status", x => x.Status)
        .MapNullable("premium_start", x => x.PremiumStart, NpgsqlDbType.Bigint)
        .MapNullable("premium_finish", x => x.PremiumFinish, NpgsqlDbType.Bigint)
        .MapArray("interests", x => x.Interests)
        .MapArray("like_ids", x => x.LikeIds)
        .MapArray("like_tss", x => x.LikeTSs);

    using (var dbConnection = new NpgsqlConnection(Configuration.GetConnectionString("DefaultConnection")))
    {
        dbConnection.Open();
        copyHelper.SaveAll(dbConnection, accounts);
    }
}
/// <summary>
/// Writes a NULL OffsetTime and verifies the column really is NULL.
/// </summary>
public void Test_OffsetTime()
{
    CreateTable("timetz");

    var subject = new PostgreSQLCopyHelper<NullableNodaTypesEntity>("sample", "noda_time_test")
        .MapTimeTz("col_noda", x => x.OffsetTime);

    var entity = new NullableNodaTypesEntity
    {
        OffsetTime = null
    };

    subject.SaveAll(connection, new[] { entity });

    var rows = GetAll();

    // The value written was null, so the column must come back as NULL.
    // The previous cast to the OffsetTime struct would have thrown on a NULL
    // column before the assertion could run.
    Assert.That(rows[0][0], Is.Null.Or.EqualTo(DBNull.Value));
}
/// <summary>
/// Writes a NULL LocalDateTime and verifies the column really is NULL.
/// </summary>
public void Test_LocalDateTime()
{
    CreateTable("timestamp");

    var subject = new PostgreSQLCopyHelper<NullableNodaTypesEntity>("sample", "noda_time_test")
        .MapTimeStamp("col_noda", x => x.LocalDateTime);

    var entity = new NullableNodaTypesEntity
    {
        LocalDateTime = null
    };

    subject.SaveAll(connection, new[] { entity });

    var rows = GetAll();

    // The value written was null, so the column must come back as NULL.
    // The previous cast to the Instant struct would have thrown on a NULL
    // column before the assertion could run.
    Assert.That(rows[0][0], Is.Null.Or.EqualTo(DBNull.Value));
}
public void Test_MixedCaseEntity_BulkInsert()
{
    // Quoted identifiers preserve the mixed-case table and column names:
    subject = new PostgreSQLCopyHelper<MixedCaseEntity>("sample", "\"MixedCaseEntity\"")
        .MapInteger("\"Property_One\"", x => x.Property_One)
        .MapText("\"Property_Two\"", x => x.Property_Two);

    var first = new MixedCaseEntity
    {
        Property_One = 44,
        Property_Two = "hello everyone"
    };

    var second = new MixedCaseEntity
    {
        Property_One = 89,
        Property_Two = "Isn't it nice to write in Camel Case!"
    };

    subject.SaveAll(connection, new[] { first, second });

    var result = connection.GetAll("sample", "\"MixedCaseEntity\"");

    // Both rows must be present, non-null, and match what was written:
    Assert.AreEqual(2, result.Count);

    Assert.IsNotNull(result[0][0]);
    Assert.IsNotNull(result[1][0]);

    Assert.AreEqual(first.Property_One, (Int32)result[0][0]);
    Assert.AreEqual(first.Property_Two, (string)result[0][1]);

    Assert.AreEqual(second.Property_One, (Int32)result[1][0]);
    Assert.AreEqual(second.Property_Two, (string)result[1][1]);
}
public void Test_UpperCase_BulkInsert()
{
    // Schema / table names are given in upper case here:
    subject = new PostgreSQLCopyHelper<TestEntity>("SAMPLE", "UNIT_TEST")
        .MapSmallInt("col_smallint", x => x.SmallInt);

    var minEntity = new TestEntity() { SmallInt = Int16.MinValue };
    var maxEntity = new TestEntity() { SmallInt = Int16.MaxValue };

    subject.SaveAll(connection, new[] { minEntity, maxEntity });

    var result = GetAll();

    // Two rows, both non-null, holding the extreme Int16 values:
    Assert.AreEqual(2, result.Count);

    Assert.IsNotNull(result[0][0]);
    Assert.IsNotNull(result[1][0]);

    Assert.AreEqual(Int16.MinValue, (Int16)result[0][0]);
    Assert.AreEqual(Int16.MaxValue, (Int16)result[1][0]);
}
public async Task Write(int clientId, int scenarioId, int partitionId, List<RecommendationProduct> entities)
{
    using (var transaction = Context.Connection.BeginTransaction())
    {
        _logger.Information("Cleaning up entities (client id: {ClientId}, scenario id: {ScenarioId}, partition id: {PartitionId})", clientId, scenarioId, partitionId);

        // Remove any previous results for this partition via the stored procedure:
        using (var deleteCommand = new NpgsqlCommand("fn_delete_result_partition", (NpgsqlConnection)Context.Connection, (NpgsqlTransaction)transaction)
        {
            CommandType = CommandType.StoredProcedure
        })
        {
            deleteCommand.Parameters.AddWithValue("p_client_id", NpgsqlDbType.Integer, clientId);
            deleteCommand.Parameters.AddWithValue("p_scenario_id", NpgsqlDbType.Integer, scenarioId);
            deleteCommand.Parameters.AddWithValue("p_partition_number", NpgsqlDbType.Integer, partitionId);
            await deleteCommand.ExecuteNonQueryAsync();
        }

        _logger.Information("Writing {ProductCount} entities", entities.Count);

        var npgsqlConnection = (NpgsqlConnection)Context.Connection;

        // Bulk-insert products, then their recommendations, then the projections:
        Helper.SaveAll(npgsqlConnection, entities);
        RecommendationRepository.Helper.SaveAll(npgsqlConnection, entities.SelectMany(x => x.Recommendations));
        RecommendationProjectionRepository.Helper.SaveAll(npgsqlConnection, entities.SelectMany(x => x.Recommendations).SelectMany(x => x.Projections));

        transaction.Commit();
    }
}
public void Test_MixedCaseEntity_BulkInsert()
{
    // Quoted identifiers preserve the mixed-case table and column names:
    subject = new PostgreSQLCopyHelper<MixedCaseEntity>("sample", "\"MixedCaseEntity\"")
        .MapInteger("\"Property_One\"", x => x.Property_One)
        .MapText("\"Property_Two\"", x => x.Property_Two);

    var first = new MixedCaseEntity
    {
        Property_One = 44,
        Property_Two = "hello everyone"
    };

    var second = new MixedCaseEntity
    {
        Property_One = 89,
        Property_Two = "Isn't it nice to write in Camel Case!"
    };

    subject.SaveAll(connection, new[] { first, second });

    var result = connection.GetAll("sample", "\"MixedCaseEntity\"");

    // Both rows must be present, non-null, and match what was written:
    Assert.AreEqual(2, result.Count);

    Assert.IsNotNull(result[0][0]);
    Assert.IsNotNull(result[1][0]);

    Assert.AreEqual(first.Property_One, (Int32)result[0][0]);
    Assert.AreEqual(first.Property_Two, (string)result[0][1]);

    Assert.AreEqual(second.Property_One, (Int32)result[1][0]);
    Assert.AreEqual(second.Property_Two, (string)result[1][1]);
}
public void Test_DoubleArray()
{
    CreateTable("double precision");

    var copyHelper = new PostgreSQLCopyHelper<DoubleArrayEntity>("sample", "unit_test")
        .MapArray("col_array", x => x.Array);

    var entity = new DoubleArrayEntity() { Array = new Double[] { 1.32, 2.124 } };

    copyHelper.SaveAll(connection, new[] { entity });

    var result = GetAll();

    // Exactly one non-null row:
    Assert.AreEqual(1, result.Count);
    Assert.IsNotNull(result[0][0]);

    // Floating-point comparisons use a tolerance:
    var storedArray = (Double[])result[0][0];

    Assert.AreEqual(1.32, storedArray[0], 1e-5);
    Assert.AreEqual(2.124, storedArray[1], 1e-5);
}
public void Test_ByteArrayArray()
{
    CreateTable("bytea");

    var copyHelper = new PostgreSQLCopyHelper<ByteArrayArrayEntity>("sample", "unit_test")
        .MapArray("col_array", x => x.Array);

    // A single-element array of byte arrays:
    var entity = new ByteArrayArrayEntity()
    {
        Array = new byte[][] { new byte[] { 1, 2 } }
    };

    copyHelper.SaveAll(connection, new[] { entity });

    var result = GetAll();

    // Exactly one non-null row:
    Assert.AreEqual(1, result.Count);
    Assert.IsNotNull(result[0][0]);

    var storedArray = (byte[][])result[0][0];

    Assert.AreEqual(1, storedArray[0][0]);
    Assert.AreEqual(2, storedArray[0][1]);
}
public ulong StoreExternalIds(PostgreSQLCopyHelper<exid_in_db> copyHelper, IEnumerable<exid_in_db> entities)
{
    // Open a fresh connection, bulk-copy the entities, and report the row count.
    using (var dbConnection = new NpgsqlConnection(connString))
    {
        dbConnection.Open();
        return copyHelper.SaveAll(dbConnection, entities);
    }
}
public ulong StoreCityNUTS(PostgreSQLCopyHelper<city_nuts_in_db> copyHelper, IEnumerable<city_nuts_in_db> entities)
{
    // Open a fresh connection, bulk-copy the entities, and report the row count.
    using (var dbConnection = new NpgsqlConnection(connString))
    {
        dbConnection.Open();
        return copyHelper.SaveAll(dbConnection, entities);
    }
}
public ulong StoreRelationships(PostgreSQLCopyHelper<relationship_in_db> copyHelper, IEnumerable<relationship_in_db> entities)
{
    // Open a fresh connection, bulk-copy the entities, and report the row count.
    using (var dbConnection = new NpgsqlConnection(connString))
    {
        dbConnection.Open();
        return copyHelper.SaveAll(dbConnection, entities);
    }
}
public ulong StorePMIDLinks(PostgreSQLCopyHelper<PMIDLink> copyHelper, IEnumerable<PMIDLink> entities)
{
    // Open a fresh connection, bulk-copy the entities, and report the row count.
    using (var dbConnection = new NpgsqlConnection(connString))
    {
        dbConnection.Open();
        return copyHelper.SaveAll(dbConnection, entities);
    }
}
public ulong StoreIPAddresses(PostgreSQLCopyHelper<ip_address_in_db> copyHelper, IEnumerable<ip_address_in_db> entities)
{
    // Open a fresh connection, bulk-copy the entities, and report the row count.
    using (var dbConnection = new NpgsqlConnection(connString))
    {
        dbConnection.Open();
        return copyHelper.SaveAll(dbConnection, entities);
    }
}
public ulong StoreStudyIds(PostgreSQLCopyHelper<StudyId> copyHelper, IEnumerable<StudyId> entities)
{
    // Stores the study id data in a temporary table and reports the row count.
    using (var dbConnection = new NpgsqlConnection(connString))
    {
        dbConnection.Open();
        return copyHelper.SaveAll(dbConnection, entities);
    }
}
/// <summary>
/// Fetches a user's top artists from Last.fm (paged, up to Constants.ArtistsToIndex),
/// replaces their stored artist rows via bulk COPY, and records the index time.
/// </summary>
private async Task StoreArtistsForUser(User user)
{
    // Throttle Last.fm API usage. Task.Delay keeps the thread free; the
    // previous Thread.Sleep blocked a thread pool thread inside an async method.
    await Task.Delay(1300);

    Console.WriteLine($"Starting artist store for {user.UserNameLastFM}");

    var topArtists = new List<LastArtist>();
    var amountOfPages = Constants.ArtistsToIndex / 1000;

    for (int i = 1; i < amountOfPages + 1; i++)
    {
        var artistResult = await this._lastFMClient.User.GetTopArtists(user.UserNameLastFM, LastStatsTimeSpan.Overall, i, 1000);

        topArtists.AddRange(artistResult);

        // A short page means we've reached the end of the user's artists.
        if (artistResult.Count() < 1000)
        {
            break;
        }

        Statistics.LastfmApiCalls.Inc();
    }

    var now = DateTime.UtcNow;
    var artists = topArtists.Select(a => new Artist
    {
        LastUpdated = now,
        Name = a.Name,
        Playcount = a.PlayCount.Value, // NOTE(review): throws if Last.fm returns a null playcount — confirm upstream guarantees
        UserId = user.UserId
    }).ToList();

    var connString = this._db.Database.GetDbConnection().ConnectionString;

    var copyHelper = new PostgreSQLCopyHelper<Artist>("public", "artists")
        .MapText("name", x => x.Name)
        .MapInteger("user_id", x => x.UserId)
        .MapInteger("playcount", x => x.Playcount)
        .MapTimeStamp("last_updated", x => x.LastUpdated);

    await using (var connection = new NpgsqlConnection(connString))
    {
        connection.Open();

        // Parameterized commands instead of string-interpolated SQL:
        await using var deleteCurrentArtists = new NpgsqlCommand("DELETE FROM public.artists WHERE user_id = @userId;", connection);
        deleteCurrentArtists.Parameters.AddWithValue("userId", user.UserId);
        await deleteCurrentArtists.ExecuteNonQueryAsync();

        copyHelper.SaveAll(connection, artists);

        await using var setIndexTime = new NpgsqlCommand("UPDATE public.users SET last_indexed = @indexTime WHERE user_id = @userId;", connection);
        setIndexTime.Parameters.AddWithValue("indexTime", now);
        setIndexTime.Parameters.AddWithValue("userId", user.UserId);
        await setIndexTime.ExecuteNonQueryAsync();
    }
}
public void PgCopy <TEntity>(PostgreSQLCopyHelper <TEntity> copymap, IEnumerable <TEntity> entities) where TEntity : class
{
    // Resolve the database connection (passing null lets SetDbConnection create it).
    IDbConnection connection = null;
    connection = SetDbConnection(true, connection);

    // Dispose the connection after the bulk copy completes.
    using (connection)
    {
        copymap.SaveAll(connection as NpgsqlConnection, entities);
    }
}
/// <summary>
/// Inserting children before their parents must violate the foreign key constraint.
/// </summary>
public void Test_Issue53_Wrong_Order()
{
    var subject = new PostgreSQLCopyHelper<Child>("sample", "Issue53Child")
        .MapInteger("Id", x => x.Id)
        .MapInteger("ParentId", x => x.ParentId)
        .MapText("Name", x => x.Name);

    const string errorText = "insert or update on table \"issue53child\" violates foreign key constraint \"issue53child_parentid_fkey\"";

    var ex = Assert.Throws<PostgresException>(() => subject.SaveAll(connection, CreateChildData()));

    // Does.Contain asserts a substring match. The previous Is.SupersetOf treated
    // the message as a collection of characters, which passes for any message
    // containing the same character set — far weaker than intended.
    Assert.That(ex.Message, Does.Contain(errorText));
}
/// <summary>
/// Reads covid_19_data.csv from the current directory and bulk-inserts its rows
/// into the covid_observations table, using the CSV header row as column names.
/// </summary>
public static void SeedCSVData()
{
    string fullPath = Environment.CurrentDirectory + "\\covid_19_data.csv";

    using (StreamReader sr = new StreamReader(fullPath))
    {
        string[] headers = sr.ReadLine().Split(',');

        // Splits on commas that are not enclosed in double quotes.
        Regex CSVParser = new Regex(",(?=(?:[^\"]*\"[^\"]*\")*(?![^\"]*\"))");

        // CSV data is machine-readable, so parse with the invariant culture;
        // the previous culture-sensitive Convert calls broke on locales with
        // different date/number formats (CA1305).
        var invariant = System.Globalization.CultureInfo.InvariantCulture;

        List<CovidObservationDatum> tableRows = new List<CovidObservationDatum>();

        while (!sr.EndOfStream)
        {
            var lineData = sr.ReadLine();
            string[] row = CSVParser.Split(lineData);

            var rowData = new CovidObservationDatum
            {
                Id = Convert.ToInt32(row[0], invariant),
                ObservationDate = Convert.ToDateTime(row[1], invariant),
                ProvinceState = row[2],
                Country = row[3],
                LastUpdate = Convert.ToDateTime(row[4], invariant),
                Confirmed = Convert.ToInt32(row[5], invariant),
                Deaths = Convert.ToInt32(row[6], invariant),
                Recovered = Convert.ToInt32(row[7], invariant)
            };

            tableRows.Add(rowData);
        }

        var copyHelper = new PostgreSQLCopyHelper<CovidObservationDatum>("covid_observations")
            .MapInteger(headers[0], x => x.Id)
            .MapDate(headers[1], x => x.ObservationDate)
            .MapText(headers[2], x => x.ProvinceState)
            .MapText(headers[3], x => x.Country)
            .MapDate(headers[4], x => x.LastUpdate)
            .MapInteger(headers[5], x => x.Confirmed)
            .MapInteger(headers[6], x => x.Deaths)
            .MapInteger(headers[7], x => x.Recovered);

        using (var connection = new NpgsqlConnection(Startup.ConnectionString))
        {
            connection.Open();
            copyHelper.SaveAll(connection, tableRows);
        }
    }
}
public void WriteToDB(IEnumerable <pre_223> listPremacitem)
{
    // Map the entity fields onto the pre_223 table columns:
    var copyHelper = new PostgreSQLCopyHelper<pre_223>("pre_223")
        .MapVarchar("high_level_item", x => x.high_level_item)
        .MapVarchar("low_level_item", x => x.low_level_item)
        .MapDouble("numerator", x => x.numerator);

    using (var dbConnection = new NpgsqlConnection(Properties.Settings.Default.CONNECTSTRING_MES))
    {
        dbConnection.Open();
        copyHelper.SaveAll(dbConnection, listPremacitem);
        dbConnection.Close();
    }
}
/// <summary>
/// Replaces the product filters of the scenario referenced by the given entities:
/// deletes the existing rows for that scenario, then bulk-inserts the new ones
/// inside a single transaction.
/// </summary>
public async Task Create(List<ScenarioProductFilter> entities)
{
    // Nothing to write — and no ScenarioId to resolve — for an empty batch.
    // The previous entities.First() call threw on an empty or null list.
    if (entities == null || entities.Count == 0)
    {
        return;
    }

    using (var transaction = Context.Connection.BeginTransaction())
    {
        await DeleteByScenarioId(entities.First().ScenarioId);

        var recommendationCopyHelper = new PostgreSQLCopyHelper<ScenarioProductFilter>("scenario_Product_filter")
            .MapInteger("Product_id", x => x.ProductId)
            .MapInteger("scenario_id", x => x.ScenarioId);

        recommendationCopyHelper.SaveAll((NpgsqlConnection)Context.Connection, entities);

        transaction.Commit();
    }
}
/// <summary>
/// Copies all data to the pre_649_order table via bulk COPY.
/// </summary>
/// <param name="listPremacitem">The order items to insert.</param>
public void WriteToDB(IEnumerable <pre_649_order> listPremacitem)
{
    // Map the entity fields onto the pre_649_order table columns:
    var copyHelper = new PostgreSQLCopyHelper<pre_649_order>("pre_649_order")
        .MapVarchar("item_number", x => x.item_number)
        .MapVarchar("order_number", x => x.order_number)
        .MapDouble("order_qty", x => x.order_qty)
        .MapVarchar("supplier_cd", x => x.supplier_cd)
        .MapDate("order_date", x => x.order_date);

    using (var dbConnection = new NpgsqlConnection(Properties.Settings.Default.CONNECTSTRING_MES))
    {
        dbConnection.Open();
        copyHelper.SaveAll(dbConnection, listPremacitem);
        dbConnection.Close();
    }
}