/// <summary>
/// End-to-end PostgreSQL test: resets the Item/Box/UserRole tables, then exercises
/// BulkInsert, BulkInsertOrUpdate (with progress), BulkUpdate (UpdateByProperties +
/// SetOutputIdentity), BulkDelete, BulkRead, BulkSaveChanges, BatchUpdate/BatchDelete,
/// and the Npgsql 'jsonb' column mapping.
/// </summary>
public void InsertTestPostgreSql(DbServer dbServer, bool isBulk)
{
    ContextUtil.DbServer = dbServer;
    using var context = new TestContext(ContextUtil.GetOptions());

    // Clean the tables and restart their identity sequences so generated Ids begin at 1.
    context.Database.ExecuteSqlRaw($@"DELETE FROM ""{nameof(Item)}""");
    context.Database.ExecuteSqlRaw($@"ALTER SEQUENCE ""{nameof(Item)}_{nameof(Item.ItemId)}_seq"" RESTART WITH 1");
    context.Database.ExecuteSqlRaw($@"DELETE FROM ""{nameof(Box)}""");
    context.Database.ExecuteSqlRaw($@"ALTER SEQUENCE ""{nameof(Box)}_{nameof(Box.BoxId)}_seq"" RESTART WITH 1");
    context.Database.ExecuteSqlRaw($@"DELETE FROM ""{nameof(UserRole)}""");

    var currentTime = DateTime.UtcNow; // default DateTime type: "timestamp with time zone"; DateTime.Now goes with: "timestamp without time zone"

    // Items 1-2: fresh inserts (Id generated by DB).
    var entities = new List<Item>();
    for (int i = 1; i <= 2; i++)
    {
        var entity = new Item
        {
            //ItemId = i,
            Name = "Name " + i,
            Description = "info " + i,
            Quantity = i,
            Price = 0.1m * i,
            TimeUpdated = currentTime,
        };
        entities.Add(entity);
    }

    // Items 2-3: explicit Ids so item 2 is updated and item 3 inserted by BulkInsertOrUpdate.
    var entities2 = new List<Item>();
    for (int i = 2; i <= 3; i++)
    {
        var entity = new Item
        {
            ItemId = i,
            Name = "Name " + i,
            Description = "UPDATE " + i,
            Quantity = i,
            Price = 0.1m * i,
            TimeUpdated = currentTime,
        };
        entities2.Add(entity);
    }

    // Items 3-4: no Ids — matched by Name via UpdateByProperties below.
    var entities3 = new List<Item>();
    for (int i = 3; i <= 4; i++)
    {
        var entity = new Item
        {
            //ItemId = i,
            Name = "Name " + i,
            Description = "CHANGE " + i,
            Quantity = i,
            Price = 0.1m * i,
            TimeUpdated = currentTime,
        };
        entities3.Add(entity);
    }

    // Items 5-6: added through the change tracker and flushed by BulkSaveChanges.
    var entities56 = new List<Item>();
    for (int i = 5; i <= 6; i++)
    {
        var entity = new Item
        {
            //ItemId = i,
            Name = "Name " + i,
            Description = "CHANGE " + i,
            Quantity = i,
            Price = 0.1m * i,
            TimeUpdated = currentTime,
        };
        entities56.Add(entity);
    }

    // INSERT
    context.BulkInsert(entities);
    Assert.Equal("info 1", context.Items.Where(a => a.Name == "Name 1").AsNoTracking().FirstOrDefault().Description);
    Assert.Equal("info 2", context.Items.Where(a => a.Name == "Name 2").AsNoTracking().FirstOrDefault().Description);

    // UPDATE
    context.BulkInsertOrUpdate(entities2, new BulkConfig() { NotifyAfter = 1 }, (a) => WriteProgress(a));
    Assert.Equal("UPDATE 2", context.Items.Where(a => a.Name == "Name 2").AsNoTracking().FirstOrDefault().Description);
    Assert.Equal("UPDATE 3", context.Items.Where(a => a.Name == "Name 3").AsNoTracking().FirstOrDefault().Description);

    var configUpdateBy = new BulkConfig { UpdateByProperties = new List<string> { nameof(Item.Name) } };
    configUpdateBy.SetOutputIdentity = true;
    context.BulkUpdate(entities3, configUpdateBy);
    Assert.Equal(3, entities3[0].ItemId); // to test Output
    Assert.Equal(4, entities3[1].ItemId);
    Assert.Equal("CHANGE 3", context.Items.Where(a => a.Name == "Name 3").AsNoTracking().FirstOrDefault().Description);
    Assert.Equal("CHANGE 4", context.Items.Where(a => a.Name == "Name 4").AsNoTracking().FirstOrDefault().Description);

    // Test Multiple KEYS
    var userRoles = new List<UserRole> { new UserRole { Description = "Info" } };
    context.BulkInsertOrUpdate(userRoles);

    // DELETE
    context.BulkDelete(new List<Item>() { entities2[1] }, configUpdateBy);

    // READ
    var secondEntity = new List<Item>() { new Item { Name = entities[1].Name } };
    context.BulkRead(secondEntity, configUpdateBy);
    Assert.Equal(2, secondEntity.FirstOrDefault().ItemId);
    Assert.Equal("UPDATE 2", secondEntity.FirstOrDefault().Description);

    // SAVE CHANGES
    context.AddRange(entities56);
    context.BulkSaveChanges();
    Assert.Equal(5, entities56[0].ItemId);

    // BATCH
    var query = context.Items.AsQueryable().Where(a => a.ItemId <= 1);
    query.BatchUpdate(new Item { Description = "UPDATE N", Price = 1.5m } /*, updateColumns*/);

    var queryJoin = context.ItemHistories.Where(p => p.Item.Description == "UPDATE 2");
    queryJoin.BatchUpdate(new ItemHistory { Remark = "Rx", });

    var query2 = context.Items.AsQueryable().Where(a => a.ItemId > 1 && a.ItemId < 3);
    // FIX: was 'query.BatchDelete()', which re-used the 'ItemId <= 1' filter and left query2 unused.
    query2.BatchDelete();

    var descriptionsToDelete = new List<string> { "info" };
    var query3 = context.Items.Where(a => descriptionsToDelete.Contains(a.Description));
    query3.BatchDelete();

    // for type 'jsonb'
    JsonDocument jsonbDoc = JsonDocument.Parse(@"{ ""ModelEL"" : ""Square""}");
    var box = new Box { DocumentContent = jsonbDoc, ElementContent = jsonbDoc.RootElement };
    context.BulkInsert(new List<Box> { box });

    JsonDocument jsonbDoc2 = JsonDocument.Parse(@"{ ""ModelEL"" : ""Circle""}");
    var boxQuery = context.Boxes.AsQueryable().Where(a => a.BoxId <= 1);
    boxQuery.BatchUpdate(new Box { DocumentContent = jsonbDoc2, ElementContent = jsonbDoc2.RootElement });

    //var incrementStep = 100;
    //var suffix = " Concatenated";
    //query.BatchUpdate(a => new Item { Name = a.Name + suffix, Quantity = a.Quantity + incrementStep }); // example of BatchUpdate Increment/Decrement value in variable
}
/// <summary>
/// Synchronous dispatcher: builds the TableInfo for the request and routes it
/// to the matching SqlBulkOperation call, wrapped in a diagnostics activity.
/// </summary>
public static void Execute(DbContext context, Type type, IList<object> entities, OperationType operationType, BulkConfig bulkConfig, Action<decimal> progress)
{
    using (ActivitySources.StartExecuteActivity((EFCore.BulkExtensions.OperationType)operationType, entities.Count))
    {
        // An empty list is a no-op for everything except Truncate, which targets the whole table.
        if (operationType != OperationType.Truncate && entities.Count == 0)
        {
            return;
        }

        TableInfo tableInfo = TableInfo.CreateInstance(context, type, entities, (EFCore.BulkExtensions.OperationType)operationType, bulkConfig);

        switch (operationType)
        {
            case OperationType.Insert when !tableInfo.BulkConfig.SetOutputIdentity:
                SqlBulkOperation.Insert(context, type, entities, tableInfo, progress);
                break;
            case OperationType.Read:
                SqlBulkOperation.Read(context, type, entities, tableInfo, progress);
                break;
            case OperationType.Truncate:
                SqlBulkOperation.Truncate(context, tableInfo);
                break;
            default:
                // Insert-with-output-identity and all update/delete/merge variants go through Merge.
                SqlBulkOperation.Merge(context, type, entities, tableInfo, operationType, progress);
                break;
        }
    }
}
/// <summary>
/// Opens the context's database connection (if not already open) and returns it
/// typed as a SqliteConnection.
/// </summary>
internal static async Task<SqliteConnection> OpenAndGetSqliteConnectionAsync(DbContext context, BulkConfig bulkConfig, CancellationToken cancellationToken)
{
    await context.Database.OpenConnectionAsync(cancellationToken).ConfigureAwait(false);
    var dbConnection = context.Database.GetDbConnection();
    return (SqliteConnection)dbConnection;
}
/// <summary>
/// Creates a System.Data.SqlClient.SqlBulkCopy for the given connection,
/// enlisting it in the supplied EF transaction when one is present.
/// </summary>
private static System.Data.SqlClient.SqlBulkCopy GetSqlBulkCopy(System.Data.SqlClient.SqlConnection sqlConnection, IDbContextTransaction transaction, BulkConfig config)
{
    var sqlBulkCopyOptions = (System.Data.SqlClient.SqlBulkCopyOptions)config.SqlBulkCopyOptions;
    // No transaction -> null is passed through to SqlBulkCopy.
    var sqlTransaction = transaction == null
        ? null
        : (System.Data.SqlClient.SqlTransaction)transaction.GetUnderlyingTransaction(config);
    return new System.Data.SqlClient.SqlBulkCopy(sqlConnection, sqlBulkCopyOptions, sqlTransaction);
}
/// <summary>
/// Exercises BulkInsertOrUpdateOrDelete (sync variant): seeds even-numbered Items,
/// synchronizes the table against that list (deleting everything else except one
/// filtered-out row), and verifies counts, stats, and the SetSynchronizeFilter behavior.
/// </summary>
private void RunInsertOrUpdateOrDelete(bool isBulk)
{
    using var context = new TestContext(ContextUtil.GetOptions());
    var entities = new List<Item>();
    var dateTimeNow = DateTime.Now;
    // Only even ItemIds 2..EntitiesNumber are supplied, so odd rows become delete candidates.
    for (int i = 2; i <= EntitiesNumber; i += 2)
    {
        entities.Add(new Item
        {
            ItemId = i,
            Name = "name InsertOrUpdateOrDelete " + i,
            Description = "info",
            Quantity = i,
            Price = i / (i % 5 + 1),
            TimeUpdated = dateTimeNow
        });
    }
    int? keepEntityItemId = null;
    if (isBulk)
    {
        var bulkConfig = new BulkConfig() { SetOutputIdentity = true, CalculateStats = true };
        // Row with ItemId == 3 is excluded from synchronization, so it survives the delete phase.
        keepEntityItemId = 3;
        bulkConfig.SetSynchronizeFilter<Item>(e => e.ItemId != keepEntityItemId.Value);
        context.BulkInsertOrUpdateOrDelete(entities, bulkConfig, (a) => WriteProgress(a));
        // Stats assume the table was pre-populated with EntitiesNumber rows by an earlier
        // test step — presumably all even rows already exist (0 inserts) — TODO confirm.
        Assert.Equal(0, bulkConfig.StatsInfo.StatsNumberInserted);
        Assert.Equal(EntitiesNumber / 2, bulkConfig.StatsInfo.StatsNumberUpdated);
        Assert.Equal((EntitiesNumber / 2) - 1, bulkConfig.StatsInfo.StatsNumberDeleted);
    }
    else
    {
        // EF-only equivalent: remove rows not in 'entities', then add/update the rest.
        var existingItems = context.Items;
        var removedItems = existingItems.Where(x => !entities.Any(y => y.ItemId == x.ItemId));
        context.Items.RemoveRange(removedItems);
        context.Items.AddRange(entities);
        context.SaveChanges();
    }

    // TEST
    int entitiesCount = context.Items.Count();
    Item firstEntity = context.Items.OrderBy(a => a.ItemId).FirstOrDefault();
    Item lastEntity = context.Items.OrderByDescending(a => a.ItemId).FirstOrDefault();
    // Half the rows remain, plus the filtered-out row when the bulk path kept it.
    Assert.Equal((EntitiesNumber / 2) + (keepEntityItemId != null ? 1 : 0), entitiesCount);
    Assert.NotNull(firstEntity);
    Assert.Equal("name InsertOrUpdateOrDelete 2", firstEntity.Name);
    Assert.NotNull(lastEntity);
    Assert.Equal("name InsertOrUpdateOrDelete " + EntitiesNumber, lastEntity.Name);
    if (keepEntityItemId != null)
    {
        Assert.NotNull(context.Items.Where(x => x.ItemId == keepEntityItemId.Value).FirstOrDefault());
    }
    if (isBulk)
    {
        // Synchronizing against an empty list deletes everything except the filtered row (ItemId 3).
        var bulkConfig = new BulkConfig() { SetOutputIdentity = true, CalculateStats = true };
        bulkConfig.SetSynchronizeFilter<Item>(e => e.ItemId != keepEntityItemId.Value);
        context.BulkInsertOrUpdateOrDelete(new List<Item>(), bulkConfig);
        var storedEntities = context.Items.ToList();
        Assert.Single(storedEntities);
        Assert.Equal(3, storedEntities[0].ItemId);
    }
}
/// <summary>
/// Async insert test: creates Items each with two ItemHistory children, inserts the
/// parents (capturing DB-generated identities via SetOutputIdentity), back-fills the
/// child FKs, inserts the children, and verifies the final row count and last name.
/// SQL Server and SQLite take different transaction/config paths; the non-bulk path
/// uses plain EF AddRange + SaveChanges.
/// </summary>
private async Task RunInsertAsync(bool isBulk)
{
    using var context = new TestContext(ContextUtil.GetOptions());
    var entities = new List<Item>();
    var subEntities = new List<ItemHistory>();
    for (int i = 1; i < EntitiesNumber; i++)
    {
        var entity = new Item
        {
            // Bulk path pre-sets Ids; EF path leaves 0 so the DB generates them.
            ItemId = isBulk ? i : 0,
            Name = "name " + i,
            Description = "info " + Guid.NewGuid().ToString().Substring(0, 3),
            Quantity = i % 10,
            Price = i / (i % 5 + 1),
            TimeUpdated = DateTime.Now,
            ItemHistories = new List<ItemHistory>()
        };
        var subEntity1 = new ItemHistory { ItemHistoryId = SeqGuid.Create(), Remark = $"some more info {i}.1" };
        var subEntity2 = new ItemHistory { ItemHistoryId = SeqGuid.Create(), Remark = $"some more info {i}.2" };
        entity.ItemHistories.Add(subEntity1);
        entity.ItemHistories.Add(subEntity2);
        entities.Add(entity);
    }
    if (isBulk)
    {
        if (ContextUtil.DbServer == DbServer.SqlServer)
        {
            using var transaction = await context.Database.BeginTransactionAsync();
            var bulkConfig = new BulkConfig
            {
                //PreserveInsertOrder = true, // true is default
                SetOutputIdentity = true,
                BatchSize = 4000,
                CalculateStats = true
            };
            await context.BulkInsertAsync(entities, bulkConfig, (a) => WriteProgress(a));
            Assert.Equal(EntitiesNumber - 1, bulkConfig.StatsInfo.StatsNumberInserted);
            Assert.Equal(0, bulkConfig.StatsInfo.StatsNumberUpdated);
            Assert.Equal(0, bulkConfig.StatsInfo.StatsNumberDeleted);
            foreach (var entity in entities)
            {
                foreach (var subEntity in entity.ItemHistories)
                {
                    subEntity.ItemId = entity.ItemId; // setting FK to match its linked PK that was generated in DB
                }
                subEntities.AddRange(entity.ItemHistories);
            }
            await context.BulkInsertAsync(subEntities);
            transaction.Commit();
        }
        else if (ContextUtil.DbServer == DbServer.Sqlite)
        {
            using var transaction = context.Database.BeginTransaction();
            var bulkConfig = new BulkConfig()
            {
                SetOutputIdentity = true,
            };
            await context.BulkInsertAsync(entities, bulkConfig);
            foreach (var entity in entities)
            {
                foreach (var subEntity in entity.ItemHistories)
                {
                    subEntity.ItemId = entity.ItemId; // setting FK to match its linked PK that was generated in DB
                }
                subEntities.AddRange(entity.ItemHistories);
            }
            await context.BulkInsertAsync(subEntities, bulkConfig);
            transaction.Commit();
        }
    }
    else
    {
        await context.Items.AddRangeAsync(entities);
        await context.SaveChangesAsync();
    }

    // TEST
    int entitiesCount = await context.Items.CountAsync(); // = ItemsCountQuery(context);
    Item lastEntity = context.Items.OrderByDescending(a => a.ItemId).FirstOrDefault(); // = LastItemQuery(context);
    Assert.Equal(EntitiesNumber - 1, entitiesCount);
    Assert.NotNull(lastEntity);
    Assert.Equal("name " + (EntitiesNumber - 1), lastEntity.Name);
}
/// <summary>
/// Opens the context's database connection (if not already open) and returns the
/// underlying DbConnection resolved through the BulkConfig.
/// </summary>
internal static async Task<DbConnection> OpenAndGetSqlConnectionAsync(DbContext context, BulkConfig config, CancellationToken cancellationToken)
{
    await context.Database.OpenConnectionAsync(cancellationToken).ConfigureAwait(false);
    var connection = context.GetUnderlyingConnection(config);
    return connection;
}
/// <summary>Delegates a bulk update of <paramref name="entities"/> to the underlying context.</summary>
public virtual void BulkUpdate(IList<TEntity> entities, BulkConfig bulkConfig = null, Action<decimal> progress = null)
{
    _db.BulkUpdateSave(entities, bulkConfig, progress);
}
/// <summary>Delegates an async bulk insert-or-update of <paramref name="entities"/> to the underlying context.</summary>
public virtual Task BulkInsertOrUpdateAsync(IList<TEntity> entities, BulkConfig bulkConfig = null, Action<decimal> progress = null)
{
    // Passthrough: the returned Task is awaited by the caller.
    return _db.BulkInsertOrUpdateSaveAsync(entities, bulkConfig, progress);
}
/// <summary>Asynchronously synchronizes the table with <paramref name="entities"/> (insert, update, delete).</summary>
public async Task BulkInsertOrUpdateOrDeleteAsync(IList<TEntity> entities, BulkConfig config = null, Action<decimal> progress = null)
    => await Context.BulkInsertOrUpdateOrDeleteAsync(entities, config, progress);
/// <summary>Asynchronously populates <paramref name="entities"/> from matching database rows.</summary>
public async Task BulkReadAsync(IList<TEntity> entities, BulkConfig config = null, Action<decimal> progress = null)
    => await Context.BulkReadAsync(entities, config, progress);
/// <summary>Populates <paramref name="entities"/> from matching database rows.</summary>
public void BulkRead(IList<TEntity> entities, BulkConfig config = null, Action<decimal> progress = null)
    => Context.BulkRead(entities, config, progress);
/// <summary>Synchronizes the table with <paramref name="entities"/> (insert, update, delete).</summary>
public void BulkInsertOrUpdateOrDelete(IList<TEntity> entities, BulkConfig config = null, Action<decimal> progress = null)
    => Context.BulkInsertOrUpdateOrDelete(entities, config, progress);
/// <summary>
/// Bulk insert-or-update of <paramref name="entities"/> inside an execution strategy
/// (required when retry-on-failure is enabled) and an explicit transaction.
/// Returns the same list; with SetOutputIdentity the entities carry their DB-generated Ids.
/// </summary>
/// <param name="entities">Entities to upsert; null or empty is a no-op.</param>
/// <param name="updatePropertiesBy">NOTE(review): currently unused — match is by primary key.</param>
public virtual async Task<IList<T>> BulkInsertOrUpdateAsync(IList<T> entities, params Expression<Func<T, object>>[] updatePropertiesBy)
{
    if (entities != null && entities.Count > 0)
    {
        var strategy = _dbContext.Database.CreateExecutionStrategy();
        await strategy.ExecuteAsync(async () =>
        {
            // FIX: use the async transaction API end-to-end instead of the blocking
            // BeginTransaction()/Commit()/Rollback() calls inside an async lambda.
            await using var transaction = await _dbContext.Database.BeginTransactionAsync();
            try
            {
                var bulkConfig = new BulkConfig() { UseTempDB = true, SetOutputIdentity = true, CalculateStats = true };
                await _dbContext.BulkInsertOrUpdateAsync(entities, bulkConfig);
                await transaction.CommitAsync();
            }
            catch (Exception ex)
            {
                // FIX: pass the exception itself so the full stack trace is logged;
                // previously ex.Message was used as the message template (CA2254) and
                // the exception object was lost.
                _logger.LogError(ex, "BulkInsertOrUpdateAsync failed; rolling back transaction");
                await transaction.RollbackAsync();
                throw;
            }
        });
    }
    return entities;
}
/// <summary>
/// Tests inserting a TPT (table-per-type) hierarchy with bulk operations: rows are
/// first bulk-inserted into the base 'Log' table (keeping client-side identities via
/// KeepIdentity + CustomDestinationTableName), then into the derived 'LogPersonReport'
/// table with the remaining columns.
/// </summary>
private void TablePerTypeInsertTest()
{
    ContextUtil.DbServer = DbServer.SqlServer;
    using var context = new TestContext(ContextUtil.GetOptions());
    context.LogPersonReports.Add(new LogPersonReport { }); // used for initial add so that after RESEED it starts from 1, not 0
    context.SaveChanges();

    // Clear both tables and reset the base table's identity counter.
    context.Truncate<LogPersonReport>();
    context.Database.ExecuteSqlRaw($"DELETE FROM {nameof(Log)}");
    context.Database.ExecuteSqlRaw($"DBCC CHECKIDENT('{nameof(Log)}', RESEED, 0);");

    int nextLogId = GetLastRowId(context, tableName: nameof(Log));
    int numberOfNewToInsert = 1000;
    var entities = new List<LogPersonReport>();
    for (int i = 1; i <= numberOfNewToInsert; i++)
    {
        nextLogId++; // OPTION 1.
        var entity = new LogPersonReport
        {
            LogId = nextLogId, // OPTION 1.
            PersonId = (i % 22),
            RegBy = 15,
            CreatedDate = DateTime.Now,

            ReportId = (i % 22) * 10,
            LogPersonReportTypeId = 4,
        };
        entities.Add(entity);
    }

    // Config for the base 'Log' table: only base-table columns, identity values kept as provided.
    var bulkConfigBase = new BulkConfig
    {
        SqlBulkCopyOptions = Microsoft.Data.SqlClient.SqlBulkCopyOptions.KeepIdentity, // OPTION 1. - to ensure insert order is kept the same since SqlBulkCopy does not guarantee it.
        CustomDestinationTableName = nameof(Log),
        PropertiesToInclude = new List<string>
        {
            nameof(LogPersonReport.LogId),
            nameof(LogPersonReport.PersonId),
            nameof(LogPersonReport.RegBy),
            nameof(LogPersonReport.CreatedDate)
        }
    };
    // Config for the derived table: key plus derived-only columns.
    var bulkConfig = new BulkConfig
    {
        PropertiesToInclude = new List<string>
        {
            nameof(LogPersonReport.LogId),
            nameof(LogPersonReport.ReportId),
            nameof(LogPersonReport.LogPersonReportTypeId)
        }
    };
    context.BulkInsert(entities, bulkConfigBase, type: typeof(Log)); // to base 'Log' table
    //foreach(var entity in entities) { // OPTION 2. Could be set here if Id of base table Log was set by Db (when Op.2. used 'Option 1.' have to be commented out)
    //    entity.LogId = ++nextLogId;
    //}
    context.BulkInsert(entities, bulkConfig); // to 'LogPersonReport' table
    Assert.Equal(nextLogId, context.LogPersonReports.OrderByDescending(a => a.LogId).FirstOrDefault().LogId);
}
/// <summary>
/// Older sync insert test (explicit negative-Id ordering variant): inserts Items with
/// two ItemHistory children each, back-fills child FKs from the DB-generated parent Ids,
/// then verifies the total count and the last inserted name with a fresh context.
/// </summary>
private void RunInsert(bool isBulkOperation)
{
    using (var context = new TestContext(ContextUtil.GetOptions()))
    {
        var entities = new List<Item>();
        var subEntities = new List<ItemHistory>();
        // Bulk path pre-fills temporary negative Ids (j runs -(N-1)..-1) to pin insert order.
        for (int i = 1, j = -(EntitiesNumber - 1); i < EntitiesNumber; i++, j++)
        {
            var entity = new Item
            {
                ItemId = isBulkOperation ? j : 0,
                Name = "name " + i,
                Description = "info " + Guid.NewGuid().ToString().Substring(0, 3),
                Quantity = i % 10,
                Price = i / (i % 5 + 1),
                TimeUpdated = DateTime.Now,
                ItemHistories = new List<ItemHistory>()
            };
            var subEntity1 = new ItemHistory { ItemHistoryId = SeqGuid.Create(), Remark = $"some more info {i}.1" };
            var subEntity2 = new ItemHistory { ItemHistoryId = SeqGuid.Create(), Remark = $"some more info {i}.2" };
            entity.ItemHistories.Add(subEntity1);
            entity.ItemHistories.Add(subEntity2);
            entities.Add(entity);
        }
        if (isBulkOperation)
        {
            if (ContextUtil.DbServer == DbServer.SqlServer)
            {
                using (var transaction = context.Database.BeginTransaction())
                {
                    context.BulkInsert(
                        entities,
                        new BulkConfig { PreserveInsertOrder = true, SetOutputIdentity = true, BatchSize = 4000, UseTempDB = true },
                        (a) => WriteProgress(a)
                        );
                    foreach (var entity in entities)
                    {
                        foreach (var subEntity in entity.ItemHistories)
                        {
                            subEntity.ItemId = entity.ItemId; // setting FK to match its linked PK that was generated in DB
                        }
                        subEntities.AddRange(entity.ItemHistories);
                    }
                    context.BulkInsert(subEntities);
                    transaction.Commit();
                }
            }
            else if (ContextUtil.DbServer == DbServer.Sqlite)
            {
                // SQLite path shares the raw connection/transaction with the BulkConfig.
                using (var connection = (SqliteConnection)context.Database.GetDbConnection())
                {
                    connection.Open();
                    using (var transaction = connection.BeginTransaction())
                    {
                        var bulkConfig = new BulkConfig()
                        {
                            SqliteConnection = connection,
                            SqliteTransaction = transaction,
                            SetOutputIdentity = true,
                        };
                        context.BulkInsert(entities, bulkConfig);
                        foreach (var entity in entities)
                        {
                            foreach (var subEntity in entity.ItemHistories)
                            {
                                subEntity.ItemId = entity.ItemId; // setting FK to match its linked PK that was generated in DB
                            }
                            subEntities.AddRange(entity.ItemHistories);
                        }
                        // Children carry pre-set Guid PKs, so no output identity needed.
                        bulkConfig.SetOutputIdentity = false;
                        context.BulkInsert(subEntities, bulkConfig);
                        transaction.Commit();
                    }
                }
            }
        }
        else
        {
            context.Items.AddRange(entities);
            context.SaveChanges();
        }
    }

    // Verify with a fresh context so nothing is served from the change tracker.
    using (var context = new TestContext(ContextUtil.GetOptions()))
    {
        var temp = context.ItemHistories.FirstOrDefault();
        int entitiesCount = ItemsCountQuery(context);
        //Item lastEntity = LastItemQuery(context);
        Item lastEntity = context.Items.OrderByDescending(a => a.ItemId).FirstOrDefault();
        Assert.Equal(EntitiesNumber - 1, entitiesCount);
        Assert.NotNull(lastEntity);
        Assert.Equal("name " + (EntitiesNumber - 1), lastEntity.Name);
    }
}
/// <summary>
/// Opens the context's database connection (if not already open) and returns it
/// typed as an NpgsqlConnection.
/// </summary>
internal static NpgsqlConnection OpenAndGetNpgsqlConnection(DbContext context, BulkConfig bulkConfig)
{
    context.Database.OpenConnection();
    var dbConnection = context.Database.GetDbConnection();
    return (NpgsqlConnection)dbConnection;
}
/// <summary>
/// Maps incoming patient models (with their encounter/regimen/lab/fingerprint children)
/// to entities and persists them with bulk upserts inside one transaction. Parent Ids
/// produced by SetOutputIdentity are propagated to the children before their upserts.
/// Returns dtCount (currently always 0 — NOTE(review): never incremented; confirm the
/// intended return value with callers before changing it). Returns 0 on any exception.
/// </summary>
public int AddPatients(List<PatientDemographyModel> patients)
{
    var dtCount = 0;
    var entities = new List<PatientDemography>();
    try
    {
        // Map each model and its child collections onto the entity graph.
        patients.ForEach(p =>
        {
            var pEntity = mapper.Map<PatientDemographyModel, PatientDemography>(p);
            if (p.HivEncounters.Any())
            {
                var encounterEntities = new List<HivEncounter>();
                p.HivEncounters.ToList().ForEach(he =>
                {
                    var hEntity = mapper.Map<HivEncounterModel, HivEncounter>(he);
                    encounterEntities.Add(hEntity);
                });
                pEntity.HivEncounter = encounterEntities;
            }
            if (p.PatientRegimens.Any())
            {
                var regimenEntities = new List<PatientRegimen>();
                p.PatientRegimens.ToList().ForEach(re =>
                {
                    var rEntity = mapper.Map<PatientRegimenModel, PatientRegimen>(re);
                    regimenEntities.Add(rEntity);
                });
                pEntity.PatientRegimen = regimenEntities;
            }
            if (p.LaboratoryReports.Any())
            {
                var labEntities = new List<LaboratoryReport>();
                p.LaboratoryReports.ToList().ForEach(re =>
                {
                    var rEntity = mapper.Map<LaboratoryReportModel, LaboratoryReport>(re);
                    labEntities.Add(rEntity);
                });
                pEntity.LaboratoryReport = labEntities;
            }
            if (p.FingerPrints.Any())
            {
                var fingerEntities = new List<FingerPrint>();
                p.FingerPrints.ToList().ForEach(re =>
                {
                    var fEntity = mapper.Map<FingerPrintModel, FingerPrint>(re);
                    fingerEntities.Add(fEntity);
                });
                pEntity.FingerPrint = fingerEntities;
            }
            entities.Add(pEntity);
        });

        using (var transaction = _context.Database.BeginTransaction())
        {
            var bulkConfig = new BulkConfig { PreserveInsertOrder = true, SetOutputIdentity = true };
            _context.BulkInsertOrUpdate(entities, bulkConfig);
            entities.ForEach(entity =>
            {
                // Propagate the parent's generated Id to each child collection, then upsert it.
                entity.HivEncounter.ToList().ForEach(e => { e.PatientId = entity.Id; });
                _context.BulkInsertOrUpdate(entity.HivEncounter.ToList());
                entity.PatientRegimen.ToList().ForEach(r => { r.PatientId = entity.Id; });
                _context.BulkInsertOrUpdate(entity.PatientRegimen.ToList());
                entity.LaboratoryReport.ToList().ForEach(l => { l.PatientId = entity.Id; });
                _context.BulkInsertOrUpdate(entity.LaboratoryReport.ToList());
                entity.FingerPrint.ToList().ForEach(fi => { fi.PatientId = entity.Id; });
                _context.BulkInsertOrUpdate(entity.FingerPrint.ToList());
            });
            // FIX: Commit was previously inside the per-entity ForEach lambda, committing
            // the transaction on the first iteration and throwing on every subsequent one.
            transaction.Commit();
        }
        return dtCount;
    }
    catch (Exception)
    {
        // Swallows all errors by design of the original code; consider logging before returning.
        return 0;
    }
}
/// <summary>
/// Opens the context's database connection (if not already open) and returns the
/// underlying DbConnection resolved through the BulkConfig.
/// </summary>
internal static DbConnection OpenAndGetSqlConnection(DbContext context, BulkConfig config)
{
    context.Database.OpenConnection();
    var connection = context.GetUnderlyingConnection(config);
    return connection;
}
/// <summary>
/// Imports patient data: for each model, resolves (or creates) the PatientDemography row
/// keyed by PatientIdentifier + SiteId, then bulk-upserts the child collections
/// (encounters, regimens, lab reports, fingerprints) against that patient Id inside a
/// transaction. Returns the number of patients processed, or 0 on any exception.
/// </summary>
public int TrackPatientData(List<PatientDemographyModel> patients)
{
    try
    {
        var processed = 0;
        if (!patients.Any())
        {
            return 0;
        }
        patients.ForEach(p =>
        {
            long patientId = 0;
            // Duplicate check is case-insensitive on the identifier, scoped to the site.
            var duplicates = _context.PatientDemography.Where(m => m.PatientIdentifier.Trim().ToLower() == p.PatientIdentifier.Trim().ToLower() && m.SiteId == p.SiteId).ToList();
            if (!duplicates.Any())
            {
                // New patient: resolve the site by its external FacilityId before inserting.
                var sites = _context.Site.Where(m => m.SiteId.Trim().ToLower() == p.FacilityId.Trim().ToLower()).ToList();
                if (sites.Any())
                {
                    var patientEntity = mapper.Map<PatientDemographyModel, PatientDemography>(p);
                    patientEntity.SiteId = sites[0].Id;
                    _context.PatientDemography.Add(patientEntity);
                    _context.SaveChanges();
                    patientId = patientEntity.Id;
                }
            }
            else
            {
                patientId = duplicates[0].Id;
            }
            // patientId stays 0 (and children are skipped) when the site lookup failed.
            if (patientId > 0)
            {
                using (var transaction = _context.Database.BeginTransaction())
                {
                    var bulkConfig = new BulkConfig { PreserveInsertOrder = true, SetOutputIdentity = true };
                    if (p.HivEncounters.Any())
                    {
                        var encounterEntities = new List<HivEncounter>();
                        p.HivEncounters.ToList().ForEach(he =>
                        {
                            var hEntity = mapper.Map<HivEncounterModel, HivEncounter>(he);
                            hEntity.PatientId = patientId;
                            encounterEntities.Add(hEntity);
                        });
                        _context.BulkInsertOrUpdate(encounterEntities);
                    }
                    if (p.PatientRegimens.Any())
                    {
                        var regimenEntities = new List<PatientRegimen>();
                        p.PatientRegimens.ToList().ForEach(re =>
                        {
                            var rEntity = mapper.Map<PatientRegimenModel, PatientRegimen>(re);
                            rEntity.PatientId = patientId;
                            regimenEntities.Add(rEntity);
                        });
                        _context.BulkInsertOrUpdate(regimenEntities);
                    }
                    if (p.LaboratoryReports.Any())
                    {
                        var labEntities = new List<LaboratoryReport>();
                        p.LaboratoryReports.ToList().ForEach(re =>
                        {
                            var lEntity = mapper.Map<LaboratoryReportModel, LaboratoryReport>(re);
                            lEntity.PatientId = patientId;
                            labEntities.Add(lEntity);
                        });
                        _context.BulkInsertOrUpdate(labEntities);
                    }
                    if (p.FingerPrints.Any())
                    {
                        var fingerEntities = new List<FingerPrint>();
                        p.FingerPrints.ToList().ForEach(re =>
                        {
                            var fEntity = mapper.Map<FingerPrintModel, FingerPrint>(re);
                            fEntity.PatientId = patientId;
                            fingerEntities.Add(fEntity);
                        });
                        _context.BulkInsertOrUpdate(fingerEntities);
                    }
                    transaction.Commit();
                    processed++;
                }
            }
        });
        // NOTE(review): bulk operations above save directly; this SaveChanges appears to
        // flush nothing new — presumably defensive; confirm before removing.
        _context.SaveChanges();
        return processed;
    }
    catch (Exception ex)
    {
        // NOTE(review): exception is swallowed and 0 returned; 'ex' is unused — consider logging.
        return 0;
    }
}
/// <summary>
/// Creates a Microsoft.Data.SqlClient.SqlBulkCopy for the given connection,
/// enlisting it in the supplied EF transaction when one is present.
/// </summary>
private static Microsoft.Data.SqlClient.SqlBulkCopy GetSqlBulkCopy(Microsoft.Data.SqlClient.SqlConnection sqlConnection, IDbContextTransaction transaction, BulkConfig config)
{
    var sqlBulkCopyOptions = config.SqlBulkCopyOptions;
    // No transaction -> null is passed through to SqlBulkCopy.
    var sqlTransaction = transaction == null
        ? null
        : (Microsoft.Data.SqlClient.SqlTransaction)transaction.GetUnderlyingTransaction(config);
    return new Microsoft.Data.SqlClient.SqlBulkCopy(sqlConnection, sqlBulkCopyOptions, sqlTransaction);
}
/// <summary>
/// Proof-of-concept SaveChanges replacement: groups tracked entries into per-type/state
/// operations, bulk-inserts each group (Members/Spans only, Added state only), then uses
/// reflection to copy the freshly generated parent Ids into the matching foreign-key
/// properties of each parent's child collection.
/// </summary>
private static async Task SaveChangesExtendedAsync(IEnumerable<EntityEntry> entries, EfPocEntitiesEFCore context)
{
    List<Operation> ops = new List<Operation>();
    foreach (var item in entries)
    {
        Console.WriteLine($"Entity: {item.Entity.GetType().Name}. State: {item.State}");
        //Adding a count per operation to decide when yo use bulk operations or the default savechanges method.
        _addOperation(ref ops, item);
        //foreach (var entry in item.Properties)
        //{
        //    Console.WriteLine("--- " +
        //        $"Property '{entry.Metadata.Name}'" +
        //        $" is {(entry.IsModified ? "modified" : "not modified")} " +
        //        $"Current value: '{entry.CurrentValue}' " +
        //        $"Original value: '{entry.OriginalValue}'");
        //}
    }
    foreach (var op in ops)
    {
        Console.WriteLine("========================================================================");
        Console.WriteLine($"Bulk Operation. Entity: {op.Type.Name}. State: {op.State}");
        Console.WriteLine("========================================================================");
        // Re-filter the tracked entries for this operation's type/state pair.
        var items = entries.Where(e => e.Entity.GetType() == op.Type && e.State == op.State);
        // Build a List<T> of the concrete entity type via reflection so the typed
        // BulkInsertAsync overloads below can receive it.
        Type listType = typeof(List<>).MakeGenericType(op.Type);
        IList entityList = (IList)Activator.CreateInstance(listType);
        foreach (var newItem in items)
        {
            entityList.Add(newItem.Entity);
        }
        var config = new BulkConfig { PreserveInsertOrder = true, SetOutputIdentity = true };
        //For now I'm assuming we are always inserting
        switch (op.Type.Name)
        {
            case "Members":
                if (op.State == EntityState.Added)
                {
                    await context.BulkInsertAsync((IList<Members>)entityList, config);
                }
                break;
            case "Spans":
                if (op.State == EntityState.Added)
                {
                    await context.BulkInsertAsync((IList<Spans>)entityList, config);
                }
                break;
            default:
                break;
        }
        List<ForeignKey> fkList = new List<ForeignKey>();
        foreach (var entry in items)
        {
            //Get Foreign Keys (properties refecencing Ids of parents)
            _fillfkList(ref fkList, entry);
            foreach (var fk in fkList)
            {
                Console.WriteLine("Updating Foreign Keys");
                //Getting just 1 collection of "Spans" (IGenericCollection<Spans>)
                // NOTE(review): .First() assumes exactly one collection property of the
                // foreign type exists on the parent — TODO confirm for all entity types.
                var childItems = entry.Entity.GetType()
                                 .GetProperties()
                                 .Where(p => p.PropertyType.GenericTypeArguments
                                        .Contains(fk.ForeignType))
                                 .First().GetValue(entry.Entity, null);
                foreach (object child in (IEnumerable)childItems)
                {
                    Console.WriteLine($"Updating child class: {child.GetType()}");
                    var parentIdValue = child.GetType().GetProperty(fk.ForeignPropertyName).GetValue(child);
                    Console.WriteLine($"Current Value of {fk.ForeignPropertyName}: {parentIdValue}");
                    var parentIdValueNew = entry.Entity.GetType().GetProperty(fk.CurrentPropertyName).GetValue(entry.Entity);
                    Console.WriteLine($"New Value: {parentIdValueNew}");
                    // Overwrite the child's FK with the parent's (post-insert) key value.
                    child.GetType().GetProperty(fk.ForeignPropertyName).SetValue(child, parentIdValueNew);
                }
            }
        }
    }
}
/// <summary>
/// Sync insert test: inserts Items with two ItemHistory children each, relying on the
/// library's default PreserveInsertOrder for identity ordering, back-fills child FKs
/// from DB-generated parent Ids, and verifies the final count, stats, and last name.
/// </summary>
private void RunInsert(bool isBulk)
{
    using var context = new TestContext(ContextUtil.GetOptions());
    var entities = new List<Item>();
    var subEntities = new List<ItemHistory>();
    for (int i = 1, j = -(EntitiesNumber - 1); i < EntitiesNumber; i++, j++)
    {
        var entity = new Item
        {
            ItemId = 0, //isBulk ? j : 0, // no longer used since order(Identity temporary filled with negative values from -N to -1) is set automaticaly with default config PreserveInsertOrder=TRUE
            Name = "name " + i,
            Description = "info " + Guid.NewGuid().ToString().Substring(0, 3),
            Quantity = i % 10,
            Price = i / (i % 5 + 1),
            TimeUpdated = DateTime.Now,
            ItemHistories = new List<ItemHistory>()
        };
        var subEntity1 = new ItemHistory { ItemHistoryId = SeqGuid.Create(), Remark = $"some more info {i}.1" };
        var subEntity2 = new ItemHistory { ItemHistoryId = SeqGuid.Create(), Remark = $"some more info {i}.2" };
        entity.ItemHistories.Add(subEntity1);
        entity.ItemHistories.Add(subEntity2);
        entities.Add(entity);
    }
    if (isBulk)
    {
        if (ContextUtil.DbServer == DbServer.SqlServer)
        {
            using var transaction = context.Database.BeginTransaction();
            var bulkConfig = new BulkConfig
            {
                //PreserveInsertOrder = true, // true is default
                SetOutputIdentity = true,
                BatchSize = 4000,
                UseTempDB = true,
                CalculateStats = true
            };
            context.BulkInsert(entities, bulkConfig, (a) => WriteProgress(a));
            Assert.Equal(EntitiesNumber - 1, bulkConfig.StatsInfo.StatsNumberInserted);
            Assert.Equal(0, bulkConfig.StatsInfo.StatsNumberUpdated);
            Assert.Equal(0, bulkConfig.StatsInfo.StatsNumberDeleted);
            foreach (var entity in entities)
            {
                foreach (var subEntity in entity.ItemHistories)
                {
                    subEntity.ItemId = entity.ItemId; // setting FK to match its linked PK that was generated in DB
                }
                subEntities.AddRange(entity.ItemHistories);
            }
            context.BulkInsert(subEntities);
            transaction.Commit();
        }
        else if (ContextUtil.DbServer == DbServer.Sqlite)
        {
            using var transaction = context.Database.BeginTransaction();
            var bulkConfig = new BulkConfig() { SetOutputIdentity = true };
            context.BulkInsert(entities, bulkConfig);
            foreach (var entity in entities)
            {
                foreach (var subEntity in entity.ItemHistories)
                {
                    subEntity.ItemId = entity.ItemId; // setting FK to match its linked PK that was generated in DB
                }
                subEntities.AddRange(entity.ItemHistories);
            }
            // Children carry pre-set Guid PKs, so no output identity needed.
            bulkConfig.SetOutputIdentity = false;
            context.BulkInsert(subEntities, bulkConfig);
            transaction.Commit();
        }
    }
    else
    {
        context.Items.AddRange(entities);
        context.SaveChanges();
    }

    // TEST
    int entitiesCount = ItemsCountQuery(context);
    Item lastEntity = context.Items.OrderByDescending(a => a.ItemId).FirstOrDefault();
    Assert.Equal(EntitiesNumber - 1, entitiesCount);
    Assert.NotNull(lastEntity);
    Assert.Equal("name " + (EntitiesNumber - 1), lastEntity.Name);
}
/// <summary>
/// Given a collection of images and their keywords, performs a bulk insert
/// of them all. This is way more performant than adding the keywords as
/// each image is indexed, and allows us to bulk-update the freetext search
/// too.
/// </summary>
/// <param name="imageKeywords">Map of image to its keyword strings; images with ImageId == 0 are ignored.</param>
private void AddTags(IDictionary<Image, string[]> imageKeywords)
{
    // See if we have any images that were written to the DB and have IDs
    if (!imageKeywords.Where(x => x.Key.ImageId != 0).Any())
    {
        return;
    }
    var watch = new Stopwatch("AddTags");
    using ImageContext db = new ImageContext();
    try
    {
        // Collect keywords not yet in the in-memory tag cache and insert them as new Tag rows.
        var newTags = imageKeywords.Where(x => x.Value != null && x.Value.Any())
                      .SelectMany(x => x.Value)
                      .Distinct()
                      .Where(x => _tagCache != null && !_tagCache.ContainsKey(x))
                      .Select(x => new Models.Tag { Keyword = x, Type = "IPTC" })
                      .ToList();
        if (newTags.Any())
        {
            Logging.LogTrace("Adding {0} tags", newTags.Count());
            if (!ImageContext.ReadOnly)
            {
                // SetOutputIdentity so the generated TagIds are available for the ImageTag rows below.
                var config = new BulkConfig { SetOutputIdentity = true };
                db.BulkInsert(newTags, config);
            }
            // Add the new items to the cache.
            // NOTE(review): _tagCache was null-checked in the query above but is indexed
            // unconditionally here — confirm it can never be null on this path.
            foreach (var tag in newTags)
            {
                _tagCache[tag.Keyword] = tag;
            }
        }
    }
    catch (Exception ex)
    {
        Logging.LogError("Exception adding Tags: {0}", ex);
    }
    if (!ImageContext.ReadOnly)
    {
        using (var transaction = db.Database.BeginTransaction())
        {
            try
            {
                // Build the full image->tag link set from the cache (includes tags just inserted).
                var newImageTags = imageKeywords.SelectMany(i => i.Value.Select(
                                       v => new ImageTag { ImageId = i.Key.ImageId, TagId = _tagCache[v].TagId }))
                                   .ToList();
                // Note that we need to delete all of the existing tags for an image,
                // and then insert all of the new tags. This is so that if somebody adds
                // one tag, and removes another, we maintain the list correctly.
                Logging.LogTrace($"Updating {newImageTags.Count()} ImageTags");
                db.ImageTags.Where(y => newImageTags.Select(x => x.ImageId).Contains(y.ImageId)).BatchDelete();
                db.BulkInsertOrUpdate(newImageTags);
                transaction.Commit();
                // Refresh the free-text search index after the tag links change.
                db.FullTextTags(false);
            }
            catch (Exception ex)
            {
                Logging.LogError("Exception adding ImageTags: {0}", ex);
            }
        }
    }
    watch.Stop();
}
/// <summary>Bulk upserts <paramref name="insertedList"/> via the wrapped DbContext.</summary>
public void InsertOrUpdateBulk(List<TEntity> insertedList, BulkConfig bulkConfig)
    => _context.DbContext.BulkInsertOrUpdate(insertedList, bulkConfig);
/// <summary>
/// Creates a SqlBulkCopy for the given connection, enlisting it in the supplied
/// EF transaction when one is present.
/// </summary>
private static SqlBulkCopy GetSqlBulkCopy(SqlConnection sqlConnection, IDbContextTransaction transaction, BulkConfig config)
{
    SqlTransaction underlyingTransaction = null;
    if (transaction != null)
    {
        underlyingTransaction = (SqlTransaction)transaction.GetUnderlyingTransaction(config);
    }
    return new SqlBulkCopy(sqlConnection, config.SqlBulkCopyOptions, underlyingTransaction);
}
public static Task ExecuteAsync <T>(DbContext context, Type type, IList <T> entities, OperationType operationType, BulkConfig bulkConfig, Action <decimal> progress, CancellationToken cancellationToken) where T : class { using (ActivitySources.StartExecuteActivity((EFCore.BulkExtensions.OperationType)operationType, entities.Count)) { if (operationType != OperationType.Truncate && entities.Count == 0) { return(Task.CompletedTask); } TableInfo tableInfo = TableInfo.CreateInstance(context, type, entities, (EFCore.BulkExtensions.OperationType)operationType, bulkConfig); if (operationType == OperationType.Insert && !tableInfo.BulkConfig.SetOutputIdentity) { return(SqlBulkOperation.InsertAsync(context, entities, tableInfo, progress, cancellationToken)); } else if (operationType == OperationType.Read) { return(SqlBulkOperation.ReadAsync(context, entities, tableInfo, progress, cancellationToken)); } else if (operationType == OperationType.Truncate) { return(SqlBulkOperation.TruncateAsync(context, tableInfo, cancellationToken)); } else { return(SqlBulkOperation.MergeAsync(context, entities, tableInfo, operationType, progress, cancellationToken)); } } }
//[InlineData(DbServer.Sqlite)] // No TimeStamp column type but can be set with DefaultValueSql: "CURRENT_TIMESTAMP" as it is in OnModelCreating() method.
private void TimeStampTest(DbServer dbServer)
{
    ContextUtil.DbServer = dbServer;
    using var context = new TestContext(ContextUtil.GetOptions());

    context.Truncate<File>();

    var entities = new List<File>();
    for (int i = 1; i <= EntitiesNumber; i++)
    {
        var entity = new File
        {
            Description = "Some data " + i
        };
        entities.Add(entity);
    }
    context.BulkInsert(entities, bulkAction => bulkAction.SetOutputIdentity = true); // example of setting BulkConfig with Action argument

    // Test BulkRead
    var entitiesRead = new List<File>
    {
        new File { Description = "Some data 1" },
        new File { Description = "Some data 2" }
    };
    context.BulkRead(entitiesRead, new BulkConfig
    {
        UpdateByProperties = new List<string> { nameof(File.Description) }
    });
    Assert.Equal(1, entitiesRead.First().FileId);
    Assert.NotNull(entitiesRead.First().VersionChange);

    // For testing concurrency conflict (UPDATE changes RowVersion which is TimeStamp column)
    context.Database.ExecuteSqlRaw("UPDATE dbo.[File] SET Description = 'Some data 1 PRE CHANGE' WHERE [Id] = 1;");

    var entitiesToUpdate = entities.Take(10).ToList();
    foreach (var entityToUpdate in entitiesToUpdate)
    {
        entityToUpdate.Description += " UPDATED";
    }

    using var transaction = context.Database.BeginTransaction();
    var bulkConfig = new BulkConfig { SetOutputIdentity = true, DoNotUpdateIfTimeStampChanged = true };
    context.BulkUpdate(entitiesToUpdate, bulkConfig);

    var list = bulkConfig.TimeStampInfo?.EntitiesOutput.Cast<File>().ToList();
    // BUGFIX: List.Count property rather than the Count() extension re-enumeration.
    Assert.Equal(9, list.Count);
    Assert.Equal(1, bulkConfig.TimeStampInfo.NumberOfSkippedForUpdate);

    if (bulkConfig.TimeStampInfo?.NumberOfSkippedForUpdate > 0)
    {
        //Options, based on needs:
        // 1. rollback entire Update
        // 2. throw Exception //throw new DbUpdateConcurrencyException()
        // 3. Update them again
        // 4. Skip them and leave it unchanged
        // BUGFIX: "Skip them and leave it unchanged" was bare text outside any
        // comment (the "// 4." comment got split), which did not compile.
        transaction.Rollback(); // 1. rollback entire Update
    }
    else
    {
        transaction.Commit();
    }
}
internal static SqliteConnection OpenAndGetSqliteConnection(DbContext context, BulkConfig bulkConfig) { context.Database.OpenConnection(); return((SqliteConnection)context.Database.GetDbConnection()); }
/// <summary> /// Select or deselect an image - adding or removing it from the basket. /// </summary> /// <param name="image"></param> /// <param name="newState"></param> /// TODO: Make this async public void SetBasketState(ICollection <Image> images, bool newState) { try { using var db = new ImageContext(); bool changed = false; var watch = new Stopwatch("SetSelection"); var existingEntries = db.BasketEntries.Where(x => x.BasketId == CurrentBasket.BasketId && images.Select(img => img.ImageId).Contains(x.ImageId)); if (newState) { // TODO: skip existing. Do we need this?! var imagesToAdd = images.Where(img => !existingEntries.Select(x => x.ImageId).Contains(img.ImageId)).ToList(); var basketEntries = imagesToAdd.Select(img => new BasketEntry { ImageId = img.ImageId, BasketId = CurrentBasket.BasketId, DateAdded = DateTime.UtcNow }).ToList(); if (basketEntries.Any()) { var config = new BulkConfig { SetOutputIdentity = true }; db.BulkInsert(basketEntries, config); imagesToAdd.ForEach(img => { img.BasketEntry = basketEntries.First(x => x.ImageId == img.ImageId); SelectedImages.Add(img); }); changed = true; StatusService.Instance.StatusText = $"Added {imagesToAdd.Count} image to the basket."; } } else if (!newState) { int deleted = existingEntries.BatchDelete(); if (deleted > 0) { images.ToList().ForEach(x => { x.BasketEntry = null; }); SelectedImages.RemoveAll(x => images.Select(x => x.ImageId).Contains(x.ImageId)); changed = true; StatusService.Instance.StatusText = $"Removed {deleted} images from the basket."; } } watch.Stop(); if (changed) { NotifyStateChanged(); } } catch (Exception ex) { Logging.LogError($"Unable to update the basket: {ex.Message}"); } }