/// <summary>
/// Persists a batch of append events with <see cref="SqlBulkCopy"/> under an explicit
/// transaction; if the bulk copy fails, retries the same rows as parameterized single-row
/// inserts (<c>insert_sql</c>) in a fresh transaction. Each wrapper's <c>TaskSource</c> is
/// completed with <c>true</c> on success or faulted with the final insert error.
/// </summary>
/// <param name="wrapperList">Pending append operations to persist as one batch.</param>
private async Task BatchInsertExecuter(List<AsyncInputEvent<AppendInput, bool>> wrapperList)
{
    // FIX: guard the downcast — the original 'as' cast would yield null (then NRE)
    // if CreateConnection() ever returned a different provider's connection.
    if (CreateConnection() is not SqlConnection conn)
    {
        throw new InvalidOperationException("CreateConnection() must return a SqlConnection for bulk copy.");
    }
    using (conn)
    {
        await conn.OpenAsync();
        using var trans = conn.BeginTransaction();
        try
        {
            // FIX: SqlBulkCopyOptions.UseInternalTransaction cannot be combined with an
            // externally supplied transaction — SqlBulkCopy throws InvalidOperationException,
            // which silently pushed every batch onto the slow row-by-row fallback.
            // Use Default and enlist in 'trans' instead.
            using var bulkCopy = new SqlBulkCopy(conn, Microsoft.Data.SqlClient.SqlBulkCopyOptions.Default, trans)
            {
                DestinationTableName = options.Value.TableName,
                BatchSize = wrapperList.Count
            };
            using var dt = new DataTable();
            dt.Columns.Add("UnitName", typeof(string));
            dt.Columns.Add("TransactionId", typeof(long));
            dt.Columns.Add("Data", typeof(string));
            dt.Columns.Add("Status", typeof(int));
            foreach (var item in wrapperList)
            {
                var row = dt.NewRow();
                row["UnitName"] = item.Value.UnitName;
                row["TransactionId"] = item.Value.TransactionId;
                row["Data"] = item.Value.Data;
                row["Status"] = (int)item.Value.Status;
                dt.Rows.Add(row);
            }
            // FIX: removed the duplicate conn.OpenAsync() — the connection is already open
            // and reopening throws InvalidOperationException, again forcing the fallback.
            await bulkCopy.WriteToServerAsync(dt);
            trans.Commit();
            wrapperList.ForEach(wrap => wrap.TaskSource.TrySetResult(true));
        }
        catch
        {
            trans.Rollback();
            // Fallback: parameterized per-row inserts in a new transaction.
            // NOTE(review): Status is cast to short here but to int in the DataTable above —
            // confirm the column's SQL type and unify the casts.
            using var insert_trans = conn.BeginTransaction();
            try
            {
                await conn.ExecuteAsync(insert_sql, wrapperList.Select(wrapper => new
                {
                    wrapper.Value.UnitName,
                    wrapper.Value.TransactionId,
                    wrapper.Value.Data,
                    Status = (short)wrapper.Value.Status
                }).ToList(), insert_trans);
                insert_trans.Commit();
                wrapperList.ForEach(wrap => wrap.TaskSource.TrySetResult(true));
            }
            catch (Exception e)
            {
                insert_trans.Rollback();
                wrapperList.ForEach(wrap => wrap.TaskSource.TrySetException(e));
            }
        }
    }
}
/// <summary>
/// Bulk-copies a batch of append events into the configured table; on any failure falls back
/// to idempotent per-row inserts (guarded by a NOT EXISTS check on UnitName + TransactionId)
/// inside a transaction. Each wrapper's <c>TaskSource</c> is completed with the outcome.
/// </summary>
/// <param name="wrapperList">Pending append operations to persist as one batch.</param>
private async Task BatchProcessing(List<AsyncInputEvent<AppendInput, bool>> wrapperList)
{
    try
    {
        // FIX: guard the downcast — the original 'as' cast silently produced null when
        // CreateConnection() returned a non-SqlClient connection, surfacing only as an
        // ArgumentNullException from SqlBulkCopy that the bare catch below swallowed.
        if (CreateConnection() is not SqlConnection conn)
        {
            throw new InvalidOperationException("CreateConnection() must return a SqlConnection for bulk copy.");
        }
        using (conn)
        {
            using var bulkCopy = new SqlBulkCopy(conn)
            {
                DestinationTableName = options.Value.TableName,
                BatchSize = wrapperList.Count
            };
            using var dt = new DataTable();
            dt.Columns.Add("UnitName", typeof(string));
            dt.Columns.Add("TransactionId", typeof(long));
            dt.Columns.Add("Data", typeof(string));
            dt.Columns.Add("Status", typeof(int));
            foreach (var item in wrapperList)
            {
                var row = dt.NewRow();
                row["UnitName"] = item.Value.UnitName;
                row["TransactionId"] = item.Value.TransactionId;
                row["Data"] = item.Value.Data;
                row["Status"] = (int)item.Value.Status;
                dt.Rows.Add(row);
            }
            await conn.OpenAsync();
            await bulkCopy.WriteToServerAsync(dt);
            wrapperList.ForEach(wrap => wrap.TaskSource.TrySetResult(true));
        }
    }
    catch
    {
        // Fallback path: insert rows one by one, skipping rows that already exist so a
        // retried batch does not create duplicates.
        var saveSql = $"if NOT EXISTS(SELECT * FROM {options.Value.TableName} where UnitName=@UnitName and TransactionId=@TransactionId)INSERT INTO {options.Value.TableName}(UnitName,TransactionId,Data,Status) VALUES(@UnitName,@TransactionId,@Data,@Status)";
        using var conn = CreateConnection();
        await conn.OpenAsync();
        using var trans = conn.BeginTransaction();
        try
        {
            await conn.ExecuteAsync(saveSql, wrapperList.Select(wrapper => new
            {
                wrapper.Value.UnitName,
                wrapper.Value.TransactionId,
                wrapper.Value.Data,
                Status = (short)wrapper.Value.Status
            }).ToList(), trans);
            trans.Commit();
            wrapperList.ForEach(wrap => wrap.TaskSource.TrySetResult(true));
        }
        catch (Exception e)
        {
            trans.Rollback();
            wrapperList.ForEach(wrap => wrap.TaskSource.TrySetException(e));
        }
    }
}
/// <summary>
/// Inserts one HYDRAULIC_CALCULATION header row summarizing a finished hydraulic run
/// (start/stop times plus per-status segment tallies) and returns the new record's id.
/// </summary>
/// <param name="connStr">SQL Server connection string.</param>
/// <param name="CalcBeg_Time">Moment the calculation started.</param>
/// <param name="recs">Per-segment results; <c>null</c> is recorded as SegmentsCount = -1.</param>
/// <returns>The generated <see cref="Guid"/> of the inserted calculation record.</returns>
internal static Guid CreateCalculationRec(string connStr, DateTime CalcBeg_Time, CalcRec.HydrCalcDataRec[] recs)
{
    var calculation = new PPM.Pipeline.Fact.ApiModels.Service.HydraulicCalculation()
    {
        Id = Guid.NewGuid(),
        StartCalculationTime = CalcBeg_Time,
        StopCalculationTime = DateTime.UtcNow,
        SegmentsCount = recs?.Length ?? -1,
    };

    // Tally how many segments ended up in each calculation status.
    var statusCounts = new int[(int)CalcRec.CalcStatus.MaxValue];
    if (recs != null)
    {
        foreach (var rec in recs)
        {
            statusCounts[(int)rec.CalcStatus]++;
        }
    }
    calculation.CalculatedCount = statusCounts[(int)CalcRec.CalcStatus.Success];
    calculation.ErrorsCount = statusCounts[(int)CalcRec.CalcStatus.Failed];

    using (new StopwatchMs("Save into HYDRAULIC_CALCULATION"))
    using (var loader = new Microsoft.Data.SqlClient.SqlBulkCopy(connStr))
    {
        loader.DestinationTableName = "HYDRAULIC_CALCULATION";
        // Single dummy source row: the callback fires once and emits the 11 header columns.
        var reader = new BulkDataReader<int>(new[] { 0 }, (_, j, vals) =>
        {
            vals[0] = calculation.Id;
            vals[1] = calculation.StartCalculationTime;
            vals[2] = calculation.StopCalculationTime;
            vals[3] = calculation.CalculationStatusRd;
            vals[4] = calculation.PipesCount;
            vals[5] = calculation.SegmentsCount;
            vals[6] = calculation.CalculatedCount;
            vals[7] = calculation.ErrorsCount;
            vals[8] = calculation.WithSheduler;
            vals[9] = calculation.Initiator;
            vals[10] = Guid.Empty; // FILE_ID
        }, 11);
        loader.WriteToServer(reader);
    }
    return calculation.Id;
}
/// <summary>
/// Supports <see cref="Microsoft.Data.SqlClient.SqlBulkCopy"/>: applies the configured
/// destination table, batch size, progress notification, timeout, streaming flag and
/// (optionally) the property-to-column mappings onto the given bulk-copy instance.
/// </summary>
/// <typeparam name="T">Entity type being copied.</typeparam>
/// <param name="sqlBulkCopy">The bulk-copy instance to configure.</param>
/// <param name="entities">Entities whose count drives progress reporting.</param>
/// <param name="setColumnMapping">When true, copies PropertyColumnNamesDict into ColumnMappings.</param>
/// <param name="progress">Optional callback receiving the completed fraction.</param>
public void SetSqlBulkCopyConfig<T>(Microsoft.Data.SqlClient.SqlBulkCopy sqlBulkCopy, IList<T> entities, bool setColumnMapping, Action<decimal> progress)
{
    string destinationTable = InsertToTempTable ? FullTempTableName : FullTableName;
    sqlBulkCopy.DestinationTableName = destinationTable;

    int configuredBatchSize = BulkConfig.BatchSize;
    sqlBulkCopy.BatchSize = configuredBatchSize;
    sqlBulkCopy.NotifyAfter = BulkConfig.NotifyAfter ?? configuredBatchSize;
    sqlBulkCopy.SqlRowsCopied += (sender, e) =>
    {
        // round to 4 decimal places
        progress?.Invoke(ProgressHelper.GetProgress(entities.Count, e.RowsCopied));
    };

    // Only override the timeout when one was explicitly configured.
    if (BulkConfig.BulkCopyTimeout.HasValue)
    {
        sqlBulkCopy.BulkCopyTimeout = BulkConfig.BulkCopyTimeout.Value;
    }
    sqlBulkCopy.EnableStreaming = BulkConfig.EnableStreaming;

    if (setColumnMapping)
    {
        foreach (var mapping in PropertyColumnNamesDict)
        {
            sqlBulkCopy.ColumnMappings.Add(mapping.Key, mapping.Value);
        }
    }
}
/// <summary>
/// Persists per-edge hydraulic calculation results into HYDRAULIC_CALCULATION_RESULT,
/// first translating OIS pipe-segment identifiers to PPM GUIDs via the OIS_2_PPM table.
/// </summary>
/// <param name="connStr">SQL Server connection string.</param>
/// <param name="recs">Calculation results, one entry per edge.</param>
/// <param name="edgesIDs">OIS identifiers parallel to <paramref name="recs"/>.</param>
/// <param name="Calc_Time">Timestamp stored with every result row.</param>
/// <param name="Calculation_ID">Id of the parent HYDRAULIC_CALCULATION record.</param>
public static void SaveResults(string connStr, CalcRec.HydrCalcDataRec[] recs, ulong[] edgesIDs, DateTime Calc_Time, Guid Calculation_ID)
{
    var oisToPpm = new Dictionary<ulong, Guid>();

    using (new StopwatchMs("Загрузка справочника преобразования ID OIS в ID PPM"))
    using (var connection = new Microsoft.Data.SqlClient.SqlConnection(connStr))
    {
        connection.Open();
        using (var command = connection.CreateCommand())
        {
            command.CommandText = "SELECT OIS_ID, PPM_ID FROM OIS_2_PPM WHERE OIS_TABLE_NAME = 'PipeProstoyUchastok'";
            using (var mappingReader = command.ExecuteReader())
            {
                while (mappingReader.Read())
                {
                    oisToPpm[Convert.ToUInt64(mappingReader[0])] = mappingReader.GetGuid(1);
                }
            }
        }
    }

    using (new StopwatchMs("Bulk save into HYDRAULIC_CALCULATION_RESULT"))
    using (var loader = new Microsoft.Data.SqlClient.SqlBulkCopy(connStr))
    {
        loader.DestinationTableName = "HYDRAULIC_CALCULATION_RESULT";
        var resultReader = new BulkDataReader<CalcRec.HydrCalcDataRec>(recs, (edgeIndex, rec, vals) =>
        {
            // Edges with no PPM mapping are still written, with an empty GUID.
            int col = 0;
            vals[col++] = oisToPpm.TryGetValue(edgesIDs[edgeIndex], out var ppmId) ? ppmId : Guid.Empty;
            vals[col++] = Calc_Time;
            vals[col++] = Calculation_ID;
            rec.GetValues(vals, ref col);
        }, 44);
        loader.WriteToServer(resultReader);
    }
}
/// <summary>
/// Supports <see cref="Microsoft.Data.SqlClient.SqlBulkCopy"/>: builds the
/// <see cref="DataTable"/> for the given entities and registers a 1:1 column mapping
/// (source name == destination name) for every generated column.
/// </summary>
/// <typeparam name="T">Entity type being copied.</typeparam>
/// <param name="context">EF context used to resolve entity metadata.</param>
/// <param name="type">Entity CLR type (may be refined by InnerGetDataTable).</param>
/// <param name="entities">Entities to materialize into rows.</param>
/// <param name="sqlBulkCopy">Bulk-copy instance whose mappings are populated.</param>
/// <param name="tableInfo">Destination table metadata.</param>
/// <returns>The populated data table ready for WriteToServer.</returns>
internal static DataTable GetDataTable<T>(DbContext context, Type type, IList<T> entities, Microsoft.Data.SqlClient.SqlBulkCopy sqlBulkCopy, TableInfo tableInfo)
{
    var table = InnerGetDataTable(context, ref type, entities, tableInfo);

    // Add mapping: every column maps onto the identically-named destination column.
    for (int i = 0; i < table.Columns.Count; i++)
    {
        string columnName = table.Columns[i].ColumnName;
        sqlBulkCopy.ColumnMappings.Add(columnName, columnName);
    }
    return table;
}