/// <summary>
/// Uploads the batch currently accumulated in <c>_nextBatch</c> to Kusto and starts a fresh batch.
/// Writes the uploaded record count to the console on success; logs the exception on failure.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when a previous upload is still in progress (<c>_currentBatch</c> is not null).
/// </exception>
public void UploadBatch()
{
    if (_currentBatch != null)
    {
        // InvalidOperationException is more specific than the bare Exception the original
        // threw, and still derives from it, so existing catch (Exception) callers keep working.
        throw new InvalidOperationException("Upload must not be called before the batch currently being uploaded is complete");
    }

    _currentBatch = _nextBatch;
    _nextBatch = new List<T>();

    // EnumerableDataReader implements IDataReader (hence IDisposable); the original leaked it.
    using (var data = new EnumerableDataReader<T>(_currentBatch, _fields))
    {
        try
        {
            _ingestClient.IngestFromDataReader(data, _ingestionProperties);
            int recordsUploaded = _currentBatch.Count;
            _currentBatch = null;
            // NOTE(review): DateTime.Now kept for compatibility; consider DateTime.UtcNow.
            _lastUploadTime = DateTime.Now;
            Console.Write("{0} ", recordsUploaded);
        }
        catch (Exception e)
        {
            // Best-effort logging, as in the original. On failure _currentBatch stays set,
            // so subsequent calls will throw until the state is reset elsewhere.
            Console.WriteLine(e);
        }
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="EnumerableDataReaderFacts"/> class:
/// builds a fixture, generates a set of fake entities and wraps them in a reader.
/// </summary>
public EnumerableDataReaderFacts()
{
    var fixture = new Fixture();

    this.fixture = fixture;
    this.data = fixture.CreateMany<FakeEntity>();
    this.reader = new EnumerableDataReader<FakeEntity>(this.data);
}
// Token: 0x060002B0 RID: 688 RVA: 0x00010CA8 File Offset: 0x0000EEA8
private static void InsertDiscoveryLogItems(List<DiscoveryLogItem> items, Guid batchID)
{
    // Map each item property (plus the constant batch id) to a bulk-copy column.
    var accessor = new SinglePropertyAccessor<DiscoveryLogItem>()
        .AddColumn("BatchID", (DiscoveryLogItem i) => batchID)
        .AddColumn("EntityType", (DiscoveryLogItem i) => i.EntityType)
        .AddColumn("DisplayName", (DiscoveryLogItem i) => i.DisplayName)
        .AddColumn("NetObjectID", (DiscoveryLogItem i) => i.NetObjectID);

    using (IDataReader dataReader = new EnumerableDataReader<DiscoveryLogItem>(accessor, items))
    {
        SqlHelper.ExecuteBulkCopy("DiscoveryLogItems", dataReader, SqlBulkCopyOptions.Default);
    }
}
/// <summary>
/// Copies all rows in the supplied IEnumerable&lt;T&gt; to a destination table specified by the
/// DestinationTableName property of the SqlBulkCopy object.
/// </summary>
/// <param name="collection">IEnumerable&lt;T&gt;. For non-generic IEnumerable use the other overload and specify the element type.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="collection"/> is null.</exception>
public void WriteToServer<T>(IEnumerable<T> collection)
{
    // Null check added for consistency with the non-generic WriteToServer overload,
    // which already validates its collection argument.
    if (collection == null)
    {
        throw new ArgumentNullException("collection");
    }

    using (var reader = new EnumerableDataReader(collection))
    {
        WriteToServer(new SqlCeBulkCopyDataReaderAdapter(reader));
    }
}
/// <summary>
/// Exercises EnumerableDataReader over an IQueryable projection into an anonymous type
/// and prints every row's four values.
/// </summary>
public void TestEnumerableDataReaderWithIQueryableOfAnonymousType()
{
    var ctx = new NorthwindEntities();

    // Project the VINET orders into an anonymous type with a computed line total.
    var q = ctx.Orders
        .Where(o => o.Customers.CustomerID == "VINET")
        .Select(o => new
        {
            o.OrderID,
            o.OrderDate,
            o.Customers.CustomerID,
            Total = o.Order_Details.Sum(
                od => od.Quantity * ((float)od.UnitPrice - ((float)od.UnitPrice * od.Discount)))
        });

    var r = new EnumerableDataReader(q);
    while (r.Read())
    {
        var values = new object[4];
        r.GetValues(values);
        // The object[] expands into the four placeholders via the params overload.
        Console.WriteLine("{0} {1} {2} {3}", values);
    }
}
/// <summary>
/// Verifies bulk update against a table whose primary key is a composite of two columns,
/// one of which is an identity column: three rows are updated, the rest stay untouched.
/// </summary>
public async Task BulkUpdateDataInTableWithCompositeAndIdentityPrimaryKey()
{
    List<BulkUpdateItemComposite> actualData = null;

    using (var helper = CreateHelper(new[] { CompositeWithIdentity_CreateTable, CompositeWithIdentity_InsertData }))
    using (var bulkUpdate = new SqlServerBulkUpdate(helper.Connection))
    // Dispose the reader deterministically (it implements IDisposable); the original leaked it.
    using (var dataToUpdate = new EnumerableDataReader<BulkUpdateItemComposite>(
        new[]
        {
            new BulkUpdateItemComposite() { Id1 = 1, Id2 = 2, Value = "lorem ipsum 2" },
            new BulkUpdateItemComposite() { Id1 = 2, Id2 = 3, Value = "lorem ipsum 3" },
            new BulkUpdateItemComposite() { Id1 = 3, Id2 = 6, Value = "lorem ipsum 6" }
        },
        new[]
        {
            nameof(BulkUpdateItemComposite.Id1),
            nameof(BulkUpdateItemComposite.Id2),
            // BUGFIX: was nameof(BulkUpdateItemIdentity.Value) — same string value, but the
            // column list should reference the type actually being updated.
            nameof(BulkUpdateItemComposite.Value)
        }))
    {
        bulkUpdate.DestinationTableName = CompositeWithIdentity_TableName;
        bulkUpdate.PrimaryKeyColumn = nameof(BulkUpdateItemComposite.Id1) + ", " + nameof(BulkUpdateItemComposite.Id2);

        await bulkUpdate.UpdateAsync(dataToUpdate);

        helper.Connection.Open();
        actualData = LoadDataForTableWithCompositePk(helper.Connection, CompositeWithIdentity_TableName);
    }

    actualData.Should().Equal(new List<BulkUpdateItemComposite>(new[]
    {
        new BulkUpdateItemComposite() { Id1 = 1, Id2 = 1, Value = "1 - 1" },
        new BulkUpdateItemComposite() { Id1 = 1, Id2 = 2, Value = "lorem ipsum 2" },
        new BulkUpdateItemComposite() { Id1 = 2, Id2 = 3, Value = "lorem ipsum 3" },
        new BulkUpdateItemComposite() { Id1 = 2, Id2 = 4, Value = "2 - 4" },
        new BulkUpdateItemComposite() { Id1 = 3, Id2 = 5, Value = "3 - 5" },
        new BulkUpdateItemComposite() { Id1 = 3, Id2 = 6, Value = "lorem ipsum 6" },
    }));
}
/// <summary>
/// Builds a SQL Server bulk-copy task for the given table and appends it to the
/// job whose tag matches the table name.
/// </summary>
public override void CreateBulkInsertTask(string table, EnumerableDataReader reader, ImportTarget target)
{
    var copier = new MsSqlBulkCopy(Config.Provider.ConnectionString, SqlBulkCopyOptions.TableLock);
    var task = new BulkCopyTask(copier, table, reader, target.Name, Config.BatchSize, target.Schema);

    Jobs.Find(j => j.Tag == table).Tasks.Add(task);
}
/// <summary>
/// Runs the bulk copy: wires progress/abort notification, maps columns using the source
/// reader's schema table, streams all rows to the destination table, then fires
/// post-process/complete callbacks. All failures are routed through the event channel.
/// </summary>
public void Process()
{
    _count = 0;
    bool aborted = false;
    try
    {
        OnRowsInserted(CopyEventType.Begin, "Initializing");
        // Only configure batching/notification when a positive batch size was supplied.
        if (_batchSize > 0)
        {
            _bc.BatchSize = _batchSize;
            _bc.NotifyAfter = _batchSize;
        }
        // Progress callback: the subscriber can request an abort via e.Abort.
        _bc.RowsInserted += (s, e) =>
        {
            _count = e.Count;
            e.Abort = OnRowsInserted(CopyEventType.Active);
            if (e.Abort)
            {
                OnRowsInserted(CopyEventType.Error, "Aborted");
                aborted = true;
            }
        };
        _bc.BulkCopyTimeout = 35000; // presumably seconds — effectively "no timeout" for long imports; confirm against BulkCopyBase
        // Qualify the destination with the schema when one is configured.
        _bc.DestinationTableName = string.IsNullOrEmpty(Schema) ? Table : string.Format("[{0}].{1}", Schema, Table);
        DataTable st = _readerIn.GetSchemaTable();
        // NOTE(review): the using block disposes the injected _readerIn field — this task takes
        // ownership of the reader once Process() runs; confirm callers do not reuse it afterwards.
        using (EnumerableDataReader reader = _readerIn)
        {
            // Map every source column to an identically named destination column,
            // converting the CLR column type to the corresponding DbType.
            for (int i = 0; i < reader.FieldCount; i++)
            {
                string fieldName = reader.GetName(i);
                _bc.AddColumnMappings(fieldName, fieldName, TypeConverter.ToDbType(st.Columns[i].DataType));
            }
            _bc.WriteToServer(reader);
            reader.Close();
            // Post-processing is skipped when the subscriber aborted mid-copy.
            if (!aborted)
            {
                OnPostProcess();
            }
            OnComplete();
        }
    }
    catch (Exception ex)
    {
        // Errors are reported through the event channel rather than rethrown.
        OnRowsInserted(CopyEventType.Error, ex.Message);
    }
}
/// <summary>
/// Bulk-updates three rows identified by the composite primary key (Id1, Id2) and asserts
/// the table afterwards contains the updated values alongside the untouched rows.
/// </summary>
private async Task BulkUpdateDataWithCompositePk(OleDbConnection cn)
{
    List<BulkUpdateItemComposite> actualData = null;

    using (var bulkUpdate = new MsAccessBulkUpdate(cn))
    // Dispose the reader deterministically (it implements IDisposable); the original leaked it.
    using (var dataToUpdate = new EnumerableDataReader<BulkUpdateItemComposite>(
        new[]
        {
            new BulkUpdateItemComposite() { Id1 = 1, Id2 = 2, DataValue = "lorem ipsum 1" },
            new BulkUpdateItemComposite() { Id1 = 2, Id2 = 2, DataValue = "lorem ipsum 2" },
            new BulkUpdateItemComposite() { Id1 = 3, Id2 = 2, DataValue = "lorem ipsum 3" }
        },
        new[]
        {
            nameof(BulkUpdateItemComposite.Id1),
            nameof(BulkUpdateItemComposite.Id2),
            nameof(BulkUpdateItemComposite.DataValue)
        }))
    {
        bulkUpdate.DestinationTableName = Composite_TableName;
        bulkUpdate.PrimaryKeyColumn = nameof(BulkUpdateItemComposite.Id1) + ", " + nameof(BulkUpdateItemComposite.Id2);

        await bulkUpdate.UpdateAsync(dataToUpdate);

        actualData = LoadDataForTableWithCompositePk(cn, Composite_TableName);
    }

    actualData.Should().Equal(new List<BulkUpdateItemComposite>(new[]
    {
        new BulkUpdateItemComposite() { Id1 = 1, Id2 = 1, DataValue = "1 - 1" },
        new BulkUpdateItemComposite() { Id1 = 1, Id2 = 2, DataValue = "lorem ipsum 1" },
        new BulkUpdateItemComposite() { Id1 = 2, Id2 = 1, DataValue = "2 - 1" },
        new BulkUpdateItemComposite() { Id1 = 2, Id2 = 2, DataValue = "lorem ipsum 2" },
        new BulkUpdateItemComposite() { Id1 = 3, Id2 = 1, DataValue = "3 - 1" },
        new BulkUpdateItemComposite() { Id1 = 3, Id2 = 2, DataValue = "lorem ipsum 3" },
    }));
}
/// <summary>
/// Builds a SQLite bulk-insert task writing into a per-schema working file and appends it
/// to the job whose tag matches the target schema.
/// </summary>
public override void CreateBulkInsertTask(string table, EnumerableDataReader reader, ImportTarget target)
{
    var inserter = new SQLiteBulkInserter(_targetPath, target.Schema, table);
    var task = new BulkCopyTask(inserter, table, reader, target.Name, Config.BatchSize, target.Schema);

    // When the task is complete, merge into main file.
    task.PostProcess += PostProcess;
    _locks[task.Schema] = new object();

    Jobs.Find(j => j.Tag == target.Schema).Tasks.Add(task);
}
/// <summary>
/// Verifies that column ordinals and names are assigned in the order the column names
/// were supplied to the constructor.
/// </summary>
public void CreateInstanceCorrectly()
{
    // Wrap the reader in a using block (it implements IDisposable); the original leaked it.
    using (var reader = new EnumerableDataReader<DataItem>(new List<DataItem>(), new string[] { "Id", "Name" }))
    {
        reader.GetOrdinal("Id").Should().Be(0, "Id column must have ordinal 0.");
        reader.GetOrdinal("Name").Should().Be(1, "Name column must have ordinal 1.");
        reader.GetName(0).Should().Be("Id", "Column at index 0 must be Id.");
        reader.GetName(1).Should().Be("Name", "Column at index 1 must be Name.");
    }
}
/// <summary>
/// Accessing a value on a closed reader must raise <see cref="ObjectDisposedException"/>.
/// </summary>
public void ThrowsWhenValueAccessedAfterDisposal()
{
    var disposedReader = new EnumerableDataReader<FakeEntity>(this.data);
    disposedReader.Close();

    Action act = () => disposedReader.GetValue(0);

    act.ShouldThrow<ObjectDisposedException>();
}
/// <summary>
/// The constructor must reject a null data sequence with an ArgumentNullException
/// whose ParamName is "data".
/// </summary>
public void ThrowArgumentNullExceptionWhenDataIsNull()
{
    Action createInstance = () =>
    {
        var instance = new EnumerableDataReader<DataItem>(null, new string[] { "Id" });
    };

    createInstance.ShouldThrow<ArgumentNullException>()
        .And.ParamName.Should().Be("data");
}
/// <summary>
/// The constructor must reject an empty column list with an ArgumentException
/// whose ParamName is "columnNames".
/// </summary>
public void ThrowArgumentExceptionWhenColumnNamesIsEmpty()
{
    Action createInstance = () =>
    {
        var instance = new EnumerableDataReader<DataItem>(new List<DataItem>(), new string[] { });
    };

    createInstance.ShouldThrow<ArgumentException>()
        .And.ParamName.Should().Be("columnNames");
}
/// <summary>
/// Copies all rows in the supplied IEnumerable to a destination table specified by the
/// DestinationTableName property of the SqlBulkCopy object.
/// Use the generic overload for IEnumerable&lt;T&gt;.
/// </summary>
/// <param name="collection">The source rows.</param>
/// <param name="elementType">The element type used to derive the column layout.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="collection"/> is null.</exception>
public void WriteToServer(IEnumerable collection, Type elementType)
{
    if (collection == null)
    {
        // nameof is refactor-safe; the ParamName observed by callers stays "collection".
        throw new ArgumentNullException(nameof(collection));
    }

    using (var reader = new EnumerableDataReader(collection, elementType))
    {
        WriteToServer(new SqlCeBulkCopyDataReaderAdapter(reader));
    }
}
/// <summary>
/// Initializes a bulk-copy task bound to a specific copier, source reader and target table,
/// assigning the task a fresh unique id.
/// </summary>
internal BulkCopyTask(BulkCopyBase bc, string table, EnumerableDataReader readerIn, string site, int batchSize, string schema)
{
    _id = Guid.NewGuid();
    _bc = bc;
    _readerIn = readerIn;
    _table = table;
    _schema = schema;
    _site = site;
    _batchSize = batchSize;
}
/// <summary>
/// Sample: stream many items into an MS Access table through a column-limited reader.
/// </summary>
public void InsertManyItems()
{
    IEnumerable<Item> data = GetData();

    using (var reader = new EnumerableDataReader<Item>(data, new string[] { "Id", "Name" }))
    using (var bulkInsert = new MsAccessBulkInsert("connection string"))
    {
        bulkInsert.Insert(reader);
    }
}
/// <summary>
/// Sample: bulk-update many items in an MS Access table through a column-limited reader.
/// </summary>
public void UpdateManyItems()
{
    IEnumerable<BulkUpdateItem> data = GetItems();

    using (var reader = new EnumerableDataReader<BulkUpdateItem>(data, new string[] { "Id", "Name" }))
    using (var bulkUpdate = new MsAccessBulkUpdate("connection string"))
    {
        bulkUpdate.Update(reader);
    }
}
/// <summary>
/// Reading a class-backed reader: the renamed column is exposed while the original
/// property name yields ordinal -1.
/// </summary>
public void DataReaderReadClass()
{
    var source = new TestClass[] { new TestClass(1) };

    using (var r = EnumerableDataReader.Create(source))
    {
        Assert.AreEqual(1, r.FieldCount);
        Assert.AreEqual(true, r.Read());
        Assert.AreEqual(0, r.GetOrdinal("renamed"));
        Assert.AreEqual(-1, r.GetOrdinal("myproperty"));
        Assert.AreEqual(1, r.GetValue(0));
    }
}
/// <summary>
/// Constructing with a column name not present on the item type must fail with a message
/// naming both the item type and the offending column.
/// </summary>
public void ThrowInvalidOperationExceptionWhenColumnNameIsInvalid()
{
    const string invalidColumn = "Lorem";

    Action createInstance = () =>
    {
        var instance = new EnumerableDataReader<DataItem>(
            new List<DataItem>(), new string[] { "Id", invalidColumn });
    };

    createInstance.ShouldThrow<InvalidOperationException>()
        .WithMessage($"*{typeof(DataItem).FullName}*{invalidColumn}*");
}
/// <summary>
/// Ingests log records into the Kusto sink registered under <paramref name="sinkName"/>.
/// Failures are logged to the console, never rethrown.
/// NOTE(review): async void means callers cannot await completion or observe exceptions;
/// consider returning Task in a future (breaking) change.
/// </summary>
public async void LoadDataToKusto(string sinkName, List<LogRecordSentinel> list)
{
    try
    {
        var client = KustoIngestClients.FirstOrDefault(x => x.Name.Equals(sinkName));
        if (client == null)
        {
            // BUGFIX: the original awaited `client?.IKustoIngestClient...`, which throws a
            // NullReferenceException (awaiting a null Task) when the sink is unknown.
            // Skip ingestion explicitly and report the misconfiguration instead.
            Console.WriteLine($"LoadToLogFileKusto error: no ingest client registered for sink '{sinkName}'");
            return;
        }

        var data = new EnumerableDataReader<LogRecordSentinel>(list, eventLogFields);
        await client.IKustoIngestClient.IngestFromDataReaderAsync(data, client.KustoIngestionProperties);
    }
    catch (Exception ex)
    {
        Console.WriteLine($"LoadToLogFileKusto error: {ex}");
    }
}
/// <summary>
/// Synchronously ingests cost metrics into the Kusto sink registered under the given name;
/// an unknown sink is silently skipped and failures are only logged.
/// </summary>
public void LoadMetricsToKusto(string sinkName, List<SentinelCostMetric> list)
{
    try
    {
        var reader = new EnumerableDataReader<SentinelCostMetric>(list, sentinelCostMetricFields);
        var sink = KustoIngestClients.FirstOrDefault(x => x.Name.Equals(sinkName));

        // The ?. short-circuits argument evaluation too, so a missing sink is a no-op.
        sink?.IKustoIngestClient.IngestFromDataReader(reader, sink.KustoIngestionProperties);
    }
    catch (Exception ex)
    {
        Console.WriteLine($"LoadToLogFileKusto error: {ex}");
    }
}
/// <summary>
/// Sample: bulk-update many items in a SQL Server table keyed by "Id" through a
/// column-limited reader.
/// </summary>
public void UpdateManyItems()
{
    IEnumerable<BulkUpdateItem> data = GetItems();

    using (var reader = new EnumerableDataReader<BulkUpdateItem>(data, new string[] { "Id", "Name" }))
    using (var bulkUpdate = new SqlServerBulkUpdate("connection string"))
    {
        bulkUpdate.DestinationTableName = "TableName";
        bulkUpdate.PrimaryKeyColumn = "Id";
        bulkUpdate.Update(reader);
    }
}
/// <summary>
/// Creates a temp key/value table whose column types are derived from the generic
/// arguments, then bulk-inserts the dictionary's pairs into it.
/// </summary>
private static void InsertValuesIntoTempTable<TKey, TValue>(
    IDatabase database,
    IDictionary<TKey, TValue> values,
    string tempTableName)
{
    string keySqlType = typeof(TKey).ToSqlDataType();
    string valueSqlType = typeof(TValue).ToSqlDataType();
    database.ExecuteNonQuery(
        $"CREATE TABLE {tempTableName}([Key] {keySqlType}, [Value] {valueSqlType})");

    using IBulkInsert bulkInsert = database.CreateBulkInsert();
    bulkInsert.DestinationTableName = tempTableName;

    using var reader = new EnumerableDataReader<KeyValuePair<TKey, TValue>>(values, new string[] { "Key", "Value" });
    bulkInsert.Insert(reader);
}
/// <summary>
/// Test fixture setup: creates two tables and two positioned readers — one using default
/// property-position mapping, one using [Column(Order = ...)]-driven custom ordinals.
/// </summary>
public EnumerableDataReaderTests()
{
    //
    // Setup for testing default mapping using the source entity's property positions as the ordinals.
    //
    _enumerable = new[] { new MyTestClass() };
    TestHelpers.ExecuteNonQuery(_connectionString, $"DROP TABLE IF EXISTS [dbo].[{_tableName}]");
    TestHelpers.ExecuteNonQuery(_connectionString,
        "CREATE TABLE [dbo].[" + _tableName + "](" +
        "[Id] [int] IDENTITY(1,1) NOT NULL," +
        "[Name] [nvarchar](50) NULL," +
        "[Data] [varbinary](max) NULL," +
        "CONSTRAINT [PK_" + _tableName + "] PRIMARY KEY CLUSTERED ([Id] ASC)" +
        ")");

    var propertyMappings = typeof(MyTestClass).BuildMappings();
    _dataReader = new EnumerableDataReader<MyTestClass>(_enumerable, propertyMappings);
    _dataReader.Read();

    //
    // Setup for testing custom mapping using [Column(Order = ...)] to specify ordinals on the source
    // entity. This is useful when the layout of the properties on the source entity doesn't match
    // the column ordinals in the database table (e.g., tables generated by EF Core <= 2.0, which
    // seems to create the columns by ordering the property names alphabetically).
    //
    _customOrderEnumerable = new[] { new MyCustomOrderTestClass() };
    TestHelpers.ExecuteNonQuery(_connectionString, $"DROP TABLE IF EXISTS [dbo].[{_customOrderTableName}]");
    TestHelpers.ExecuteNonQuery(_connectionString,
        "CREATE TABLE [dbo].[" + _customOrderTableName + "](" +
        "[Id] [int] IDENTITY(1,1) NOT NULL," +
        "[FirstName] [nvarchar](50) NULL," +
        "[MiddleName] [nvarchar](50) NULL," +
        "[LastName] [nvarchar](50) NULL," +
        "CONSTRAINT [PK_" + _customOrderTableName + "] PRIMARY KEY CLUSTERED ([Id] ASC)" +
        ")");

    var customOrderPropertyMappings = typeof(MyCustomOrderTestClass).BuildMappings();
    _customOrderDataReader = new EnumerableDataReader<MyCustomOrderTestClass>(_customOrderEnumerable, customOrderPropertyMappings);
    _customOrderDataReader.Read();
}
/// <summary>
/// After Close(), IsClosed flips to true and Read() throws ObjectDisposedException
/// carrying the reader's type name.
/// </summary>
public void ThrowsWhenReadAfterDisposed()
{
    var closedReader = new EnumerableDataReader<FakeEntity>(this.data);
    closedReader.IsClosed.Should().BeFalse();
    closedReader.Close();
    closedReader.IsClosed.Should().BeTrue();

    Action act = () => closedReader.Read();

    act.ShouldThrow<ObjectDisposedException>()
        .Where(ex => ex.ObjectName == "EnumerableDataReader");
}
/// <summary>
/// Serializing two struct rows through WriteAsDataArray must yield a JSON array of arrays,
/// with null for the absent Z value.
/// </summary>
public void DataReaderWriteAsArray()
{
    var rows = new TestStruct[]
    {
        new TestStruct(1, new DateTime(1997, 7, 1), 7.1f),
        new TestStruct(3, new DateTime(2046, 10, 1), null)
    };

    using (var r = new EnumerableDataReader<TestStruct>(rows))
    using (var w = new System.IO.StreamWriter(new System.IO.MemoryStream()))
    {
        r.WriteAsDataArray(w, Json.Manager);
        w.Flush();
        w.BaseStream.Seek(0, System.IO.SeekOrigin.Begin);

        using (var tr = new System.IO.StreamReader(w.BaseStream))
        {
            Assert.AreEqual("[[1,\"1997-07-01T00:00:00\",7.1],[3,\"2046-10-01T00:00:00\",null]]", tr.ReadToEnd());
        }
    }
}
/// <summary>
/// Bulk copies entities to the table's backing SQL Server table.
/// </summary>
/// <typeparam name="TEntity">The type of the entity.</typeparam>
/// <param name="table">The table.</param>
/// <param name="entities">The entities.</param>
public static void BulkCopy<TEntity>(this Table<TEntity> table, IEnumerable<TEntity> entities) where TEntity : class
{
    using (var bulk = new SqlBulkCopy(Utils.Instance.ConnectionString))
    // Dispose the reader (IDataReader is IDisposable); the original leaked it.
    using (var reader = new EnumerableDataReader<TEntity>(entities))
    {
        foreach (var column in reader.ColumnMappingList)
        {
            bulk.ColumnMappings.Add(column.Key, column.Value);
        }

        bulk.DestinationTableName = reader.TableName;
        bulk.WriteToServer(reader);
    }
}
// Token: 0x06000683 RID: 1667 RVA: 0x00026D08 File Offset: 0x00024F08
public void Update(IEnumerable<OrionFeature> features)
{
    using (SqlConnection sqlConnection = DatabaseFunctions.CreateConnection())
    using (SqlTransaction sqlTransaction = sqlConnection.BeginTransaction())
    {
        // Full refresh: clear the table, bulk-load the new feature set, then commit.
        SqlHelper.ExecuteNonQuery(SqlHelper.GetTextCommand("TRUNCATE TABLE OrionFeatures"), sqlConnection, sqlTransaction);

        var accessor = new SinglePropertyAccessor<OrionFeature>()
            .AddColumn("Name", (OrionFeature n) => n.Name)
            .AddColumn("Enabled", (OrionFeature n) => n.Enabled);

        using (var enumerableDataReader = new EnumerableDataReader<OrionFeature>(accessor, features))
        {
            SqlHelper.ExecuteBulkCopy("OrionFeatures", enumerableDataReader, sqlConnection, sqlTransaction, SqlBulkCopyOptions.Default);
        }

        sqlTransaction.Commit();
    }
}
/// <summary>
/// Builds one bulk-insert task per (target, table type) pair: loads each table's rows
/// from the target's XML document via the generic SoBase&lt;T&gt;.FromXmlDocument, wraps
/// them in an EnumerableDataReader and hands them to CreateBulkInsertTask, then prepares
/// the database for the target's schema.
/// </summary>
public virtual void Configure()
{
    foreach (ImportTarget target in Config.Targets)
    {
        foreach (Type type in Config.GetAllTableTypes())
        {
            // Resolve SoBase<type>.FromXmlDocument via reflection; `type` is only known at runtime.
            MethodInfo methodInfo = typeof(SoBase<>).MakeGenericType(type).GetMethod("FromXmlDocument");
            // NOTE(review): assumes FromXmlDocument is static (null receiver) and takes
            // (path, name) returning an IEnumerable — confirm against SoBase<T>.
            IEnumerable sequence = (IEnumerable)methodInfo.Invoke(null, new object[] { target.Path, target.Name });
            EnumerableDataReader reader = new EnumerableDataReader(sequence);
            CreateBulkInsertTask(type.Name, reader, target);
        }
        PrepareDatabase(target.Schema);
    }
}
/// <summary>
/// Reading struct rows: values come back through the typed getters, and the null field
/// surfaces as a null GetValue result on the second row.
/// </summary>
public void DataReaderReadStruct()
{
    var rows = new TestStruct[]
    {
        new TestStruct(1, new DateTime(1997, 7, 1), 7.1f),
        new TestStruct(3, new DateTime(2046, 10, 1), null)
    };

    using (var r = new EnumerableDataReader<TestStruct>(rows))
    {
        var xi = r.GetOrdinal("X");
        var yi = r.GetOrdinal("Y");
        var zi = r.GetOrdinal("Z");

        Assert.AreEqual(true, r.Read());
        Assert.AreEqual(1, r.GetInt32(xi));
        Assert.AreEqual(new DateTime(1997, 7, 1), r.GetDateTime(yi));
        Assert.AreEqual(7.1f, r.GetFloat(zi));

        Assert.AreEqual(true, r.Read());
        Assert.AreEqual(3, r.GetInt32(xi));
        Assert.AreEqual(new DateTime(2046, 10, 1), r.GetDateTime(yi));
        Assert.IsNull(r.GetValue(zi));
    }
}
/// <summary>
/// Test setup: creates the target table, auto-discovers column mappings for MyTestClass,
/// builds the reader and positions it on the first row.
/// </summary>
public void Initialize_Test()
{
    this.enumerable = new[] { new MyTestClass() };

    TestHelpers.ExecuteNonQuery(connectionString,
        "CREATE TABLE [dbo].[" + tableName + "](" +
        "[Id] [int] IDENTITY(1,1) NOT NULL," +
        "[Name] [nvarchar](50) NULL," +
        "CONSTRAINT [PK_" + tableName + "] PRIMARY KEY CLUSTERED ([Id] ASC)" +
        ")");

    var mapping = MapBuilder.MapAllProperties<MyTestClass>();
    var propertyMappings = ((MapBuilderContext<MyTestClass>)mapping).GetPropertyMappings();
    AutoDiscover.Mappings(connectionString, tableName, propertyMappings);

    this.dataReader = new EnumerableDataReader<MyTestClass>(enumerable, propertyMappings);
    dataReader.Read();
}
/// <summary>
/// Creates a temp table whose columns mirror TValue's mapped columns, then bulk-inserts
/// the values into it.
/// </summary>
private static void InsertValuesIntoTempTable<TValue>(
    IDatabase database,
    IEnumerable<TValue> values,
    string tempTableName)
{
    TableInfo tableInfo = Database.DatabaseMapper.GetTableInfo<TValue>();
    string columns = GetColumnsWithSqlTypes(tableInfo, typeof(TValue));
    database.ExecuteNonQuery($"CREATE TABLE {tempTableName} ( {columns} )");

    using IBulkInsert bulkInsert = database.CreateBulkInsert();
    bulkInsert.DestinationTableName = tempTableName;

    using var reader = new EnumerableDataReader<TValue>(values, GetColumns(tableInfo, typeof(TValue)));
    bulkInsert.Insert(reader);
}
/// <summary>
/// Constructing a reader over a primitive element type (int) with wrapping disabled —
/// presumably asserted to throw by a test attribute not visible here.
/// </summary>
public void DataReaderInt32NotSupported()
{
    var source = new int[] { 1, 2, 3 };
    var reader = new EnumerableDataReader<int>(source, false);
}
/// <summary>
/// GetValue on a reader that has been closed must raise <see cref="ObjectDisposedException"/>.
/// </summary>
public void ThrowsWhenValueAccessedAfterDisposal()
{
    var reader = new EnumerableDataReader<FakeEntity>(this.data);
    reader.Close();

    Action readValue = () => reader.GetValue(0);

    readValue.ShouldThrow<ObjectDisposedException>();
}
/// <summary>
/// Close() flips IsClosed, and a subsequent Read() must raise ObjectDisposedException
/// whose ObjectName identifies the reader type.
/// </summary>
public void ThrowsWhenReadAfterDisposed()
{
    var reader = new EnumerableDataReader<FakeEntity>(this.data);
    reader.IsClosed.Should().BeFalse();

    reader.Close();
    reader.IsClosed.Should().BeTrue();

    Action readAfterClose = () => reader.Read();

    readAfterClose.ShouldThrow<ObjectDisposedException>()
        .Where(ex => ex.ObjectName == "EnumerableDataReader");
}
/// <summary>
/// Constructing a reader over string elements with wrapping disabled —
/// presumably asserted to throw by a test attribute not visible here.
/// </summary>
public void DataReaderStringNotSupported()
{
    var source = new string[] { "1", "2", "3" };
    var reader = new EnumerableDataReader<string>(source, false);
}