/// <summary>
/// Initializes the component: runs the merge/temp-table query against the JDE
/// database, then bulk-loads the in-memory retail stock list into the temp table.
/// Failures are logged and swallowed (best-effort init).
/// </summary>
/// <param name="context">Concentrator data context forwarded to the base initializer.</param>
public override void Init(ConcentratorDataContext context)
{
    base.Init(context);
    try
    {
        using (SqlConnection connection = new SqlConnection(_jdeConnectionString))
        {
            connection.Open();
            using (SqlCommand command = new SqlCommand(_mergeTabelQuery, connection))
            {
                // Executed for its side effect only; the affected-row count was never used.
                command.ExecuteNonQuery();
                using (GenericCollectionReader<FB41021S> reader = new GenericCollectionReader<FB41021S>(_retailStockList))
                {
                    // Batch size was deliberately lowered from 10000 to 1000 (original TODO);
                    // NOTE(review): confirm 1000 is the intended batch size.
                    BulkLoad(_retailStockTempTable, 1000, reader, _jdeConnectionString);
                }
            }
        }
    }
    catch (Exception ex)
    {
        // Fix: the exception was previously discarded; include it so the failure
        // is diagnosable. Also corrects the "execture" typo in the message.
        _log.Error("Error executing bulk copy", ex);
    }
}
/// <summary>
/// Initializes the component: runs the product attribute category query against
/// the Concentrator database, then bulk-loads the product attribute values.
/// Failures are logged and swallowed (best-effort init).
/// </summary>
/// <param name="context">Concentrator data context forwarded to the base initializer.</param>
public override void Init(ConcentratorDataContext context)
{
    base.Init(context);
    try
    {
        using (SqlConnection connection = new SqlConnection(_connectionString))
        {
            connection.Open();
            using (SqlCommand command = new SqlCommand(_concentratorProductAttributeCategoryQuery, connection))
            {
                // Executed for its side effect only; the affected-row count was never used.
                command.ExecuteNonQuery();
                using (GenericCollectionReader<WposProductAttributeValues> reader = new GenericCollectionReader<WposProductAttributeValues>(_products))
                {
                    BulkLoad(_concentratorProductAttributeValueTable, 500, reader, _connectionString);
                }
            }
        }
    }
    catch (Exception ex)
    {
        // Fix: include the caught exception in the log entry; it was previously
        // captured but never reported, hiding the root cause.
        _log.Error("Error executing bulk copy", ex);
    }
}
/// <summary>
/// Synchronizes new content product groups for a connector: stages the rows in a
/// per-connector temp table via SqlBulkCopy, MERGEs them into ContentProductGroup
/// (inserting missing rows and deleting non-custom rows absent from the source),
/// then drops the temp table.
/// </summary>
/// <param name="newCpg">Rows to stage and merge.</param>
/// <param name="db">Database wrapper used for the DDL and MERGE statements.</param>
/// <param name="connectorID">Connector whose product groups are synchronized.</param>
private void SyncNewContentProductGroups(List<ContentProductGroupModel> newCpg, Database db, int connectorID)
{
    // Temp table name is derived from an int, so the string.Format'ed SQL below
    // is not injectable from user input.
    string tableName = string.Format("Temp_Content_Product_Group_{0}", connectorID);
    try
    {
        // Drop a leftover temp table from a previous (failed) run, then recreate it.
        db.Execute(string.Format(@"IF (EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = 'dbo' AND TABLE_NAME = '{0}')) BEGIN drop table {0} END ", tableName));
        var q = string.Format(@"CREATE TABLE {0}( ProductID int not null, ConnectorID int not null, ProductGroupMappingID int not null, CreatedBy int not null)", tableName);
        db.Execute(q); //create temp
        using (var connection = new SqlConnection(Connection))
        {
            connection.Open();
            using (SqlBulkCopy copyBulk = new SqlBulkCopy(connection))
            {
                copyBulk.BatchSize = 100000;
                copyBulk.BulkCopyTimeout = 3600;
                copyBulk.DestinationTableName = tableName;
                copyBulk.NotifyAfter = 100000;
                copyBulk.SqlRowsCopied += (s, e) => log.DebugFormat("{0} Records inserted ", e.RowsCopied);
                using (var collection = new GenericCollectionReader<ContentProductGroupModel>(newCpg))
                {
                    copyBulk.WriteToServer(collection);
                }
            }
        }
        // Insert rows missing from the target; delete non-custom target rows for
        // this connector that no longer exist in the staged source.
        db.Execute(string.Format(@"MERGE ContentProductGroup trg using {0} src on src.connectorid = trg.connectorid and src.productid = trg.productid and src.productgroupmappingid = trg.productgroupmappingid and trg.iscustom = 0 when not matched by target then insert (productid, connectorid, productgroupmappingid, createdby, [exists], isexported) values (src.productid, src.connectorid, src.productgroupmappingid, src.createdby, 1, 1) when not matched by source and trg.connectorid = {1} and trg.iscustom = 0 then delete;", tableName, connectorID));
        db.Execute(string.Format("Drop table {0}", tableName));
    }
    catch (Exception e)
    {
        log.Debug("Synchronization of content product groups failed", e);
        // Fix: rethrow with "throw;" instead of "throw e;" to preserve the
        // original stack trace.
        throw;
    }
}
/// <summary>
/// Initializes the component: for each data set (descriptions, series, categories,
/// related products, stock-and-price) runs the corresponding temp-table query and
/// bulk-loads the in-memory collection into its temp table.
/// Failures are logged and swallowed (best-effort init); note that a failure in an
/// early load skips the remaining loads.
/// </summary>
/// <param name="context">Concentrator data context forwarded to the base initializer.</param>
public override void Init(ConcentratorDataContext context)
{
    base.Init(context);
    try
    {
        var _tempDescriptions = _data.Descriptions;
        context.ExecuteStoreCommand(_tempDescriptionsQuery);
        using (GenericCollectionReader<tempDescriptionsModel> reader = new GenericCollectionReader<tempDescriptionsModel>(_tempDescriptions))
        {
            BulkLoad(_tempDescriptionsTable, 500, reader);
        }

        var _tempSeries = _data.Series;
        context.ExecuteStoreCommand(_tempSeriesQuery);
        using (GenericCollectionReader<tempSeriesModel> reader = new GenericCollectionReader<tempSeriesModel>(_tempSeries))
        {
            BulkLoad(_tempSeriesTable, 500, reader);
        }

        context.ExecuteStoreCommand(_tempCategoriesQuery);
        var _tempCategories = _data.Categories;
        using (GenericCollectionReader<tempCategoriesModel> reader = new GenericCollectionReader<tempCategoriesModel>(_tempCategories))
        {
            BulkLoad(_tempCategoriesTable, 500, reader);
        }

        context.ExecuteStoreCommand(_tempRelatedProductsQuery);
        var _tempRelatedProducts = _data.RelatedProducts;
        using (GenericCollectionReader<tempRelatedProductsModel> reader = new GenericCollectionReader<tempRelatedProductsModel>(_tempRelatedProducts))
        {
            BulkLoad(_tempRelatedProductsTable, 500, reader);
        }

        context.ExecuteStoreCommand(_tempStockandPriceQuery);
        var _tempStockandPrice = _data.StockandPrice;
        using (GenericCollectionReader<tempStockandPriceModel> reader = new GenericCollectionReader<tempStockandPriceModel>(_tempStockandPrice))
        {
            BulkLoad(_tempStockandPriceTable, 500, reader);
        }
    }
    catch (Exception ex)
    {
        // Fix: include the caught exception in the log entry; it was previously
        // captured but never reported, hiding the root cause.
        _log.Error("Error executing bulk copy", ex);
    }
}
/// <summary>
/// Initializes the component: prepares the vendor barcode import table and
/// bulk-loads the barcode collection into it.
/// Failures are logged and swallowed (best-effort init).
/// </summary>
/// <param name="context">Concentrator data context forwarded to the base initializer.</param>
public override void Init(ConcentratorDataContext context)
{
    base.Init(context);
    try
    {
        context.ExecuteStoreCommand(__vendorBarcodeTableQuery);
        using (GenericCollectionReader<VendorImportBarcode> reader = new GenericCollectionReader<VendorImportBarcode>(_barcodes))
        {
            BulkLoad(__vendorBarcodeImportTableName, 500, reader);
        }
    }
    catch (Exception ex)
    {
        // Fix: the bare "catch" dropped the exception entirely; capture and log it
        // so failures are diagnosable. Also corrects the "execture" typo.
        _log.Error("Error executing bulk copy", ex);
    }
}
/// <summary>
/// Initializes the component: runs the merge-table query and bulk-loads the
/// insurance (additional information) list into the attributes table.
/// Failures are logged and swallowed (best-effort init).
/// </summary>
/// <param name="context">Concentrator data context forwarded to the base initializer.</param>
public override void Init(ConcentratorDataContext context)
{
    base.Init(context);
    try
    {
        context.ExecuteStoreCommand(_mergeTabelQuery);
        using (GenericCollectionReader<ConnectFlowAdditionalInformation> reader = new GenericCollectionReader<ConnectFlowAdditionalInformation>(_insuranceList))
        {
            BulkLoad(_attributesTable, 1000, reader);
        }
    }
    catch (Exception ex)
    {
        // Fix: include the caught exception (previously discarded) and correct
        // the "execture" typo in the message.
        _log.Error("Error executing bulk copy", ex);
    }
}
/// <summary>
/// Initializes the component: prepares the vendor stock import table and
/// bulk-loads the retail stock assortment into it.
/// Failures are logged and swallowed (best-effort init).
/// </summary>
/// <param name="context">Concentrator data context forwarded to the base initializer.</param>
public override void Init(ConcentratorDataContext context)
{
    base.Init(context);
    try
    {
        context.ExecuteStoreCommand(__vendorStockTableQuery);
        using (GenericCollectionReader<VendorImportRetailStock> reader = new GenericCollectionReader<VendorImportRetailStock>(_stockAssortment))
        {
            BulkLoad(__vendorStockImportTableName, 500, reader);
        }
    }
    catch (Exception ex)
    {
        // Fix: include the caught exception (previously discarded) and correct
        // the "execture" typo in the message.
        _log.Error("Error executing bulk copy", ex);
    }
}
/// <summary>
/// Synchronizes product content for a connector: stages the products in a temp
/// table via SqlBulkCopy, indexes it, filters out product matches, then MERGEs
/// into Content (insert new, update matched, delete rows for this connector that
/// disappeared from the source). The temp table is left for the caller to drop.
/// </summary>
/// <param name="connectorID">Connector whose content is synchronized.</param>
/// <param name="db">Database wrapper used for the DDL and MERGE statements.</param>
/// <param name="products">Products to stage; up-cast to <c>VendorProductInfo</c> for the bulk reader.</param>
/// <param name="tableName">Name of the temp staging table (built by the caller, not user input).</param>
private void SyncContent(int connectorID, Database db, List<VendorProductInfoWithWehkampInformation> products, string tableName)
{
    // Up-cast to the base element type the bulk reader works with.
    var productsConverted = (from p in products select p as VendorProductInfo).ToList();
    // Drop a leftover temp table from a previous (failed) run, then recreate it.
    db.Execute(string.Format(@"IF (EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = 'dbo' AND TABLE_NAME = '{0}')) BEGIN drop table {0} END ", tableName));
    var q = string.Format(@"CREATE TABLE {0}( ProductID int not null, ConnectorID int not null, ConnectorPublicationRuleID int not null, ShortDescription nvarchar(2000) null, LongDescription nvarchar(max) null, LineType nvarchar(50) null, LedgerClass nvarchar(50) null, ProductDesk nvarchar(50) null, ExtendedCatalog nvarchar(20) null, ProductMatchID int null, PublicationRuleIndex int not null )", tableName);
    db.Execute(q); //create temp
    using (var connection = new SqlConnection(Connection))
    {
        connection.Open();
        using (SqlBulkCopy copyBulk = new SqlBulkCopy(connection))
        {
            copyBulk.BatchSize = 10000;
            copyBulk.BulkCopyTimeout = 600;
            copyBulk.DestinationTableName = tableName;
            copyBulk.NotifyAfter = 10000;
            copyBulk.SqlRowsCopied += (s, e) => log.DebugFormat("{0} Records inserted ", e.RowsCopied);
            // Fix: feed the reader the converted list; it was computed but never
            // used (the raw derived-typed list was passed instead).
            using (var collection = new GenericCollectionReader<VendorProductInfo>(productsConverted))
            {
                copyBulk.WriteToServer(collection);
            }
        }
    }
    // Index the staging table so the MERGE join on (ProductID, ConnectorID) is fast.
    db.Execute(string.Format(@"CREATE NONCLUSTERED INDEX CIN ON {0} (ProductID, ConnectorID)", tableName));
    RemoveProductMatchesFromTempTable(tableName, db);
    db.Execute(string.Format(@"merge content trg using {0} source on trg.productid = source.productid and trg.connectorid = source.connectorid when not matched by target then insert (ProductID, ConnectorID, ShortDescription, LongDescription, LineType, LedgerClass,ProductDesk, ExtendedCatalog, CreatedBy, ConnectorPublicationRuleID) values (source.ProductID, source.ConnectorID, source.ShortDescription, source.LongDescription, source.LineType, source.LedgerClass,source.ProductDesk,1,1, source.connectorpublicationruleid) when matched 
then update set trg.shortdescription = source.shortdescription, trg.LongDescription = source.longdescription, trg.linetype = source.linetype, trg.ledgerclass = source.ledgerclass, trg.ProductDesk = source.ProductDesk, trg.extendedCatalog = 1, trg.ConnectorPublicationRuleID = source.connectorpublicationruleid when not matched by source and trg.connectorid = {1} then delete; ", tableName, connectorID));
}
/// <summary>
/// Synchronizes new content product groups for a connector: stages rows in a
/// per-connector temp table via SqlBulkCopy, MERGEs them into
/// ContentProductGroup (joined against Content), MERGEs the master-group
/// mapping products (joined against the content temp table), then drops the
/// staging table.
/// </summary>
/// <param name="newCpg">Rows to stage and merge.</param>
/// <param name="db">Database wrapper used for the DDL and MERGE statements.</param>
/// <param name="connectorID">Connector whose product groups are synchronized.</param>
/// <param name="contentTableName">Name of the content temp table created earlier in the sync run.</param>
private void SyncNewContentProductGroups(List<ContentProductGroupModel> newCpg, Database db, int connectorID, string contentTableName)
{
    // Temp table name is derived from an int, so the string.Format'ed SQL below
    // is not injectable from user input.
    string tableName = string.Format("Temp_Content_Product_Group_{0}", connectorID);
    try
    {
        int productGroupmappingID = getProductGroupMappingID(db, connectorID);
        // Drop a leftover temp table from a previous (failed) run, then recreate it.
        db.Execute(string.Format(@"IF (EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = 'dbo' AND TABLE_NAME = '{0}')) BEGIN drop table {0} END ", tableName));
        var q = string.Format(@"CREATE TABLE {0}( ProductID int not null, ConnectorID int not null, MasterGroupMappingID int not null, CreatedBy int not null)", tableName);
        db.Execute(q); //create temp
        using (var connection = new SqlConnection(Connection))
        {
            connection.Open();
            using (SqlBulkCopy copyBulk = new SqlBulkCopy(connection))
            {
                copyBulk.BatchSize = 100000;
                copyBulk.BulkCopyTimeout = 180;
                copyBulk.DestinationTableName = tableName;
                copyBulk.NotifyAfter = 100000;
                copyBulk.SqlRowsCopied += (s, e) => log.DebugFormat("{0} Records inserted ", e.RowsCopied);
                using (var collection = new GenericCollectionReader<ContentProductGroupModel>(newCpg))
                {
                    copyBulk.WriteToServer(collection);
                }
            }
        }
        // Insert staged rows that exist in Content but not yet in the target;
        // delete non-custom target rows for this connector absent from the source.
        db.Execute(string.Format(@"MERGE ContentProductGroup trg using ( select t.* from {0} t inner join content c on c.productid= t.productid and t.connectorid = c.connectorid ) src on src.connectorid = trg.connectorid and src.productid = trg.productid and src.MasterGroupMappingID = trg.MasterGroupMappingID when not matched by target then insert (productid, connectorid, MasterGroupMappingID, createdby, [exists], productgroupmappingid) values (src.productid, src.connectorid, src.MasterGroupMappingID, src.createdby, 1, {2}) when not matched by source and trg.connectorid = {1} and trg.iscustom = 0 then delete ;", tableName, connectorID, productGroupmappingID));
        // Keep mastergroupmappingproduct in step with the staged rows, picking up
        // the publication rule from the content temp table.
        db.Execute(string.Format(@"merge mastergroupmappingproduct trg using ( select t.mastergroupmappingid, t.productid, tc.ConnectorPublicationRuleID from {0} t inner join {1} tc on 
t.productid = tc.productid and t.connectorid = tc.connectorid ) src on src.mastergroupmappingid = trg.mastergroupmappingid and src.productid = trg.productid when not matched by target then insert (Mastergroupmappingid, productid, isapproved, iscustom, isproductmapped, connectorpublicationruleid) values (src.Mastergroupmappingid, src.productid, 0, 0, 1, src.connectorpublicationruleid) when not matched by source and iscustom = 0 and trg.mastergroupmappingid in (select mastergroupmappingid from mastergroupmapping where connectorid = {2}) then delete;", tableName, contentTableName, connectorID));
        db.Execute(string.Format("Drop table {0}", tableName));
    }
    catch (Exception e)
    {
        log.Debug("Synchronization of content product groups failed", e);
        // Fix: rethrow with "throw;" instead of "throw e;" to preserve the
        // original stack trace.
        throw;
    }
}
/// <summary>
/// Initializes the component: prepares the vendor stock import table and
/// bulk-loads the stock collection into it.
/// Failures are logged (with the exception) and swallowed (best-effort init).
/// </summary>
/// <param name="context">Concentrator data context forwarded to the base initializer.</param>
public override void Init(ConcentratorDataContext context)
{
    base.Init(context);
    try
    {
        context.ExecuteStoreCommand(_vendorStockTableQuery);
        using (GenericCollectionReader<Concentrator.Objects.Vendors.Bulk.VendorAssortmentBulk.VendorImportStock> reader = new GenericCollectionReader<Concentrator.Objects.Vendors.Bulk.VendorAssortmentBulk.VendorImportStock>(_stock))
        {
            BulkLoad(_vendorStockImportTableName, 1000, reader);
        }
    }
    catch (Exception e)
    {
        // Fix: correct the "execture" typo so the message matches the other
        // bulk-copy initializers in this codebase.
        _log.Error("Error executing bulk copy", e);
    }
}