// NOTE: a retired, fully commented-out audit-trail exporter (ProcessTaxonomyNodeTrail)
// used to live here; it was dead code and has been removed — recover it from version
// control if the audit-trail export is ever revived.

/// <summary>
/// Exports every schema attached to <paramref name="node"/> that has at least one
/// active SchemaData row, ordered alphabetically by attribute name. A failure is
/// logged as a warning (with the full inner-exception chain) instead of aborting
/// the overall export.
/// </summary>
/// <param name="node">The taxonomy node whose schemas are exported.</param>
private void ProcessTaxonomySchemas(TaxonomyInfo node)
{
    try
    {
        var schemas = node.SchemaInfos
            .Where(sch => sch.SchemaDatas.Any(sd => sd.Active))
            .OrderBy(sch => sch.Attribute.AttributeName);

        foreach (var schema in schemas)
        {
            ProcessSchema(schema);
        }

        //if (ExportAuditTrail)
        //    ProcessSchemaTrails(node);
    }
    catch (Exception exception)
    {
        // Flatten the whole exception chain (message + stack trace per level) into one warning.
        var message = string.Empty;
        for (var ex = exception; ex != null; ex = ex.InnerException)
        {
            message += ex.Message + Environment.NewLine;
            message += ex.StackTrace + Environment.NewLine;
        }
        CurrentLogWriter.Warn("There was a problem processing schema for node." + Environment.NewLine + message);
    }
}
/// <summary>
/// Exports all active product-type SKUs assigned directly to <paramref name="node"/>.
/// A dedicated data context is opened per node so the SKU object graph can be released
/// as soon as the node is finished (memory conservation). SKUs are processed in
/// parallel; any failure is logged as a warning instead of aborting the export.
/// </summary>
/// <param name="node">The taxonomy node whose SKUs are exported.</param>
private void ProcessTaxonomySkus(TaxonomyInfo node)
{
    try
    {
        // Independent DataContext keeps this node's data out of any long-lived context.
        using (var dc = new AryaDbDataContext(Arguments.ProjectId, Arguments.UserId))
        {
            var productType = Sku.ItemType.Product.ToString();
            var allSkus = dc.SkuInfos
                .Where(si => si.Active && si.TaxonomyID == node.ID)
                .Select(si => si.Sku)
                .Where(sku => sku.SkuType == productType);

            var skuIds = ((AdvancedExportArgs)Arguments)
                .GetFilteredSkuList(allSkus)
                .Select(s => s.ID)
                .ToList();

            skuIds.AsParallel().ForAll(ProcessSku);
        }
    }
    catch (Exception exception)
    {
        // Flatten the whole exception chain into a single warning entry.
        var message = string.Empty;
        for (var ex = exception; ex != null; ex = ex.InnerException)
        {
            message += ex.Message + Environment.NewLine;
            message += ex.StackTrace + Environment.NewLine;
        }
        CurrentLogWriter.Warn("There was a problem processing skus in node." + Environment.NewLine + message);
    }
}
/// <summary>
/// Recursively exports all active child taxonomy nodes of <paramref name="node"/>.
/// Any failure is logged as a warning (with the full inner-exception chain) and
/// does not abort the overall export.
/// </summary>
/// <param name="node">The parent taxonomy node.</param>
private void ProcessTaxonomyChildren(TaxonomyInfo node)
{
    try
    {
        var activeChildren = node.ChildTaxonomyDatas
            .Where(td => td.Active)
            .Select(td => td.TaxonomyInfo);

        foreach (var child in activeChildren)
        {
            ProcessTaxonomyNode(node, child);
        }
    }
    catch (Exception exception)
    {
        // Flatten the whole exception chain into a single warning entry.
        var message = string.Empty;
        for (var ex = exception; ex != null; ex = ex.InnerException)
        {
            message += ex.Message + Environment.NewLine;
            message += ex.StackTrace + Environment.NewLine;
        }
        CurrentLogWriter.Warn("There was a problem processing children for node." + Environment.NewLine + message);
    }
}
/// <summary>
/// Entry point for the catalog export: loads the project, builds the ProductCatalog
/// header and the synthetic root taxonomy node, serializes both to disk, then walks
/// the taxonomy tree via ProcessTaxonomyNodes().
/// </summary>
/// <remarks>
/// Throws InvalidOperationException (via First) if the project or its creating user
/// cannot be found. Output order: root node file first, then the catalog file.
/// </remarks>
private void ProcessCatalog()
{
    _project = (from project in CurrentDb.Projects
                where project.ID == _args.ProjectId
                select project).First();
    var aryaUser = CurrentDb.Users.First(u => u.ID == _project.CreatedBy);
    // The catalog's "last updated" stamp is taken from the project's creation metadata.
    var lastUpdatedTimestamp = TimestampRecordType.FromValues(_project.CreatedOn, User.FromAryaUser(aryaUser));
    CurrentLogWriter.Debug("Processing " + _project);
    _productCatalog = new ProductCatalog
    {
        Id = _project.ID,
        Company = _project.ClientDescription,
        Type = "STANDARD",
        ProductCatalogNames = ProductCatalogProductCatalogName.FromName(_project.SetName),
        TaxonomyMetaDataLanguageVersionss = ProductCatalogTaxonomyMetaDataLanguageVersions.FromAryaProject(_project),
        SchemaMetaDataLanguageVersionss = ProductCatalogSchemaMetaDataLanguageVersions.FromAryaProject(_project),
        LastUpdatedTimestamp = lastUpdatedTimestamp
    };
    // Synthetic root node: uses the project ID both as its own ID and as catalog ID,
    // with Guid.Empty as its parent (no parent).
    var taxonomyNode = TaxonomyNode.FromValues(_project.CreatedOn, _project.ID, _project.ToString(), Guid.Empty, _project.ID);
    taxonomyNode.IsRoot = true;
    taxonomyNode.SerializeObject(GetSaveFilePath("Node", _project.ID.ToString()));
    _productCatalog.SerializeObject(GetSaveFilePath("Catalog", _productCatalog.Id.ToString()));
    ProcessTaxonomyNodes();
}
/// <summary>
/// Exports a single SKU under the given item node: re-reads the SKU from a fresh
/// data context, resolves a display name (an active "primary keyword"/"psp"
/// attribute value, falling back to "Item &lt;ItemID&gt;"), exports its attribute
/// values, and serializes the result. Failures are logged and recorded as a
/// worker warning instead of propagating.
/// </summary>
/// <param name="node">The parent item node in the export tree.</param>
/// <param name="sku">The SKU to export (only its ID and ItemID are read here).</param>
private void ProcessSku(ItemNode node, Sku sku)
{
    try
    {
        using (var dc = new AryaDbDataContext(Arguments.ProjectId, Arguments.UserId))
        {
            var dbItem = dc.Skus.Single(s => sku.ID == s.ID);
            // Display name: highest-sorting active "primary keyword"/"psp" value wins.
            var psp = (from ei in dbItem.EntityInfos
                       from ed in ei.EntityDatas
                       where ed.Active &&
                             (ed.Attribute.AttributeName.ToLower().Contains("primary keyword") ||
                              ed.Attribute.AttributeName.ToLower().Contains("psp"))
                       orderby ed.Attribute.AttributeName descending
                       select ed.Value).FirstOrDefault() ?? "Item " + dbItem.ItemID;
            var resultItem = Item.FromValues(node, dbItem.ID, dbItem.ItemID, psp);
            ProcessAttributeValues(dbItem, resultItem);
            resultItem.SerializeObject(GetSaveFilePath("Item", dbItem.ID.ToString()));
        }
    }
    catch (Exception ex)
    {
        // BUG FIX: the original loop *assigned* (message = ...) instead of appending,
        // so only the innermost exception's message survived and the
        // "Method: ProcessSku" prefix was lost. Accumulate the whole chain instead.
        var message = Environment.NewLine + "Method: ProcessSku";
        for (var e = ex; e != null; e = e.InnerException)
        {
            message += Environment.NewLine + e.Message;
        }
        CurrentLogWriter.Error(ex.Source + message + Environment.NewLine + ex.StackTrace);

        if (Summary.Warnings == null)
        {
            Summary.Warnings = new List<WorkerWarning>();
        }
        Summary.Warnings.Add(new WorkerWarning
        {
            ErrorMessage = ex.Message,
            ErrorDetails = ex.StackTrace,
            LineData = sku.ItemID
        });
    }
}
/// <summary>
/// Loads the worker arguments from the serialized arguments file. When no concrete
/// arguments type is configured (<c>ArgumentsType == null</c>), falls back to a
/// default-constructed WorkerArguments instead of touching the file system.
/// </summary>
private void ReadArgumentsFile()
{
    if (ArgumentsType == null)
    {
        // Nothing to deserialize — use an empty argument set.
        Arguments = new WorkerArguments();
        return;
    }

    if (CurrentLogWriter != null)
    {
        CurrentLogWriter.DebugFormat("Reading Arguments");
    }

    var serializerSettings = ArgumentsType.GetSharpSerializerXmlSettings(WorkerArguments.ArgumentsFileRootName);
    var xmlSerializer = new SharpSerializer(serializerSettings);
    var argumentsFilePath = Path.Combine(ArgumentDirectoryPath, ArgumentFileName);
    Arguments = (WorkerArguments)xmlSerializer.Deserialize(argumentsFilePath);
}
/// <summary>
/// Returns the Arya project databases present on the server, optionally filtered by
/// code-base version (<c>Queries.DatabaseVersion</c>) and by an explicit name list
/// (<c>Queries.DatabaseNames</c>, comma/semicolon separated; "*" means all).
/// An empty result is logged as an error but still returned.
/// </summary>
/// <exception cref="Exception">Thrown when Queries.DatabaseNames is blank.</exception>
private List<string> GetDatabases()
{
    if (string.IsNullOrWhiteSpace(Queries.DatabaseNames))
    {
        throw new Exception("No Databases Found!");
    }

    var databases = new List<string>();
    using (var dc = new SqlConnection(Framework.Properties.Settings.Default.AryaDbConnectionString))
    {
        dc.Open();
        // FIX: format the version with the invariant culture — locales that use ','
        // as the decimal separator would otherwise produce broken SQL.
        var whereClause = Queries.DatabaseVersion > 0.0
            ? " WHERE AryaCodeBaseVersion=" +
              Queries.DatabaseVersion.ToString(System.Globalization.CultureInfo.InvariantCulture)
            : string.Empty;

        // FIX: SqlCommand is IDisposable — dispose it deterministically along with the reader.
        using (var dbQuery = new SqlCommand(
            "SELECT name FROM sys.databases WHERE name IN (SELECT DatabaseName FROM Arya..Project" +
            whereClause + ")", dc))
        using (var dbs = dbQuery.ExecuteReader())
        {
            if (dbs.HasRows)
            {
                var dbTable = new DataTable("SqlQueries");
                dbTable.Load(dbs);
                databases.AddRange(dbTable.Rows.Cast<DataRow>().Select(row => row[0].ToString()));
            }
        }
    }

    if (Queries.DatabaseNames != "*")
    {
        // Case-insensitive filter against the requested names (FIX: ordinal comparison
        // instead of culture-sensitive ToLower).
        var requested = new HashSet<string>(
            Queries.DatabaseNames.Split(new[] { ',', ';' }, StringSplitOptions.RemoveEmptyEntries)
                .Select(db => db.Trim()),
            StringComparer.OrdinalIgnoreCase);
        databases = databases.Where(requested.Contains).ToList();
    }

    if (databases.Count == 0)
    {
        CurrentLogWriter.Error("No Databases Found!");
        //throw new Exception("No Databases Found!");
    }
    return databases;
}
/// <summary>
/// Collects every taxonomy node reachable from the requested taxonomy IDs (expanded
/// via AllChildren — assumed to cover the full subtree, TODO confirm), runs duplicate
/// detection on each node, and appends the duplicate-SKU table to the export output.
/// </summary>
protected override void FetchExportData()
{
    CurrentLogWriter.Info("Fetching SKUs");
    _args = (ExportArgs)Arguments;

    List<Guid> allTaxIds;
    using (var dc = new AryaDbDataContext(_args.ProjectId, _args.UserId))
    {
        var selectedNodes = dc.TaxonomyInfos
            .Where(ti => _args.TaxonomyIds.Contains(ti.ID))
            .ToList();
        allTaxIds = selectedNodes
            .SelectMany(ti => ti.AllChildren)
            .Select(ti => ti.ID)
            .ToList();
    }

    _iNodeCount = allTaxIds.Count;
    // Sequential processing (a parallel variant was tried and left commented out upstream).
    foreach (var taxId in allTaxIds)
    {
        ProcessNode(taxId);
    }

    CurrentLogWriter.Info("Saving Results");
    ExportDataTables.Add(DuplicateSkusTable);
}
/// <summary>
/// Runs duplicate detection within one taxonomy node: for every item it finds the
/// best-matching other item (highest MatchPercent among non-null pair results) and
/// records that pair. Progress is counted thread-safely; failures are logged as
/// warnings so one bad node cannot stop the run.
/// </summary>
/// <param name="taxId">ID of the taxonomy node to analyze.</param>
private void ProcessNode(Guid taxId)
{
    Interlocked.Increment(ref _iNodeCtr);
    CurrentLogWriter.InfoFormat("Processing Nodes: {0} of {1}", _iNodeCtr, _iNodeCount);
    try
    {
        var itemValues = GetItemValues(taxId);
        foreach (var sourceItem in itemValues)
        {
            // Best candidate = highest match percentage among all comparable pairs.
            var bestMatch = itemValues
                .Where(targetItem => targetItem.ItemId != sourceItem.ItemId)
                .Select(targetItem => ProcessItemPair(sourceItem, targetItem))
                .Where(result => result != null)
                .OrderByDescending(result => result.MatchPercent)
                .FirstOrDefault();

            if (bestMatch != null)
            {
                AddRowToTable(bestMatch);
            }
        }
    }
    catch (Exception exception)
    {
        // Flatten the whole exception chain into a single warning entry.
        var message = string.Empty;
        for (var ex = exception; ex != null; ex = ex.InnerException)
        {
            message += ex.Message + Environment.NewLine;
            message += ex.StackTrace + Environment.NewLine;
        }
        CurrentLogWriter.Warn("There was a problem processing skus in node " + taxId +
                              Environment.NewLine + message);
    }
}
/// <summary>
/// Imports attribute definitions: validates and de-duplicates the interchange
/// records, reports warnings for invalid entries, creates the attributes that do
/// not yet exist, saves the changes, and writes a summary report. When the
/// CreateMissingAttributes import option is off the whole step is skipped with a
/// warning.
/// </summary>
public override void Run()
{
    try
    {
        if (!CurrentImportOptions.HasFlag(ImportOptions.CreateMissingAttributes))
        {
            // Option disabled: mark complete, record a warning, emit the report, bail out.
            State = WorkerState.Complete;
            StatusMessage = string.Format("Skipped as per ImportOptions");
            //TODO: Create new summary report
            _warnings.Add(new WorkerWarning
            {
                LineData = "",
                ErrorMessage = Resources.CreateMissingAttributesFlagOffWarningMessage
            });
            ProcessSummaryReport();
            return;
        }

        IEnumerable<AttributeInterchangeRecord> allData = ImportData.Attributes;
        // Materialize once so the source sequence is not re-enumerated below.
        var attributeInterchangeRecords = allData as AttributeInterchangeRecord[] ?? allData.ToArray();
        var totalRecordCount = attributeInterchangeRecords.Count();
        CurrentLogWriter.DebugFormat("{0}: Total Records: {1}", Arguments.Id, totalRecordCount);

        // Records failing required-value validation are warned about and excluded.
        var invalidRecords = attributeInterchangeRecords.GetInvalidRecords();
        var invalidAttributeInterchangeRecords =
            invalidRecords as IList<AttributeInterchangeRecord> ?? invalidRecords.ToList();
        CurrentLogWriter.DebugFormat("{0}: Invalid Records: {1}", Arguments.Id,
            invalidAttributeInterchangeRecords.Count);
        invalidAttributeInterchangeRecords.ToList()
            .ForEach(
                ir => _warnings.Add(new WorkerWarning
                {
                    LineData = ir.ToString(),
                    ErrorMessage = Resources.RequiredValueNullWarningMessage
                }));

        var validImportRecords =
            attributeInterchangeRecords.Except(invalidAttributeInterchangeRecords.ToList()).ToList();
        // Drop duplicate rows as defined by the project-specific comparer.
        validImportRecords =
            validImportRecords.Distinct(new AttributeInterchangeRecordComparer()).ToList(); // as List<AttributeInterchangeRecord>;
        CurrentLogWriter.DebugFormat("{0}: Valid Records: {1}", Arguments.Id, validImportRecords.Count);

        //Object level operation Start
        //TODOs
        //Report all the not found attributes types
        using (CurrentDbContext = new AryaDbDataContext(CurrentProjectId, ImportRequestedBy))
        {
            CurrentLogWriter.DebugFormat("{0}: Reporting Warnings", Arguments.Id);
            // Only non-meta attribute types participate in this import.
            var existingNonMetaAttributes = CurrentDbContext.Attributes.Where(
                at => Attribute.NonMetaAttributeTypes.Contains(at.AttributeType)).ToList();
            ReportAllWarinings(existingNonMetaAttributes, validImportRecords);

            //create new attribute
            CurrentLogWriter.DebugFormat("{0}: Get New Attributes", Arguments.Id);
            var newAttributes = GetNewAttributes(existingNonMetaAttributes, validImportRecords).ToList();
            foreach (var attributeInterchangeRecord in newAttributes)
            {
                CreateNewAttribute(attributeInterchangeRecord);
            }
            CurrentLogWriter.DebugFormat("{0}: Saving Changes", Arguments.Id);
            SaveDataChanges();
            ProcessSummaryReport(newAttributes.Count());
        }
        //Object level operation End
    }
    catch (IndexOutOfRangeException ex)
    {
        // Malformed input row (e.g. column-count mismatch) → wrap with a friendlier message.
        var newException = new Exception(Resources.InvalidRowInInputFileMessage, ex);
        Summary.SetError(newException);
    }
    catch (Exception ex)
    {
        Summary.SetError(ex);
    }
}
/// <summary>
/// Applies import rows that carry an explicit EntityID. Rows whose EntityID occurs
/// more than once in the file are rejected with a warning; the survivors are
/// bulk-copied into a uniquely-named tempdb staging table, and a single T-SQL batch
/// deactivates the current EntityData rows and inserts the replacement values
/// (flagged as "before" entities when the MarkAsBeforeEntity option is set).
/// Warnings produced inside the batch (unknown attribute, unknown EntityID) are read
/// back and recorded as worker warnings.
/// </summary>
/// <param name="recordsWithEntityInfo">Import rows that reference an existing EntityID.</param>
/// <remarks>
/// NOTE(review): the SQL is assembled by string concatenation; the injected values are
/// GUIDs, booleans, and resource strings, but parameterization would still be safer.
/// NOTE(review): @CurrentRemarkId is DECLAREd but never SET in this batch, so
/// DeletedRemark/CreatedRemark are written as NULL — confirm that is intended
/// (the sibling single-value batch does SET it).
/// </remarks>
private void ProcessRecordWithEntityData(IEnumerable<SkuAttributeValueInterchangeRecord> recordsWithEntityInfo)
{
    //TODO: IMPORTANT same value in file and db is being treated as different values. that means even if same record exists new entity data is created
    SkuAttributeValueInterchangeRecord[] skuAttributeValueInterchangeRecords =
        recordsWithEntityInfo as SkuAttributeValueInterchangeRecord[] ?? recordsWithEntityInfo.ToArray();
    //skuAttributeValueInterchangeRecords.RemoveAll(item => skuAttributeValueInterchangeRecords.Any()));

    // An EntityID appearing more than once in the file is ambiguous: warn once per
    // duplicated ID and drop ALL rows carrying it.
    var duplicateValueEntityIds = skuAttributeValueInterchangeRecords.GroupBy(s => s.EntityID)
        .SelectMany(grp => grp.Skip(1)).Select(s => s.EntityID).Distinct();
    var valueEntityIds = duplicateValueEntityIds as Guid?[] ?? duplicateValueEntityIds.ToArray();
    foreach (var warningRecord in valueEntityIds)
    {
        _warnings.Add(new WorkerWarning
        {
            LineData = warningRecord.ToString(),
            ErrorMessage = Properties.Resources.DuplicateEntityIdWarningMessage
        });
    }
    skuAttributeValueInterchangeRecords =
        skuAttributeValueInterchangeRecords.Where(sk => !valueEntityIds.Contains(sk.EntityID)).ToArray();
    // var newList = dup.Where(d => skuAttributeValueInterchangeRecords.Contains(d.EntityID))

    // Stage the surviving rows in tempdb for set-based processing.
    var sqlTempTableHelper = new SqlHelper(typeof(SkuAttributeValueInterchangeRecord));
    var tempEntityInfoRecord = TempExistingEntityInfoTablePrefix + Guid.NewGuid();
    var warningTableName = tempEntityInfoRecord + "_warning";
    var createTempTableScript = sqlTempTableHelper.CreateTableScript(tempEntityInfoRecord, "tempdb");
    var deleteTempTableScript = sqlTempTableHelper.DeleteTableScript(tempEntityInfoRecord, "tempdb");
    //create the temp table.
    CurrentDbContext.ExecuteCommand(createTempTableScript);
    var markAsBeforeEntity = CurrentImportOptions.HasFlag(ImportOptions.MarkAsBeforeEntity);
    CurrentLogWriter.DebugFormat("{0}: Inserting Records with EntityInfo", Arguments.Id);
    CurrentDbContext.BulkInsertAll(skuAttributeValueInterchangeRecords, tempEntityInfoRecord, "tempdb");
    CurrentLogWriter.DebugFormat("{0}: Processing Records with EntityInfo", Arguments.Id);

    // One batch: join staged rows to live data, collect warnings for unknown
    // attributes / EntityIDs, deactivate the matching live EntityData rows, then
    // insert the replacement values. Returns @@ROWCOUNT of the final insert.
    var entityInfoValueProcessorQueryString = @"
DECLARE @UserID AS UNIQUEIDENTIFIER
DECLARE @ProjectID AS UNIQUEIDENTIFIER
DECLARE @ResultText AS VARCHAR(2000)
DECLARE @DefaultRemark AS UNIQUEIDENTIFIER
DECLARE @NewDataCount AS int
DECLARE @IgnoredCount AS int
DECLARE @AttributeUomUnique AS bit
DECLARE @CurrentRemarkId AS UNIQUEIDENTIFIER
Declare @MarkAsBeforeEntity as bit
SET @MarkAsBeforeEntity = '" + markAsBeforeEntity + @"'
SET @UserID = '" + ImportRequestedBy + @"'
SET @ProjectID = '" + CurrentProjectId + @"'
IF OBJECT_ID('tempdb..#NewEntityDatas') IS NOT NULL DROP TABLE #NewEntityDatas
IF OBJECT_ID('[tempdb]..[" + warningTableName + @"]') IS NOT NULL DROP TABLE [tempdb]..[" + warningTableName + @"]
CREATE TABLE [tempdb]..[" + warningTableName + @"] ( ItemID varchar(255), AttributeName varchar(255), Uom varchar(255), Value varchar(255), Field1 varchar(255), Field2 varchar(255), Field3 varchar(255), Field4 varchar(255), Field5 varchar(255), EntityID varchar(255), WarningMessage varchar(255) );
SET @IgnoredCount = 0
SET @NewDataCount = 0
SET @ResultText = ''
SET @AttributeUomUnique = 1
Select sde.ItemID ,sde.[AttributeName] ,sde.[Uom] ,sde.[value] ,sde.[Field1] ,sde.[Field2] ,sde.[Field3] ,sde.[Field4] ,sde.[Field5] ,sde.[EntityID] ,sd.[EntityID] As ExistingEntityID ,sd.ItemID As ExistingItemId ,sd.[AttributeName] As ExistingAttributeName ,sd.[Uom] As ExistingUom ,sd.[value] As ExistingValue ,sd.[Field1] As ExistingField1 ,sd.[Field2] As ExistingField2 ,sd.[Field3] As ExistingField3 ,sd.[Field4] As ExistingField4 ,sd.[Field5] As ExistingField5 ,a.ID as AttributeID
INTO #NewEntityDatas
FROM [tempdb]..[" + tempEntityInfoRecord + @"] sde
LEFT OUTER JOIN V_ActiveSkuData sd ON sd.EntityID = sde.[EntityID]
LEFT OUTER JOIN Attribute a ON LOWER(a.AttributeName) = LOWER(sde.AttributeName) AND a.AttributeType IN ('Sku','Global', 'Derived', 'Flag')
--select * from #NewEntityDatas
INSERT into [tempdb]..[" + warningTableName + @"](ItemID,AttributeName,Uom,Value,Field1,Field2, Field3, Field4, Field5, EntityID,WarningMessage)
SELECT nd.ItemID,nd.AttributeName,nd.Uom,nd.Value,nd.Field1,nd.Field2,nd.Field3,nd.Field4,nd.Field5,nd.EntityID, '" + Resources.AttributeDoesNotExistWarningMessage + @"'
FROM #NewEntityDatas nd where nd.AttributeID is NULL
INSERT into [tempdb]..[" + warningTableName + @"](ItemID,AttributeName,Uom,Value,Field1,Field2, Field3, Field4, Field5,EntityID,WarningMessage)
SELECT nd.ItemID,nd.AttributeName,nd.Uom,nd.Value,nd.Field1,nd.Field2,nd.Field3,nd.Field4,nd.Field5,nd.EntityID,'" + Resources.EntityInfoIDDoesNotExistWarningMessage + @"'
FROM #NewEntityDatas nd where nd.ExistingEntityID is NULL
DELETE nd FROM #NewEntityDatas nd INNER JOIN [tempdb]..[" + warningTableName + @"] w ON w.EntityID = nd.EntityID
--SELECT * FROM #NewEntityDatas
--UPDATE Entity Datas
UPDATE EntityData SET Active = 0,DeletedOn = GETDATE(),DeletedBy = @UserID, DeletedRemark = @CurrentRemarkId
FROM EntityData ed inner join #NewEntityDatas nd ON nd.EntityID = ed.EntityID
WHERE ed.Active = 1
--Insert New Entity DATA
IF @MarkAsBeforeEntity = 1
BEGIN
INSERT INTO EntityData(ID, [AttributeID],[Value],[Uom],[Field1],[Field2],[Field3],[Field4],[Field5],[CreatedOn],[CreatedBy],[CreatedRemark],[Active],[BeforeEntity], EntityID)
SELECT NEWID(), td.AttributeId, td.Value, td.Uom,td.Field1, td.[Field2],td.[Field3],td.[Field4],td.[Field5], GETDATE(), @UserID, @CurrentRemarkId,1, 1, td.EntityID
FROM #NewEntityDatas td
END
ELSE
BEGIN
INSERT INTO EntityData(ID, [AttributeID],[Value],[Uom],[Field1],[Field2],[Field3],[Field4],[Field5],[CreatedOn],[CreatedBy],[CreatedRemark],[Active],[BeforeEntity], EntityID)
SELECT NEWID(), td.AttributeId, td.Value, td.Uom,td.Field1, td.[Field2],td.[Field3],td.[Field4],td.[Field5], GETDATE(), @UserID, @CurrentRemarkId,1, 0, td.EntityID
FROM #NewEntityDatas td
END
--SET @ResultText += '" + Resources.NewRecordCountIdentifierText + @"'+ '=' + CAST(@@ROWCOUNT AS VARCHAR(50)) ;
SELECT @@ROWCOUNT ";

    var queryResults = CurrentDbContext.ExecuteQuery<int>(entityInfoValueProcessorQueryString).Single();
    _successCountForEntityInfo = queryResults;
    CurrentDbContext.ExecuteCommand(deleteTempTableScript);

    // Read the warning table back as tab-separated lines (last field = message).
    var warningRecords = CurrentDbContext.ExecuteQuery<string>(
        @"SELECT war.ItemID + char(9) + war.AttributeName + char(9) + ISNULL(war.Uom,'') + char(9) + ISNULL(war.Value,'') + char(9) + ISNULL(war.Field1,'') + char(9) + ISNULL(war.Field2,'') + char(9) + ISNULL(war.Field3,'') + char(9) + ISNULL(war.Field4,'') + char(9) + ISNULL(war.Field5,'') + char(9) + ISNULL(war.EntityID,'') + char(9) + war.WarningMessage FROM [tempdb]..[" + warningTableName + @"] war").ToList();
    CurrentDbContext.ExecuteCommand(@"IF OBJECT_ID('[tempdb]..[" + warningTableName + @"]') IS NOT NULL DROP TABLE [tempdb]..[" + warningTableName + @"]");
    CurrentLogWriter.DebugFormat("{0}: {1} Warnings", Arguments.Id, warningRecords.Count);
    foreach (var warningRecord in warningRecords)
    {
        // Split each line at its LAST tab: everything before is the row data,
        // everything after is the warning message.
        _warnings.Add(new WorkerWarning
        {
            LineData = warningRecord.Substring(0, warningRecord.LastIndexOf('\t')),
            ErrorMessage = warningRecord.Substring(warningRecord.LastIndexOf('\t') + 1)
        });
    }
}
/// <summary>
/// Imports SKU attribute values. Pipeline: (1) validate and warn on invalid rows;
/// (2) rows carrying an explicit EntityID go through ProcessRecordWithEntityData;
/// (3) rows forming multi-value sets (duplicated item+attribute[+uom] keys in the
/// file, or keys already multi-valued in the database) go through
/// ProcessMultivalues; (4) remaining single-value rows are bulk-copied into a
/// tempdb staging table and applied by one T-SQL batch that deactivates existing
/// values and inserts the new ones. Batch warnings are read back and recorded.
/// </summary>
/// <remarks>
/// NOTE(review): when CreateMissingValues is off, this method only warns and then
/// continues (the attribute-import Run returns early in the analogous case) — the
/// batch itself later honors @CreateMissingValues = 0, so this appears deliberate;
/// confirm.
/// NOTE(review): recordsWithEntityInfo is a deferred query that captures the list
/// object validImportRecords referenced at that point; the later reassignment of
/// the variable does not affect it.
/// </remarks>
public override void Run()
{
    //State = WorkerState.Working;
    try
    {
        if (!CurrentImportOptions.HasFlag(ImportOptions.CreateMissingValues))
        {
            CurrentLogWriter.Debug(Resources.CreateMissingValuesFlagOffWanringMessage);
            _warnings.Add(new WorkerWarning
            {
                LineData = Resources.CreateMissingValuesFlagOffLineDataText,
                ErrorMessage = Resources.CreateMissingValuesFlagOffWanringMessage
            });
        }
        using (CurrentDbContext = new AryaDbDataContext(CurrentProjectId, ImportRequestedBy))
        {
            var allData = ImportData.SkuAttributeValues;
            CurrentLogWriter.DebugFormat("{0}: Total Records: {1}", Arguments.Id, allData.Count);

            // Records failing required-value validation are warned about and excluded.
            var invalidRecords = allData.GetInvalidRecords();
            var attributeValueInterchangeRecords =
                invalidRecords as IList<SkuAttributeValueInterchangeRecord> ?? invalidRecords.ToList();
            CurrentLogWriter.DebugFormat("{0}: Invalid Records: {1}", Arguments.Id,
                attributeValueInterchangeRecords.Count);
            attributeValueInterchangeRecords.ToList()
                .ForEach(
                    ir => _warnings.Add(new WorkerWarning
                    {
                        LineData = ir.ToString(),
                        ErrorMessage = Resources.RequiredValueNullWarningMessage
                    }));
            var validImportRecords = allData.Except(attributeValueInterchangeRecords.ToList()).ToList();
            CurrentLogWriter.DebugFormat("{0}: Valid Records: {1}", Arguments.Id, validImportRecords.Count);

            // Split off rows that target an existing EntityID explicitly.
            var recordsWithEntityInfo = validImportRecords.Where(ad => ad.EntityID != null);
            validImportRecords = validImportRecords.Where(ad => ad.EntityID == null).ToList();
            // (from row in validImportRecords where !recordsWithEntityInfo.Contains(row) select row).ToList();
            var skuAttributeValueInterchangeRecords =
                recordsWithEntityInfo as SkuAttributeValueInterchangeRecord[] ?? recordsWithEntityInfo.ToArray();
            CurrentLogWriter.DebugFormat("{0}: Records with EntityInfo: {1}", Arguments.Id,
                skuAttributeValueInterchangeRecords.Length);
            if (skuAttributeValueInterchangeRecords.Count() != 0)
            {
                ProcessRecordWithEntityData(skuAttributeValueInterchangeRecords);
            }

            //TODO: take out multi value from import data and data bases Start
            // Group by item+attribute (+uom when _attPlusUom); groups with >1 row in the
            // FILE are multi-value sets.
            var groupByResults = (from t in validImportRecords
                                  let groupItems = GetGroup(t, _attPlusUom)
                                  group t by groupItems
                                  into grp
                                  select grp).ToHashSet();
            var multiValues = (from grp in groupByResults where grp.Count() > 1 from row in grp select row).ToHashSet();
            CurrentLogWriter.DebugFormat("{0}: {1} multi-Value-sets", Arguments.Id, multiValues.Count);

            // Single-row groups, encoded "item|attribute[|uom]" for the DB-side check below.
            var distinctValues = groupByResults.Where(r => r.Count() == 1)
                .Select(
                    r => new StringValueWrapperRecord
                    {
                        StringValue = string.Format("{0}|{1}{2}", r.Key.Item1, r.Key.Item2,
                            _attPlusUom ? string.Format("|{0}", r.Key.Item3) : string.Empty)
                    }); //.ToList();}
            //TODO : Bulk Insert n db and then join with
            var sqlTempTableHelper = new SqlHelper(typeof(StringValueWrapperRecord));
            var tempSkuKeyTableName = TempSkuKeyTablePrefix + Guid.NewGuid();
            var createTempTableScript = sqlTempTableHelper.CreateTableScript(tempSkuKeyTableName, "tempdb");
            var deleteTempTableScript = sqlTempTableHelper.DeleteTableScript(tempSkuKeyTableName, "tempdb");
            //create the temp table.
            CurrentDbContext.ExecuteCommand(createTempTableScript);
            CurrentLogWriter.DebugFormat("{0}: Bulk Inserting Distinct Value (non-multi-value) records", Arguments.Id);
            CurrentDbContext.BulkInsertAll(distinctValues, tempSkuKeyTableName, "tempdb");
            //End of bulk insert
            CurrentLogWriter.DebugFormat("{0}: Processing Distinct Value (non-multi-value) records", Arguments.Id);
            // Keys that are ALREADY multi-valued in the database also count as multi-value sets.
            var query = string.Format(@" SELECT skd.StringValue FROM [tempdb]..[{1}] skd INNER JOIN V_ActiveSkuData vasd ON vasd.ItemID + '|' + vasd.AttributeName {0} = skd.StringValue GROUP BY skd.StringValue Having count(*) > 1 ",
                _attPlusUom ? " + '|' + ISNULL(Uom,'')" : string.Empty, tempSkuKeyTableName);
            var groupByResultsDb = CurrentDbContext.ExecuteQuery<StringValueWrapperRecord>(query);
            CurrentDbContext.ExecuteCommand(deleteTempTableScript);
            // Decode "item|attribute[|uom]" back into tuples to match the file-side group keys.
            var dbResults = (from row in groupByResultsDb
                             let grp = row.StringValue
                             let parts = grp.Split('|')
                             let itemId = parts[0]
                             let attribute = parts[1]
                             let uom = parts.Length > 2 && parts[2] != string.Empty ? parts[2] : null
                             select new Tuple<string, string, string>(itemId, attribute, uom)).ToHashSet();
            multiValues.AddRange(from grp in groupByResults where dbResults.Contains(grp.Key) from row in grp select row);
            ProcessMultivalues(multiValues);
            //End multivalue vandle
            validImportRecords = (from row in validImportRecords where !multiValues.Contains(row) select row).ToList();

            // Stage the remaining single-value rows for the set-based batch below.
            sqlTempTableHelper = new SqlHelper(CurrentInterchangeRecordType);
            var tempTableName = TempTablePrefix + Guid.NewGuid();
            var warningTableName = tempTableName + "_warning";
            createTempTableScript = sqlTempTableHelper.CreateTableScript(tempTableName, "tempdb");
            deleteTempTableScript = sqlTempTableHelper.DeleteTableScript(tempTableName, "tempdb");
            //create the temp table.
            CurrentDbContext.ExecuteCommand(createTempTableScript);
            CurrentLogWriter.DebugFormat("{0}: Bulk Inserting valid Import Records", Arguments.Id);
            CurrentDbContext.BulkInsertAll(validImportRecords, tempTableName, "tempdb");
            CurrentLogWriter.DebugFormat("{0}: Processing valid Import Records", Arguments.Id);
            // Working columns used by the batch to carry resolved IDs and existing field values.
            CurrentDbContext.ExecuteCommand(@"
--Update entity data, SET active to 0
ALTER TABLE [tempdb]..[" + tempTableName + @"]
ADD SkuId UNIQUEIDENTIFIER
,AttributeId UNIQUEIDENTIFIER
,EntityInfoId UNIQUEIDENTIFIER
,EntityDataId UNIQUEIDENTIFIER
,DbField1 nvarchar(4000)
,DbField2 nvarchar(4000)
,DbField3 nvarchar(4000)
,DbField4 nvarchar(4000)
,DbField5 nvarchar(4000)");
            var createMissingValues = CurrentImportOptions.HasFlag(ImportOptions.CreateMissingValues);
            var markAsBeforeEntity = CurrentImportOptions.HasFlag(ImportOptions.MarkAsBeforeEntity);
            // One batch: warn on unknown items/attributes, skip rows identical to active
            // data, resolve IDs, deactivate replaced values, create missing EntityInfos,
            // insert the new values, and return a ';'-separated counters string.
            var singleValueProcessorQueryString = string.Format(@"
Declare @CreateMissingValues as bit
Declare @MarkAsBeforeEntity as bit
DECLARE @UserID AS UNIQUEIDENTIFIER
DECLARE @ProjectID AS UNIQUEIDENTIFIER
DECLARE @ResultText AS VARCHAR(2000)
DECLARE @CurrentRemarkId AS UNIQUEIDENTIFIER
DECLARE @NewDataCount AS int
DECLARE @IgnoredCount AS int
SET @UserID = '" + ImportRequestedBy + @"'
SET @ProjectID = '" + CurrentProjectId + @"'
SET @IgnoredCount = 0
SET @NewDataCount = 0
SET @ResultText = ''
SET @CreateMissingValues = '" + createMissingValues + @"'
SET @MarkAsBeforeEntity = '" + markAsBeforeEntity + @"'
SET @CurrentRemarkId = '" + CurrentRemarkId + @"'
IF OBJECT_ID('tempdb..#NewEntityInfo') IS NOT NULL DROP TABLE #NewEntityInfo
IF OBJECT_ID('[tempdb]..[" + warningTableName + @"]') IS NOT NULL DROP TABLE [tempdb]..[" + warningTableName + @"]
CREATE TABLE [tempdb]..[" + warningTableName + @"] ( ItemID varchar(255), AttributeName varchar(255), Uom varchar(255), Value varchar(4000), Field1 varchar(255), Field2 varchar(255), Field3 varchar(255), Field4 varchar(255), Field5 varchar(255), EntityID varchar(255), WarningMessage varchar(255) );
-- Take Out Missiing Skus
INSERT into [tempdb]..[" + warningTableName + @"](ItemID,AttributeName,Uom,Value,Field1,Field2, Field3, Field4, Field5, EntityID,WarningMessage)
Select td.ItemID,td.AttributeName,td.Uom,td.Value,td.Field1,td.Field2,td.Field3,td.Field4,td.Field5,td.EntityID, '" + Resources.ItemDoesNotExistWarningMessage + @"'
FROM [tempdb]..[" + tempTableName + @"] td LEFT OUTER JOIN Sku s on s.ItemID = td.ItemID where s.ItemID is NULL
-- Take Out Missiing attributes
INSERT into [tempdb]..[" + warningTableName + @"](ItemID,AttributeName,Uom,Value,Field1,Field2, Field3, Field4, Field5, EntityID ,WarningMessage)
Select td.ItemID,td.AttributeName,td.Uom,td.Value,td.Field1,td.Field2,td.Field3,td.Field4,td.Field5,td.EntityID, '" + Resources.AttributeDoesNotExistWarningMessage + @"'
FROM [tempdb]..[" + tempTableName + @"] td LEFT OUTER JOIN Attribute a on LOWER(a.AttributeName) = LOWER(td.AttributeName) AND a.AttributeType IN ('Sku','Global', 'Derived', 'Flag') WHERE a.AttributeName is NULL
-- DELETE all the warning entries from temp table
Delete td from [tempdb]..[" + tempTableName + @"] td inner join [tempdb]..[" + warningTableName + @"] w on td.ItemID = w.ItemID and td.AttributeName = w.AttributeName
-- Add a count here for ignored and delete this record
DELETE td
--select *
from [tempdb]..[" + tempTableName + @"] td inner join V_AllSkuData sd on sd.ItemID = td.ItemID AND sd.AttributeName = td.AttributeName AND ISNULL(sd.Uom,'') = ISNULL(td.Uom,'') AND ISNULL(sd.Value,'') = ISNULL(td.Value,'') AND ISNULL(sd.Field1,'') = ISNULL(td.Field1,'') AND ISNULL(sd.Field2,'') = ISNULL(td.Field2,'') AND ISNULL(sd.Field3,'') = ISNULL(td.Field3,'') AND ISNULL(sd.Field4,'') = ISNULL(td.Field4,'') AND ISNULL(sd.Field5,'') = ISNULL(td.Field5,'') Where sd.Active = 1
-- SET @IgnoredCount = @@ROWCOUNT
SET @ResultText += '" + Resources.IgnoredRecordCountIdentifierText + @"'+ '=' + CAST(@@ROWCOUNT AS VARCHAR(50)) + ';';
--UPDATE Skuid and attribute id
UPDATE [tempdb]..[" + tempTableName + @"] SET SkuId = s.ID, AttributeId = a.ID
FROM [tempdb]..[" + tempTableName + @"] td Inner join sku s on s.ItemID = td.ItemID inner join Attribute a on LOWER(a.AttributeName) = LOWER(td.AttributeName) AND a.AttributeType IN ('Sku','Global', 'Derived', 'Flag')
--Update Entity info and Entity Data
UPDATE [tempdb]..[" + tempTableName + @"] SET EntityInfoId = vasd.EntityId, EntityDataId = vasd.EntityDataId, DbField1 = vasd.Field1, DbField2 = vasd.Field2, DbField3 = vasd.Field3, DbField4 = vasd.Field4, DbField5 = vasd.Field5
FROM [tempdb]..[" + tempTableName + @"] td inner join V_ActiveSkuData vasd on vasd.SkuID = td.SkuId and vasd.AttributeID = td.AttributeId
IF @MarkAsBeforeEntity = 1
BEGIN
UPDATE EntityData SET BeforeEntity = 0 FROM EntityData ed inner join [tempdb]..[" + tempTableName + @"] td on td.EntityDataId = ed.ID
END
IF @CreateMissingValues = 0
BEGIN
--Delete all the item the are not in the db from temp so that the update will work.
DELETE td from [tempdb]..[" + tempTableName + @"] td where EntityInfoId is null
END
--Update all the existing entitydata set to 0
UPDATE EntityData SET Active = 0,DeletedBy = @UserID,DeletedOn = GETDATE(),DeletedRemark = @CurrentRemarkId
From EntityData ed inner join [tempdb]..[" + tempTableName + @"] td on td.EntityDataId = ed.ID WHERE ed.Active = 1
SET @ResultText += '" + Resources.UpdatedRecordCountIdentifierText + @"'+ '=' + CAST(@@ROWCOUNT AS VARCHAR(50)) + ';';
--Update entity info in import data table
UPDATE [tempdb]..[" + tempTableName + @"] SET EntityInfoId = NEWID() where [tempdb]..[" + tempTableName + @"].EntityDataId is null
--Insert new entity info
Insert Into EntityInfo(ID, SkuID) Select td.EntityInfoId, td.SkuId FROM [tempdb]..[" + tempTableName + @"] td where td.EntityDataId is null
SET @ResultText += '" + Resources.NewRecordCountIdentifierText + @"'+ '=' + CAST(@@ROWCOUNT AS VARCHAR(50)) ;
IF @MarkAsBeforeEntity = 1
BEGIN
-- Insert all the record in entity data
Insert Into EntityData(ID, [AttributeID],[Value],[Uom],[Field1],[Field2],[Field3],[Field4],[Field5],[CreatedOn],[CreatedBy],[CreatedRemark],[Active],BeforeEntity, EntityID)
select NEWID(), td.AttributeId, td.Value, td.Uom, ISNULL(td.[Field1], td.DbField1), ISNULL(td.[Field2], td.DbField2), ISNULL(td.[Field3], td.DbField3), ISNULL(td.[Field4], td.DbField4), ISNULL(td.[Field5], td.DbField5), GETDATE(), @UserID, @CurrentRemarkId,1,1, td.EntityInfoId
FROM [tempdb]..[" + tempTableName + @"] td
END
ELSE
BEGIN
-- Insert all the record in entity data
Insert Into EntityData(ID, [AttributeID],[Value],[Uom],[Field1],[Field2],[Field3],[Field4],[Field5],[CreatedOn],[CreatedBy],[CreatedRemark],[Active], EntityID)
select NEWID(), td.AttributeId, td.Value, td.Uom, ISNULL(td.[Field1], td.DbField1), ISNULL(td.[Field2], td.DbField2), ISNULL(td.[Field3], td.DbField3), ISNULL(td.[Field4], td.DbField4), ISNULL(td.[Field5], td.DbField5), GETDATE(), @UserID, @CurrentRemarkId,1, td.EntityInfoId
FROM [tempdb]..[" + tempTableName + @"] td
END
Select @ResultText ");
            _queryResults = CurrentDbContext.ExecuteQuery<string>(singleValueProcessorQueryString).Single();
            CurrentLogWriter.DebugFormat("{0}: Fetching warnings", Arguments.Id);
            // Read the warning table back as tab-separated lines (last field = message).
            var warningRecords = CurrentDbContext.ExecuteQuery<string>(
                @"SELECT war.ItemID + char(9) + war.AttributeName + char(9) + ISNULL(war.Uom,'') + char(9) + ISNULL(war.Value,'') + char(9) + ISNULL(war.Field1,'') + char(9) + ISNULL(war.Field2,'') + char(9) + ISNULL(war.Field3,'') + char(9) + ISNULL(war.Field4,'') + char(9) + ISNULL(war.Field5,'') + char(9) + ISNULL(war.EntityID,'') + char(9) + war.WarningMessage FROM[tempdb]..[" + warningTableName + @"] war ").ToList();
            //var warningRecords =
            //    CurrentDbContext.ExecuteQuery<string>("select * from [tempdb]..[" + warningTableName + @"]").ToList();
            CurrentDbContext.ExecuteCommand(@"IF OBJECT_ID('[tempdb]..[" + warningTableName + @"]') IS NOT NULL DROP TABLE [tempdb]..[" + warningTableName + @"]");
            CurrentLogWriter.DebugFormat("{0}: {1} warnings", Arguments.Id, warningRecords.Count);
            foreach (var warningRecord in warningRecords)
            {
                // Split each line at its LAST tab: row data before, warning message after.
                _warnings.Add(new WorkerWarning
                {
                    LineData = warningRecord.Substring(0, warningRecord.LastIndexOf('\t')),
                    ErrorMessage = warningRecord.Substring(warningRecord.LastIndexOf('\t') + 1)
                });
            }
            CurrentLogWriter.DebugFormat("{0}: Processing Summary Report", Arguments.Id);
            ProcessSummaryReport(_queryResults);
            CurrentDbContext.ExecuteCommand(deleteTempTableScript);
        }
    }
    catch (IndexOutOfRangeException ex)
    {
        // Malformed input row (e.g. column-count mismatch) → wrap with a friendlier message.
        var newException = new Exception(Resources.InvalidRowInInputFileMessage, ex);
        Summary.SetError(newException);
    }
    catch (Exception ex)
    {
        Summary.SetError(ex);
    }
}
//This should be handled in the invalid-value separation step
//private void ReportNullAttributeValues(List<SkuAttributeValueInterchangeRecord> validImportRecords)
//{
//    var nullAttributeValueRecords = validImportRecords.Where(ad => ad.Value == null).ToList();
//    nullAttributeValueRecords.ForEach(vr =>
//    {
//        _warnings.Add(new WorkerWarning
//        {
//            LineData = vr.ToString(),
//            ErrorMessage =
//                "Attribute Value can not be null."
//        });
//        validImportRecords.Remove(vr);
//    });
//}
/// <summary>
/// Imports multi-value attribute records: for each (ItemID, AttributeName) group
/// from the file, reconciles the file values against the active EntityData rows in
/// the database. Exact value+Uom matches are counted as ignored (their Field1-5 are
/// refreshed if they differ); remaining file values are written as new EntityData,
/// reusing existing EntityInfos when _replaceExistingValues is set (old rows are
/// deactivated) or creating new EntityInfos when CreateMissingValues is enabled.
/// Unknown items/attributes are reported as warnings. Persists via SaveDataChanges().
/// </summary>
private void ProcessMultivalues(IEnumerable<SkuAttributeValueInterchangeRecord> multiValues)
{
    CurrentLogWriter.DebugFormat("{0}: Processing Multi-Value records", Arguments.Id);
    // NOTE(review): declared outside the group loop, so entries left over from one
    // (item, attribute) group can carry into the next group's processing when
    // _replaceExistingValues is false — confirm this carry-over is intended.
    var existingEntityInfos = new List<EntityInfo>();
    // One group per (ItemID, AttributeName) pair, with all file values for that pair.
    var results = multiValues.GroupBy(mv => new { mv.ItemID, mv.AttributeName },
        (key, group) => new { key.ItemID, key.AttributeName, Values = group.ToList() });
    foreach (var fileGroup in results)
    {
        var line = fileGroup;
        // Active DB rows for this item/attribute; attribute name compared case-insensitively
        // (ToLower is presumably translated by LINQ-to-SQL — verify collation behavior).
        var dbGroup = (from s in CurrentDbContext.Skus
                       where s.ItemID == line.ItemID
                       from ei in s.EntityInfos
                       from ed in ei.EntityDatas
                       where ed.Active && ed.Attribute.AttributeName.ToLower() == line.AttributeName.ToLower()
                       select ed).ToList();
        //if its an exact match on value+Uom
        var exactMatches = from fileLine in fileGroup.Values
                           join dbLine in dbGroup
                               on new { fileLine.Value, fileLine.Uom } equals new { dbLine.Value, dbLine.Uom }
                           let ln = new { fileLine, dbLine }
                           group ln by ln.fileLine
                           into grp
                           select new { FileLine = grp.Key, DbLines = grp.Select(g => g.dbLine) };
        foreach (var exactMatch in exactMatches)
        {
            //Processing general updates: value+Uom already present, but Field1-5 differ —
            //replace the row(s) with a fresh one carrying the file's field values.
            if (!FieldsInFileAndDbFieldsMatches(exactMatch.DbLines, exactMatch.FileLine))
            {
                exactMatch.DbLines.ForEach(e => e.Active = false);
                var ed = exactMatch.DbLines.First();
                ed.EntityInfo.EntityDatas.Add(new EntityData(CurrentDbContext)
                {
                    Attribute = ed.Attribute,
                    Value = ed.Value,
                    Uom = ed.Uom,
                    Field1 = exactMatch.FileLine.Field1,
                    Field2 = exactMatch.FileLine.Field2,
                    Field3 = exactMatch.FileLine.Field3,
                    Field4 = exactMatch.FileLine.Field4,
                    Field5 = exactMatch.FileLine.Field5,
                    BeforeEntity = false,
                    CreatedRemark = CurrentRemarkId
                });
            }
            //remove the item from the eds
            dbGroup.RemoveAll(exactMatch.DbLines);
            //TODO: remove from file list??
            _ignoredCountMultiValues++;
            fileGroup.Values.Remove(exactMatch.FileLine);
        }
        //case 1: replace mode — deactivate every remaining DB row and keep its
        //EntityInfo (ordered by value) for reuse by the file's values below.
        if (_replaceExistingValues)
        {
            //add to the entity info
            existingEntityInfos = dbGroup.OrderBy(e => e.Value).Select(e => e.EntityInfo).ToList();
            foreach (var entityData in dbGroup)
            {
                //deactivate all in dbs
                entityData.Active = false;
            }
        }
        // Nothing to reuse and creation of missing values not allowed — skip this group.
        if (existingEntityInfos.Count == 0 && !CurrentImportOptions.HasFlag(ImportOptions.CreateMissingValues))
        {
            continue;
        }
        //case 2: write each remaining file value as a new EntityData row.
        // var currentEntityInfoIndex = 0;
        foreach (var itemFromFile in fileGroup.Values)
        {
            //create new entity data
            if (itemFromFile == null)
            {
                continue;
            }
            EntityInfo currentEntityInfo;
            if (existingEntityInfos.Count != 0)
            {
                // Reuse an EntityInfo freed up by the replace step above.
                currentEntityInfo = existingEntityInfos.First();
            }
            else
            {
                var currentSku = CurrentDbContext.Skus
                    .FirstOrDefault(s => s.ItemID == itemFromFile.ItemID);
                if (currentSku == null)
                {
                    _warnings.Add(new WorkerWarning
                    {
                        LineData = itemFromFile.ToString(),
                        ErrorMessage = Resources.ItemDoesNotExistWarningMessage// Resources.AttributeDoesNotExistWarningMessage
                    });
                    continue;
                }
                else
                {
                    currentEntityInfo = new EntityInfo(CurrentDbContext) { Sku = currentSku };
                    CurrentDbContext.EntityInfos.InsertOnSubmit(currentEntityInfo);
                }
            }
            var attributeFromName = Attribute.GetAttributeFromName(CurrentDbContext, itemFromFile.AttributeName, false, useChache: false);
            if (attributeFromName == null)
            {
                _warnings.Add(new WorkerWarning
                {
                    LineData = itemFromFile.ToString(),
                    ErrorMessage = Resources.AttributeDoesNotExistWarningMessage
                });
                continue;
            }
            var attId = attributeFromName.ID;
            currentEntityInfo.EntityDatas.Add(new EntityData(CurrentDbContext)
            {
                AttributeID = attId,
                Value = itemFromFile.Value,
                Uom = itemFromFile.Uom,
                Field1 = itemFromFile.Field1,
                Field2 = itemFromFile.Field2,
                Field3 = itemFromFile.Field3,
                Field4 = itemFromFile.Field4,
                Field5 = itemFromFile.Field5,
                BeforeEntity = false,
                CreatedRemark = CurrentRemarkId
            });
            _successCountForMultiValues++;
            // Each reused EntityInfo takes at most one file value.
            if (existingEntityInfos.Contains(currentEntityInfo))
            {
                existingEntityInfos.Remove(currentEntityInfo);
            }
        }
    }
    SaveDataChanges();
}
/// <summary>
/// Exports one taxonomy node: builds the bridge-format node record, attaches the
/// derived-node query (if the node is derived), then processes its metadata,
/// schemas, skus and children before serializing the node to disk. Failures are
/// logged as warnings (full inner-exception chain) and swallowed so the export
/// of the remaining tree continues.
/// </summary>
private void ProcessTaxonomyNode(TaxonomyInfo parent, TaxonomyInfo node)
{
    try
    {
        StatusMessage = string.Format("Processing {0}", node);

        // Root nodes (no parent) hang directly off the project.
        var parentId = parent != null ? parent.ID : _project.ID;
        var exportNode = TaxonomyNode.FromValues(
            node.TaxonomyData.CreatedOn, node.ID, node.NodeName, parentId, _project.ID);

        if (node.NodeType == TaxonomyInfo.NodeTypeDerived)
        {
            //TODO: Convert Query from Arya Format to Bridge Format
            var derived = node.DerivedTaxonomies.FirstOrDefault() ?? new DerivedTaxonomy();
            var criteria = new SelectionCriteriaType
            {
                lang = EnUs,
                Value = derived.Expression.Value
            };
            exportNode.DerivedNodeDefinition = new SkuQueryType
            {
                SourceNode = new SkuQueryTypeSourceNode { NodeId = node.ID },
                SelectionCriterias = new List<SelectionCriteriaType> { criteria }
            };
        }

        ProcessTaxonomyMetaDatas(node, exportNode);
        //if (ExportAuditTrail)
        //    ProcessTaxonomyNodeTrail(node);
        ProcessTaxonomySchemas(node);
        ProcessTaxonomySkus(node);
        ProcessTaxonomyChildren(node);

        exportNode.SerializeObject(GetSaveFilePath("Node", node.ID.ToString()));
    }
    catch (Exception exception)
    {
        // Flatten the whole exception chain (message + stack trace per level).
        var details = string.Empty;
        for (var current = exception; current != null; current = current.InnerException)
        {
            details += current.Message + Environment.NewLine;
            details += current.StackTrace + Environment.NewLine;
        }
        CurrentLogWriter.Warn("There was a problem processing node." + Environment.NewLine + details);
    }
}
/// <summary>
/// Entry point of the import worker. Resolves the child import workers eligible
/// for the input file (loading its data in the process), aborts on duplicate
/// taxonomy paths, runs each eligible worker in turn (stopping at the first
/// failure), and aggregates the child summaries into this worker's summary
/// and final state.
/// </summary>
public override void Run()
{
    CurrentLogWriter.DebugFormat("{0}: Starting Import Worker", Arguments.Id);
    try
    {
        State = WorkerState.Working;
        var args = (ImportArgs)Arguments;
        var childSummaries = new List<WorkerSummary>();
        var inputPath = args.InputFilePath;

        // Determine which workers can handle this file; this also reads the file data.
        List<ImportWorkerBase> eligibleImports = GetEligibleWorkerAndData(args, inputPath);

        if (DuplicateTaxonomyIndb(args))
        {
            Summary.SetError(new Exception("Duplicate TaxonomyPath in the database."));
            return;
        }

        foreach (var eligibleImport in eligibleImports)
        {
            var worker = eligibleImport;
            var workerName = worker.GetType().Name;
            CurrentLogWriter.DebugFormat("{0}: Starting {1}", Arguments.Id, workerName);
            //this is what should be logged from the external process launcher
            worker.WorkerStatusChange +=
                (s, a) => CurrentLogWriter.DebugFormat("{0}: {1} : State={2}, Message={3}",
                    Arguments.Id, s.GetType(), a.CurrentState, a.StatusMessage);
            worker.CurrentLogWriter = CurrentLogWriter;
            StatusMessage = "Running " + workerName;
            worker.Run();
            childSummaries.Add(worker.Summary);
            if (worker.Summary.HasError)
            {
                // Stop the pipeline at the first failing child worker.
                CurrentLogWriter.DebugFormat("{0}: HasError in {1}", Arguments.Id, workerName);
                break;
            }
            CurrentLogWriter.DebugFormat("{0}: Finished {1}", Arguments.Id, workerName);
        }

        Summary.ChildrenWorkerSummaries = childSummaries;
        if (!eligibleImports.Any())
        {
            State = WorkerState.Error;
            Summary.SetError(new Exception("No eligible imports found"));
        }
        else
        {
            // Final state is the maximum state reported by any child worker.
            State = childSummaries.Max(cs => cs.State);
        }
    }
    catch (Exception ex)
    {
        CurrentLogWriter.DebugFormat("{0}: Error: {1}", Arguments.Id, ex.Message);
        Summary.SetError(ex);
    }
    CurrentLogWriter.DebugFormat("{0}: Finished Import Worker", Arguments.Id);
}
/// <summary>
/// Determines which of the available import workers can process the input file,
/// loads the file's records into <c>ImportRecords</c> (XML input is deserialized
/// whole; delimited text is read per worker via CsvReader into the shared record
/// set), and configures each eligible worker with the job settings.
/// </summary>
/// <param name="importArgs">Import job arguments: file path, field mappings, options.</param>
/// <param name="importFilePath">Path assigned to each eligible worker as its input file.</param>
/// <returns>The workers eligible to run for this input file (empty if none).</returns>
private List<ImportWorkerBase> GetEligibleWorkerAndData(ImportArgs importArgs, string importFilePath)
{
    CurrentLogWriter.DebugFormat("{0}: Starting Get Eligible Worker And Data", Arguments.Id);
    var eligibleImports = new List<ImportWorkerBase>();
    var availableImports = ImportWorkerBase.GetAvailableImports();
    var fileName = Path.GetFileName(importArgs.InputFilePath);
    if (fileName == null)
    {
        return new List<ImportWorkerBase>();
    }
    foreach (var importWorker in availableImports)
    {
        var currentImportWorker = importWorker;
        currentImportWorker.CurrentFieldMappings = new Dictionary<string, int>();
        var requiredFields = currentImportWorker.GetRequiredFields();
        if (fileName.EndsWith(".xml"))
        {
            // XML input carries the complete interchange payload; deserialize it once
            // and share it across all workers.
            // NOTE(review): for .xml input every available worker is treated as
            // eligible, with no required-field check — confirm this is intended.
            if (ImportRecords == null)
            {
                CurrentLogWriter.DebugFormat("{0}: Reading XML Data", Arguments.Id);
                // Fix: dispose the reader — previously the StreamReader (and its file
                // handle) was never closed.
                using (var reader = new StreamReader(importArgs.InputFilePath))
                {
                    var xSerializer = new XmlSerializer(typeof(CombinedInterchangeData));
                    ImportRecords = (CombinedInterchangeData)xSerializer.Deserialize(reader);
                }
            }
        }
        else
        {
            if (ImportRecords == null)
            {
                ImportRecords = new CombinedInterchangeData(true);
            }
            //eligible only if all the required field maps are available.
            if (!requiredFields.All(p => importArgs.FieldMappings.Keys.Contains(p)))
            {
                Summary.StatusMessage = string.Format("Not Eligible for Import : {0}", currentImportWorker.GetType());
                continue;
            }
            foreach (var requiredField in requiredFields)
            {
                currentImportWorker.CurrentFieldMappings.Add(requiredField, importArgs.FieldMappings[requiredField]);
            }
            //map all the optional fields that are present in the job's mappings
            var optionalFields = currentImportWorker.GetOptionalFields();
            foreach (var optionalField in optionalFields.Where(importArgs.FieldMappings.ContainsKey))
            {
                currentImportWorker.CurrentFieldMappings.Add(optionalField, importArgs.FieldMappings[optionalField]);
            }
            var conf = GetCurrentConfiguration(importArgs, currentImportWorker.CurrentInterchangeRecordType);
            //TODO: move this to the base class or somewhere common
            using (var csvReader = new CsvReader(File.OpenText(importArgs.InputFilePath), conf))
            {
                CurrentLogWriter.DebugFormat("{0}: Reading Text Data for {1}", Arguments.Id,
                    currentImportWorker.GetType().Name);
                var records = csvReader.GetRecordsWithNulls(currentImportWorker.CurrentInterchangeRecordType).ToList();
                // AddRecords<T> is generic in a record type only known at runtime,
                // so it must be closed and invoked via reflection.
                var method = ImportRecords.GetType().GetMethod("AddRecords");
                var generic = method.MakeGenericMethod(currentImportWorker.CurrentInterchangeRecordType);
                generic.Invoke(ImportRecords, new object[] { records });
            }
        }
        eligibleImports.Add(currentImportWorker);
        currentImportWorker.CurrentImportOptions = importArgs.CurrentImportOptions;
        currentImportWorker.CurrentProjectId = importArgs.ProjectId;
        currentImportWorker.ImportRequestedBy = importArgs.UserId;
        currentImportWorker.InputFilePath = importFilePath;
        currentImportWorker.FieldDelimiter = importArgs.FieldDelimiter;
        // NOTE(review): this discards the field mappings built above for text imports;
        // presumably the data is already materialized into ImportRecords so the
        // mappings are no longer needed — confirm before relying on CurrentFieldMappings
        // downstream.
        currentImportWorker.CurrentFieldMappings = new Dictionary<string, int>();
        currentImportWorker.JobDescription = importArgs.JobDescription;
        currentImportWorker.ImportData = ImportRecords;
    }
    ImportRecords.DedupLists();
    CurrentLogWriter.DebugFormat("{0}: Eligible Workers:", Arguments.Id);
    foreach (var import in eligibleImports)
    {
        CurrentLogWriter.Debug(import.GetType().Name);
    }
    return eligibleImports;
}