/// <summary>
/// Import Local Areas
/// </summary>
/// <param name="performContext"></param>
/// <param name="dbContext"></param>
/// <param name="fileLocation"></param>
/// <param name="systemId"></param>
public static void Import(PerformContext performContext, DbAppContext dbContext, string fileLocation, string systemId)
{
    // check the start point - if startPoint == sigId the import of this file has already completed
    int startPoint = ImportUtility.CheckInterMapForStartPoint(dbContext, OldTableProgress, BcBidImport.SigId, NewTable);

    if (startPoint == BcBidImport.SigId)    // all records in the xml file were imported by a previous run
    {
        performContext.WriteLine("*** Importing " + XmlFileName + " is complete from the former process ***");
        return;
    }

    try
    {
        string rootAttr = "ArrayOf" + OldTable;

        // create progress indicator
        performContext.WriteLine("Processing " + OldTable);
        IProgressBar progress = performContext.WriteProgressBar();
        progress.SetValue(0);

        // create serializer and deserialize the xml file
        XmlSerializer ser = new XmlSerializer(typeof(Area[]), new XmlRootAttribute(rootAttr));
        MemoryStream memoryStream = ImportUtility.MemoryStreamGenerator(XmlFileName, OldTable, fileLocation, rootAttr);
        Area[] legacyItems = (Area[])ser.Deserialize(memoryStream);

        int ii = startPoint;

        // skip the portion already processed
        if (startPoint > 0)
        {
            legacyItems = legacyItems.Skip(ii).ToArray();
        }

        Debug.WriteLine("Importing LocalArea Data. Total Records: " + legacyItems.Length);

        foreach (Area item in legacyItems.WithProgress(progress))
        {
            // see if we have this one already
            ImportMap importMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == OldTable && x.OldKey == item.Area_Id.ToString());

            // new entry
            if (importMap == null && item.Area_Id > 0)
            {
                LocalArea localArea = null;
                CopyToInstance(dbContext, item, ref localArea, systemId);
                ImportUtility.AddImportMap(dbContext, OldTable, item.Area_Id.ToString(), NewTable, localArea.Id);
            }
        }

        performContext.WriteLine("*** Importing " + XmlFileName + " is Done ***");
        ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, BcBidImport.SigId.ToString(), BcBidImport.SigId, NewTable);
        dbContext.SaveChangesForImport();
    }
    catch (Exception e)
    {
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
        throw;
    }
}
/// <summary>
/// Import existing Cities
/// </summary>
/// <param name="performContext"></param>
/// <param name="dbContext"></param>
/// <param name="fileLocation"></param>
/// <param name="systemId"></param>
private static void ImportCities(PerformContext performContext, DbAppContext dbContext, string fileLocation, string systemId)
{
    // check the start point - if startPoint == sigId the import of this file has already completed
    int startPoint = ImportUtility.CheckInterMapForStartPoint(dbContext, OldTableProgress, BcBidImport.SigId, NewTable);

    if (startPoint == BcBidImport.SigId)    // all records in the xml file were imported by a previous run
    {
        performContext.WriteLine("*** Importing " + XmlFileName + " is complete from the former process ***");
        return;
    }

    // manage the id value for new city records
    int maxCityIndex = 0;

    if (dbContext.Cities.Any())
    {
        maxCityIndex = dbContext.Cities.Max(x => x.Id);
    }

    try
    {
        string rootAttr = "ArrayOf" + OldTable;

        // create progress indicator
        performContext.WriteLine("Processing " + OldTable);
        IProgressBar progress = performContext.WriteProgressBar();
        progress.SetValue(0);

        // create serializer and deserialize the xml file
        XmlSerializer ser = new XmlSerializer(typeof(HetsCity[]), new XmlRootAttribute(rootAttr));
        MemoryStream memoryStream = ImportUtility.MemoryStreamGenerator(XmlFileName, OldTable, fileLocation, rootAttr);
        HetsCity[] legacyItems = (HetsCity[])ser.Deserialize(memoryStream);

        foreach (HetsCity item in legacyItems.WithProgress(progress))
        {
            // see if we have this one already
            ImportMap importMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == OldTable && x.OldKey == item.City_Id.ToString());

            // new entry
            if (importMap == null)
            {
                City city = null;
                CopyToInstance(dbContext, item, ref city, systemId, ref maxCityIndex);
                ImportUtility.AddImportMap(dbContext, OldTable, item.City_Id.ToString(), NewTable, city.Id);
            }
        }

        performContext.WriteLine("*** Importing " + XmlFileName + " is Done ***");
        ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, BcBidImport.SigId.ToString(), BcBidImport.SigId, NewTable);
        dbContext.SaveChangesForImport();
    }
    catch (Exception e)
    {
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
        throw;
    }
}
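// The manual id tracking in ImportCities (maxCityIndex, mirrored by maxUserIndex, maxOwnerIndex and
// maxEquipTypeIndex in the other importers) exists because the importer assigns primary keys itself
// instead of relying on a database sequence. A minimal sketch of that allocation, as a hypothetical
// helper that is not part of the import code; it assumes the real CopyToInstance overloads do the
// equivalent with the counter passed to them by ref:
private static City CreateCityWithNextId(DbAppContext dbContext, ref int maxCityIndex, string name)
{
    if (maxCityIndex == 0 && dbContext.Cities.Any())
    {
        maxCityIndex = dbContext.Cities.Max(x => x.Id);
    }

    // assign the next free id after the current maximum
    return new City { Id = ++maxCityIndex, Name = name };
}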
public static void Obfuscate(PerformContext performContext, DbAppContext dbContext, string sourceLocation, string destinationLocation, string systemId)
{
    // check the start point - if startPoint == sigId the obfuscation of this file has already completed
    int startPoint = ImportUtility.CheckInterMapForStartPoint(dbContext, "Obfuscate_" + OldTableProgress, BcBidImport.SigId, NewTable);

    if (startPoint == BcBidImport.SigId)    // all records in the xml file were processed by a previous run
    {
        performContext.WriteLine("*** Obfuscating " + XmlFileName + " is complete from the former process ***");
        return;
    }

    try
    {
        string rootAttr = "ArrayOf" + OldTable;

        // create progress indicator
        performContext.WriteLine("Processing " + OldTable);
        IProgressBar progress = performContext.WriteProgressBar();
        progress.SetValue(0);

        // create serializer and deserialize the xml file
        XmlSerializer ser = new XmlSerializer(typeof(Block[]), new XmlRootAttribute(rootAttr));
        MemoryStream memoryStream = ImportUtility.MemoryStreamGenerator(XmlFileName, OldTable, sourceLocation, rootAttr);
        Block[] legacyItems = (Block[])ser.Deserialize(memoryStream);

        performContext.WriteLine("Obfuscating Block data");
        progress.SetValue(0);

        foreach (Block item in legacyItems.WithProgress(progress))
        {
            item.Created_By = systemId;
            item.Closed_Comments = ImportUtility.ScrambleString(item.Closed_Comments);
        }

        performContext.WriteLine("Writing " + XmlFileName + " to " + destinationLocation);

        // write out the array
        FileStream fs = ImportUtility.GetObfuscationDestination(XmlFileName, destinationLocation);
        ser.Serialize(fs, legacyItems);
        fs.Close();

        // no excel mapping spreadsheet for Block
    }
    catch (Exception e)
    {
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
    }
}
public static void Obfuscate(PerformContext performContext, DbAppContext dbContext, string sourceLocation, string destinationLocation, string systemId)
{
    // check the start point - if startPoint == sigId the obfuscation of this file has already completed
    int startPoint = ImportUtility.CheckInterMapForStartPoint(dbContext, "Obfuscate_" + OldTableProgress, BcBidImport.SigId, NewTable);

    if (startPoint == BcBidImport.SigId)    // all records in the xml file were processed by a previous run
    {
        performContext.WriteLine("*** Obfuscating " + XmlFileName + " is complete from the former process ***");
        return;
    }

    try
    {
        string rootAttr = "ArrayOf" + OldTable;

        performContext.WriteLine("Processing " + OldTable);

        // create serializer and deserialize the xml file
        XmlSerializer ser = new XmlSerializer(typeof(EquipType[]), new XmlRootAttribute(rootAttr));
        MemoryStream memoryStream = ImportUtility.MemoryStreamGenerator(XmlFileName, OldTable, sourceLocation, rootAttr);
        EquipType[] legacyItems = (EquipType[])ser.Deserialize(memoryStream);

        // no fields to mask for equipment type - straight copy
        performContext.WriteLine("Writing " + XmlFileName + " to " + destinationLocation);

        // write out the array
        FileStream fs = ImportUtility.GetObfuscationDestination(XmlFileName, destinationLocation);
        ser.Serialize(fs, legacyItems);
        fs.Close();

        // no excel mapping spreadsheet for equipment type
    }
    catch (Exception e)
    {
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
    }
}
/// <summary>
/// Import Users
/// </summary>
/// <param name="performContext"></param>
/// <param name="dbContext"></param>
/// <param name="fileLocation"></param>
/// <param name="systemId"></param>
public static void Import(PerformContext performContext, DbAppContext dbContext, string fileLocation, string systemId)
{
    // check the start point - if startPoint == sigId the import of this file has already completed
    int startPoint = ImportUtility.CheckInterMapForStartPoint(dbContext, OldTableProgress, BcBidImport.SigId, NewTable);

    if (startPoint == BcBidImport.SigId)    // all records in the xml file were imported by a previous run
    {
        performContext.WriteLine("*** Importing " + XmlFileName + " is complete from the former process ***");
        return;
    }

    // manage the id value for new user records
    int maxUserIndex = 0;

    if (dbContext.Users.Any())
    {
        maxUserIndex = dbContext.Users.Max(x => x.Id);
    }

    try
    {
        string rootAttr = "ArrayOf" + OldTable;

        // create progress indicator
        performContext.WriteLine("Processing " + OldTable);
        IProgressBar progress = performContext.WriteProgressBar();
        progress.SetValue(0);

        // create serializer and deserialize the xml file
        XmlSerializer ser = new XmlSerializer(typeof(UserHets[]), new XmlRootAttribute(rootAttr));
        MemoryStream memoryStream = ImportUtility.MemoryStreamGenerator(XmlFileName, OldTable, fileLocation, rootAttr);
        UserHets[] legacyItems = (UserHets[])ser.Deserialize(memoryStream);

        int ii = startPoint;

        // skip the portion already processed
        if (startPoint > 0)
        {
            legacyItems = legacyItems.Skip(ii).ToArray();
        }

        // build dictionaries of first and last names from the created by and modified by values in the data
        performContext.WriteLine("Extracting first and last names");
        progress.SetValue(0);

        Dictionary<string, string> firstNames = new Dictionary<string, string>();
        Dictionary<string, string> lastNames = new Dictionary<string, string>();

        foreach (UserHets item in legacyItems.WithProgress(progress))
        {
            string name = item.Created_By;
            GetNameParts(name, ref firstNames, ref lastNames);

            name = item.Modified_By;
            GetNameParts(name, ref firstNames, ref lastNames);
        }

        // import the data
        performContext.WriteLine("Importing User Data");
        progress.SetValue(0);

        foreach (UserHets item in legacyItems.WithProgress(progress))
        {
            // see if we have this one already
            ImportMap importMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == OldTable && x.OldKey == item.Popt_Id.ToString());

            // new entry
            if (importMap == null && item.Popt_Id != null)
            {
                string username = NormalizeUserCode(item.User_Cd);
                string firstName = GetNamePart(username, firstNames);
                string lastName = GetNamePart(username, lastNames);

                username = username.ToLower();

                User instance = dbContext.Users.FirstOrDefault(x => x.SmUserId == username);

                // if the user already exists - move to the next record
                if (instance != null)
                {
                    continue;
                }

                CopyToInstance(dbContext, item, ref instance, systemId, username, firstName, lastName, ref maxUserIndex);

                if (instance != null)
                {
                    ImportUtility.AddImportMap(dbContext, OldTable, item.Popt_Id, NewTable, instance.Id);
                }
            }
        }

        try
        {
            performContext.WriteLine("*** Importing " + XmlFileName + " is Done ***");
            ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, BcBidImport.SigId.ToString(), BcBidImport.SigId, NewTable);
            dbContext.SaveChangesForImport();
        }
        catch (Exception e)
        {
            string temp = string.Format("Error saving data (UserIndex: {0}): {1}", maxUserIndex, e.Message);
            performContext.WriteLine(temp);
            throw new DataException(temp);
        }
    }
    catch (Exception e)
    {
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
        throw;
    }
}
/// <summary>
/// Import Local Areas
/// </summary>
/// <param name="performContext"></param>
/// <param name="dbContext"></param>
/// <param name="fileLocation"></param>
/// <param name="systemId"></param>
public static void Import(PerformContext performContext, DbAppContext dbContext, string fileLocation, string systemId)
{
    // check the start point - if startPoint == sigId the import of this file has already completed
    int startPoint = ImportUtility.CheckInterMapForStartPoint(dbContext, OldTableProgress, BCBidImport.SigId);

    if (startPoint == BCBidImport.SigId)    // all records in the xml file were imported by a previous run
    {
        performContext.WriteLine("*** Importing " + XmlFileName + " is complete from the former process ***");
        return;
    }

    try
    {
        string rootAttr = "ArrayOf" + OldTable;

        // create progress indicator
        performContext.WriteLine("Processing " + OldTable);
        IProgressBar progress = performContext.WriteProgressBar();
        progress.SetValue(0);

        // create serializer and deserialize the xml file
        XmlSerializer ser = new XmlSerializer(typeof(ImportModels.Area[]), new XmlRootAttribute(rootAttr));
        MemoryStream memoryStream = ImportUtility.MemoryStreamGenerator(XmlFileName, OldTable, fileLocation, rootAttr);
        ImportModels.Area[] legacyItems = (ImportModels.Area[])ser.Deserialize(memoryStream);

        int ii = startPoint;

        // skip the portion already processed
        if (startPoint > 0)
        {
            legacyItems = legacyItems.Skip(ii).ToArray();
        }

        foreach (Area item in legacyItems.WithProgress(progress))
        {
            LocalArea localArea = null;

            // see if we have this one already
            ImportMap importMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == OldTable && x.OldKey == item.Area_Id.ToString());

            if (dbContext.LocalAreas.Count(x => String.Equals(x.Name, item.Area_Desc.Trim(), StringComparison.CurrentCultureIgnoreCase)) > 0)
            {
                localArea = dbContext.LocalAreas.FirstOrDefault(x => x.Name.ToUpper() == item.Area_Desc.Trim().ToUpper());
            }

            // new entry
            if (importMap == null || dbContext.LocalAreas.Count(x => String.Equals(x.Name, item.Area_Desc.Trim(), StringComparison.CurrentCultureIgnoreCase)) == 0)
            {
                if (item.Area_Id > 0)
                {
                    CopyToInstance(dbContext, item, ref localArea, systemId);
                    ImportUtility.AddImportMap(dbContext, OldTable, item.Area_Id.ToString(), NewTable, localArea.Id);
                }
            }
            else // update
            {
                localArea = dbContext.LocalAreas.FirstOrDefault(x => x.Id == importMap.NewKey);

                // record was deleted
                if (localArea == null)
                {
                    CopyToInstance(dbContext, item, ref localArea, systemId);

                    // update the import map
                    importMap.NewKey = localArea.Id;
                    dbContext.ImportMaps.Update(importMap);
                }
                else // ordinary update
                {
                    CopyToInstance(dbContext, item, ref localArea, systemId);

                    // touch the import map
                    importMap.LastUpdateTimestamp = DateTime.UtcNow;
                    dbContext.ImportMaps.Update(importMap);
                }
            }

            // save changes to the database periodically to avoid writing too frequently
            if (++ii % 250 == 0)
            {
                try
                {
                    ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, ii.ToString(), BCBidImport.SigId);
                    dbContext.SaveChangesForImport();
                }
                catch (Exception e)
                {
                    performContext.WriteLine("Error saving data " + e.Message);
                }
            }
        }

        try
        {
            performContext.WriteLine("*** Importing " + XmlFileName + " is Done ***");
            ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, BCBidImport.SigId.ToString(), BCBidImport.SigId);
            dbContext.SaveChangesForImport();
        }
        catch (Exception e)
        {
            performContext.WriteLine("Error saving data " + e.Message);
        }
    }
    catch (Exception e)
    {
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
    }
}
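// The importers in this file share one resume mechanism: a progress row in ImportMaps records how far
// the last run got (AddImportMapForProgress), CheckInterMapForStartPoint reads it back at startup, and a
// value equal to BcBidImport.SigId means the whole file is done. A minimal, generalized sketch of that
// checkpointing pattern (hypothetical helper, not part of the import code - the real work happens in
// ImportUtility and the per-table Import methods):
private static void ProcessWithCheckpoints<T>(IList<T> items, int startPoint, int batchSize,
    Action<T> processItem, Action<int> saveCheckpoint)
{
    for (int i = startPoint; i < items.Count; i++)
    {
        processItem(items[i]);

        // persist progress periodically so an interrupted run can resume close to where it stopped
        if ((i + 1) % batchSize == 0)
        {
            saveCheckpoint(i + 1);
        }
    }

    // the final checkpoint marks the job as complete
    saveCheckpoint(items.Count);
}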
/// <summary>
/// Import Rotation List
/// </summary>
/// <param name="performContext"></param>
/// <param name="dbContext"></param>
/// <param name="fileLocation"></param>
/// <param name="systemId"></param>
public static void Import(PerformContext performContext, DbAppContext dbContext, string fileLocation, string systemId)
{
    // check the start point - if startPoint == sigId the import of this file has already completed
    int startPoint = ImportUtility.CheckInterMapForStartPoint(dbContext, OldTableProgress, BCBidImport.SigId);

    if (startPoint == BCBidImport.SigId)    // all records in the xml file were imported by a previous run
    {
        performContext.WriteLine("*** Importing " + XmlFileName + " is complete from the former process ***");
        return;
    }

    try
    {
        string rootAttr = "ArrayOf" + OldTable;

        // create progress indicator
        performContext.WriteLine("Processing " + OldTable);
        IProgressBar progress = performContext.WriteProgressBar();
        progress.SetValue(0);

        // create serializer and deserialize the xml file
        XmlSerializer ser = new XmlSerializer(typeof(Block[]), new XmlRootAttribute(rootAttr));
        MemoryStream memoryStream = ImportUtility.MemoryStreamGenerator(XmlFileName, OldTable, fileLocation, rootAttr);
        Block[] legacyItems = (Block[])ser.Deserialize(memoryStream);

        int ii = startPoint;

        // skip the portion already processed
        if (startPoint > 0)
        {
            legacyItems = legacyItems.Skip(ii).ToArray();
        }

        foreach (Block item in legacyItems.WithProgress(progress))
        {
            int areaId = item.Area_Id ?? 0;
            int equipmentTypeId = item.Equip_Type_Id ?? 0;
            int blockNum = Convert.ToInt32(float.Parse(item.Block_Num ?? "0.0"));

            // the legacy table has no single key, so build a conversion key from area, equipment type and block number
            string oldUniqueId = ((areaId * 10000 + equipmentTypeId) * 100 + blockNum).ToString();

            // see if we have this one already
            ImportMap importMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == OldTable && x.OldKey == oldUniqueId);

            // new entry
            if (importMap == null)
            {
                if (areaId > 0)
                {
                    LocalAreaRotationList instance = null;
                    CopyToInstance(dbContext, item, ref instance, systemId);
                    ImportUtility.AddImportMap(dbContext, OldTable, oldUniqueId, NewTable, instance.Id);
                }
            }
            else // update
            {
                LocalAreaRotationList instance = dbContext.LocalAreaRotationLists.FirstOrDefault(x => x.Id == importMap.NewKey);

                // record was deleted
                if (instance == null)
                {
                    CopyToInstance(dbContext, item, ref instance, systemId);

                    // update the import map
                    importMap.NewKey = instance.Id;
                    dbContext.ImportMaps.Update(importMap);
                }
                else // ordinary update
                {
                    CopyToInstance(dbContext, item, ref instance, systemId);

                    // touch the import map
                    importMap.LastUpdateTimestamp = DateTime.UtcNow;
                    dbContext.ImportMaps.Update(importMap);
                }
            }

            // save changes to the database periodically to avoid writing too frequently
            if (++ii % 500 == 0)
            {
                try
                {
                    ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, ii.ToString(), BCBidImport.SigId);
                    dbContext.SaveChangesForImport();
                }
                catch (Exception e)
                {
                    performContext.WriteLine("Error saving data " + e.Message);
                }
            }
        }

        try
        {
            performContext.WriteLine("*** Importing " + XmlFileName + " is Done ***");
            ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, BCBidImport.SigId.ToString(), BCBidImport.SigId);
            dbContext.SaveChangesForImport();
        }
        catch (Exception e)
        {
            performContext.WriteLine("Error saving data " + e.Message);
        }
    }
    catch (Exception e)
    {
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
    }
}
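// A minimal sketch (hypothetical helper, not part of the import code) of the composite key arithmetic
// used above, to make the collision boundaries explicit: the encoding only stays unique while
// Equip_Type_Id is below 10000 and the block number is below 100.
private static string BuildRotationListKey(int areaId, int equipmentTypeId, int blockNum)
{
    // e.g. areaId = 12, equipmentTypeId = 34, blockNum = 5  =>  "12003405"
    return ((areaId * 10000 + equipmentTypeId) * 100 + blockNum).ToString();
}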
public static void Obfuscate(PerformContext performContext, DbAppContext dbContext, string sourceLocation, string destinationLocation, string systemId)
{
    // check the start point - if startPoint == sigId the obfuscation of this file has already completed
    int startPoint = ImportUtility.CheckInterMapForStartPoint(dbContext, "Obfuscate_" + OldTableProgress, BcBidImport.SigId, NewTable);

    if (startPoint == BcBidImport.SigId)    // all records in the xml file were processed by a previous run
    {
        performContext.WriteLine("*** Obfuscating " + XmlFileName + " is complete from the former process ***");
        return;
    }

    try
    {
        string rootAttr = "ArrayOf" + OldTable;

        // create progress indicator
        performContext.WriteLine("Processing " + OldTable);
        IProgressBar progress = performContext.WriteProgressBar();
        progress.SetValue(0);

        // create serializer and deserialize the xml file
        XmlSerializer ser = new XmlSerializer(typeof(Project[]), new XmlRootAttribute(rootAttr));
        MemoryStream memoryStream = ImportUtility.MemoryStreamGenerator(XmlFileName, OldTable, sourceLocation, rootAttr);
        Project[] legacyItems = (Project[])ser.Deserialize(memoryStream);

        performContext.WriteLine("Obfuscating Project data");
        progress.SetValue(0);

        // use a single Random instance - creating one per iteration can repeat values because the seed is time based
        Random random = new Random();

        List<ImportMapRecord> importMapRecords = new List<ImportMapRecord>();

        foreach (Project item in legacyItems.WithProgress(progress))
        {
            item.Created_By = systemId;

            string newProjectNum = random.Next(10000).ToString();

            ImportMapRecord importMapRecordOrganization = new ImportMapRecord
            {
                TableName = NewTable,
                MappedColumn = "Project_Num",
                OriginalValue = item.Project_Num,
                NewValue = newProjectNum
            };

            importMapRecords.Add(importMapRecordOrganization);

            item.Project_Num = newProjectNum;
            item.Job_Desc1 = ImportUtility.ScrambleString(item.Job_Desc1);
            item.Job_Desc2 = ImportUtility.ScrambleString(item.Job_Desc2);
        }

        performContext.WriteLine("Writing " + XmlFileName + " to " + destinationLocation);

        // write out the array
        FileStream fs = ImportUtility.GetObfuscationDestination(XmlFileName, destinationLocation);
        ser.Serialize(fs, legacyItems);
        fs.Close();

        // write out the spreadsheet of import records
        ImportUtility.WriteImportRecordsToExcel(destinationLocation, importMapRecords, OldTable);
    }
    catch (Exception e)
    {
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
    }
}
static public void Import(PerformContext performContext, DbAppContext dbContext, string fileLocation, string systemId)
{
    // check the start point - if startPoint == sigId the import of this file has already completed
    int startPoint = ImportUtility.CheckInterMapForStartPoint(dbContext, oldTable_Progress, BCBidImport.sigId);

    if (startPoint == BCBidImport.sigId)    // all records in the xml file were imported by a previous run
    {
        performContext.WriteLine("*** Importing " + xmlFileName + " is complete from the former process ***");
        return;
    }

    try
    {
        string rootAttr = "ArrayOf" + oldTable;

        // create progress indicator
        performContext.WriteLine("Processing " + oldTable);
        var progress = performContext.WriteProgressBar();
        progress.SetValue(0);

        // create serializer and deserialize the xml file
        XmlSerializer ser = new XmlSerializer(typeof(User_HETS[]), new XmlRootAttribute(rootAttr));
        MemoryStream memoryStream = ImportUtility.memoryStreamGenerator(xmlFileName, oldTable, fileLocation, rootAttr);
        HETSAPI.Import.User_HETS[] legacyItems = (HETSAPI.Import.User_HETS[])ser.Deserialize(memoryStream);

        int ii = startPoint;

        // skip the portion already processed
        if (startPoint > 0)
        {
            legacyItems = legacyItems.Skip(ii).ToArray();
        }

        foreach (var item in legacyItems.WithProgress(progress))
        {
            // see if we have this one already
            ImportMap importMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == oldTable && x.OldKey == item.Popt_Id.ToString());

            Models.User instance = dbContext.Users.FirstOrDefault(x => item.User_Cd.ToUpper().IndexOf(x.SmUserId.ToUpper()) >= 0);

            if (instance == null)
            {
                CopyToInstance(performContext, dbContext, item, ref instance, systemId);

                if (importMap == null && instance != null) // new entry
                {
                    ImportUtility.AddImportMap(dbContext, oldTable, item.Popt_Id.ToString(), newTable, instance.Id);
                }

                ImportUtility.AddImportMap_For_Progress(dbContext, oldTable_Progress, (++ii).ToString(), BCBidImport.sigId);
                int iResult = dbContext.SaveChangesForImport();
            }
            //else // update
            //{
            //    instance = dbContext.Users.FirstOrDefault(x => x.Id == importMap.NewKey);

            //    if (instance == null) // record was deleted
            //    {
            //        CopyToInstance(performContext, dbContext, item, ref instance, systemId);

            //        // update the import map
            //        importMap.NewKey = instance.Id;
            //        dbContext.ImportMaps.Update(importMap);
            //    }
            //    else // ordinary update
            //    {
            //        CopyToInstance(performContext, dbContext, item, ref instance, systemId);

            //        // touch the import map
            //        importMap.LastUpdateTimestamp = DateTime.UtcNow;
            //        dbContext.ImportMaps.Update(importMap);
            //    }
            //}
        }
    }
    catch (Exception e)
    {
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
    }

    try
    {
        performContext.WriteLine("*** Importing " + xmlFileName + " is Done ***");
        ImportUtility.AddImportMap_For_Progress(dbContext, oldTable_Progress, BCBidImport.sigId.ToString(), BCBidImport.sigId);
        int iResult = dbContext.SaveChangesForImport();
    }
    catch (Exception e)
    {
        // intentionally swallow save errors here so the job can finish; only keep the message for debugging
        string iStr = e.ToString();
    }
}
/// <summary>
/// Import Owner Records
/// </summary>
/// <param name="performContext"></param>
/// <param name="dbContext"></param>
/// <param name="fileLocation"></param>
/// <param name="systemId"></param>
public static void Import(PerformContext performContext, DbAppContext dbContext, string fileLocation, string systemId)
{
    // check the start point - if startPoint == sigId the import of this file has already completed
    int startPoint = ImportUtility.CheckInterMapForStartPoint(dbContext, OldTableProgress, BCBidImport.SigId);

    if (startPoint == BCBidImport.SigId)    // all records in the xml file were imported by a previous run
    {
        performContext.WriteLine("*** Importing " + XmlFileName + " is complete from the former process ***");
        return;
    }

    List<Owner> data = new List<Owner>();

    int maxOwnerIndex = dbContext.Owners.Max(x => x.Id);
    int maxContactIndex = dbContext.Contacts.Max(x => x.Id);

    try
    {
        string rootAttr = "ArrayOf" + OldTable;

        // create progress indicator
        performContext.WriteLine("Processing " + OldTable);
        IProgressBar progress = performContext.WriteProgressBar();
        progress.SetValue(0);

        // create serializer and deserialize the xml file
        XmlSerializer ser = new XmlSerializer(typeof(ImportModels.Owner[]), new XmlRootAttribute(rootAttr));
        MemoryStream memoryStream = ImportUtility.MemoryStreamGenerator(XmlFileName, OldTable, fileLocation, rootAttr);
        ImportModels.Owner[] legacyItems = (ImportModels.Owner[])ser.Deserialize(memoryStream);

        int ii = startPoint;

        // skip the portion already processed
        if (startPoint > 0)
        {
            legacyItems = legacyItems.Skip(ii).ToArray();
        }

        foreach (ImportModels.Owner item in legacyItems.WithProgress(progress))
        {
            // see if we have this one already
            ImportMap importMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == OldTable && x.OldKey == item.Popt_Id.ToString());

            // new entry
            if (importMap == null)
            {
                Owner owner = null;
                CopyToInstance(dbContext, item, ref owner, systemId, ref maxOwnerIndex, ref maxContactIndex);
                data.Add(owner);
                ImportUtility.AddImportMap(dbContext, OldTable, item.Popt_Id.ToString(), NewTable, owner.Id);
            }
            else // update
            {
                Owner owner = dbContext.Owners.FirstOrDefault(x => x.Id == importMap.NewKey);

                if (owner == null) // record was deleted
                {
                    CopyToInstance(dbContext, item, ref owner, systemId, ref maxOwnerIndex, ref maxContactIndex);

                    // update the import map
                    importMap.NewKey = owner.Id;
                    dbContext.ImportMaps.Update(importMap);
                }
                else // ordinary update
                {
                    CopyToInstance(dbContext, item, ref owner, systemId, ref maxOwnerIndex, ref maxContactIndex);

                    // touch the import map
                    importMap.LastUpdateTimestamp = DateTime.UtcNow;
                    dbContext.ImportMaps.Update(importMap);
                }
            }

            // save changes to the database periodically to avoid writing too frequently
            if (++ii % 500 == 0)
            {
                try
                {
                    ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, ii.ToString(), BCBidImport.SigId);
                    dbContext.SaveChangesForImport();
                }
                catch (Exception e)
                {
                    performContext.WriteLine("Error saving data " + e.Message);
                }
            }
        }

        try
        {
            performContext.WriteLine("*** Importing " + XmlFileName + " is Done ***");
            ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, BCBidImport.SigId.ToString(), BCBidImport.SigId);
            dbContext.SaveChangesForImport();
        }
        catch (Exception e)
        {
            performContext.WriteLine("Error saving data " + e.Message);
        }
    }
    catch (Exception e)
    {
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
    }
}
/// <summary>
/// Import Rotations
/// </summary>
/// <param name="performContext"></param>
/// <param name="dbContext"></param>
/// <param name="fileLocation"></param>
/// <param name="systemId"></param>
public static void Import(PerformContext performContext, DbAppContext dbContext, string fileLocation, string systemId)
{
    // check the start point - if startPoint == sigId the import of this file has already completed
    int startPoint = ImportUtility.CheckInterMapForStartPoint(dbContext, OldTableProgress, BCBidImport.SigId);

    if (startPoint == BCBidImport.SigId)    // all records in the xml file were imported by a previous run
    {
        performContext.WriteLine("*** Importing " + XmlFileName + " is complete from the former process ***");
        return;
    }

    try
    {
        string rootAttr = "ArrayOf" + OldTable;

        // create progress indicator
        performContext.WriteLine("Processing " + OldTable);
        IProgressBar progress = performContext.WriteProgressBar();
        progress.SetValue(0);

        // create serializer and deserialize the xml file
        XmlSerializer ser = new XmlSerializer(typeof(RotationDoc[]), new XmlRootAttribute(rootAttr));
        MemoryStream memoryStream = ImportUtility.MemoryStreamGenerator(XmlFileName, OldTable, fileLocation, rootAttr);
        RotationDoc[] legacyItems = (RotationDoc[])ser.Deserialize(memoryStream);

        // load equipment once up front to avoid a database query in every iteration
        List<Equipment> equips = dbContext.Equipments
            .Include(x => x.DumpTruck)
            .Include(x => x.DistrictEquipmentType)
            .ToList();

        int ii = startPoint;

        // skip the portion already processed
        if (startPoint > 0)
        {
            legacyItems = legacyItems.Skip(ii).ToArray();
        }

        foreach (RotationDoc item in legacyItems.WithProgress(progress))
        {
            // see if we have this one already
            string oldKey = item.Equip_Id + item.Note_Dt + item.Created_Dt;
            ImportMap importMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == OldTable && x.OldKey == oldKey);

            // new entry
            if (importMap == null)
            {
                Note instance = null;
                CopyToInstance(dbContext, item, ref instance, equips);
                ImportUtility.AddImportMap(dbContext, OldTable, oldKey, NewTable, instance.Id);
            }
            else // update
            {
                Note instance = dbContext.Notes.FirstOrDefault(x => x.Id == importMap.NewKey);

                // record was deleted
                if (instance == null)
                {
                    CopyToInstance(dbContext, item, ref instance, equips);

                    // update the import map
                    importMap.NewKey = instance.Id;
                    dbContext.ImportMaps.Update(importMap);
                }
                else // ordinary update
                {
                    CopyToInstance(dbContext, item, ref instance, equips);

                    // touch the import map
                    importMap.LastUpdateTimestamp = DateTime.UtcNow;
                    dbContext.ImportMaps.Update(importMap);
                }
            }

            // save changes to the database periodically to avoid writing too frequently
            if (++ii % 1000 == 0)
            {
                try
                {
                    ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, ii.ToString(), BCBidImport.SigId);
                    dbContext.SaveChangesForImport();
                }
                catch (Exception e)
                {
                    performContext.WriteLine("Error saving data " + e.Message);
                }
            }
        }

        try
        {
            performContext.WriteLine("*** Importing " + XmlFileName + " is Done ***");
            ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, BCBidImport.SigId.ToString(), BCBidImport.SigId);
            dbContext.SaveChangesForImport();
        }
        catch (Exception e)
        {
            performContext.WriteLine("Error saving data " + e.Message);
        }
    }
    catch (Exception e)
    {
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
    }
}
/// <summary>
/// Recalculate the block assignment for each piece of equipment
/// </summary>
/// <param name="performContext"></param>
/// <param name="seniorityScoringRules"></param>
/// <param name="dbContext"></param>
/// <param name="systemId"></param>
public static void ProcessBlocks(PerformContext performContext, string seniorityScoringRules, DbAppContext dbContext, string systemId)
{
    try
    {
        performContext.WriteLine("*** Recalculating Equipment Block Assignment ***");
        Debug.WriteLine("Recalculating Equipment Block Assignment");

        int ii = 0;
        string _oldTableProgress = "BlockAssignment_Progress";
        string _newTable = "BlockAssignment";

        // check if the block assignment has already been completed
        int startPoint = ImportUtility.CheckInterMapForStartPoint(dbContext, _oldTableProgress, BcBidImport.SigId, _newTable);

        if (startPoint == BcBidImport.SigId)    // the assignment job is complete
        {
            performContext.WriteLine("*** Recalculating Equipment Block Assignment is complete from the former process ***");
            return;
        }

        // ************************************************************
        // cleanup old block assignment status records
        // ************************************************************
        List<ImportMap> importMapList = dbContext.ImportMaps
            .Where(x => x.OldTable == _oldTableProgress && x.NewTable == _newTable)
            .ToList();

        foreach (ImportMap importMap in importMapList)
        {
            dbContext.ImportMaps.Remove(importMap);
        }

        dbContext.SaveChanges();

        // ************************************************************
        // get processing rules
        // ************************************************************
        SeniorityScoringRules scoringRules = new SeniorityScoringRules(seniorityScoringRules);

        // ************************************************************
        // get all local areas
        // (using active equipment to minimize the results)
        // ************************************************************
        List<LocalArea> localAreas = dbContext.Equipments
            .Include(x => x.LocalArea)
            .Where(x => x.Status == Equipment.StatusApproved && x.ArchiveCode == "N")
            .Select(x => x.LocalArea)
            .Distinct()
            .ToList();

        // ************************************************************
        // get all district equipment types
        // (using active equipment to minimize the results)
        // ************************************************************
        List<DistrictEquipmentType> equipmentTypes = dbContext.Equipments
            .Include(x => x.DistrictEquipmentType)
            .Where(x => x.Status == Equipment.StatusApproved && x.ArchiveCode == "N")
            .Select(x => x.DistrictEquipmentType)
            .Distinct()
            .ToList();

        // ************************************************************************
        // iterate the data and update the assignment blocks
        // (seniority is already calculated)
        // ************************************************************************
        Debug.WriteLine("Recalculating Equipment Block Assignment - Local Area Record Count: " + localAreas.Count);

        foreach (LocalArea localArea in localAreas)
        {
            foreach (DistrictEquipmentType districtEquipmentType in equipmentTypes)
            {
                // get the associated equipment type
                EquipmentType equipmentTypeRecord = dbContext.EquipmentTypes.FirstOrDefault(x => x.Id == districtEquipmentType.EquipmentTypeId);

                if (equipmentTypeRecord == null)
                {
                    throw new DataException(string.Format("Invalid District Equipment Type. No associated Equipment Type record (District Equipment Id: {0})", districtEquipmentType.Id));
                }

                // get rules - dump trucks use a separate block size and block count
                int blockSize = equipmentTypeRecord.IsDumpTruck
                    ? scoringRules.GetBlockSize("DumpTruck")
                    : scoringRules.GetBlockSize();
                int totalBlocks = equipmentTypeRecord.IsDumpTruck
                    ? scoringRules.GetTotalBlocks("DumpTruck")
                    : scoringRules.GetTotalBlocks();

                // assign blocks
                SeniorityListExtensions.AssignBlocks(dbContext, localArea.Id, districtEquipmentType.Id, blockSize, totalBlocks, false);

                // save changes to the database periodically to avoid writing too frequently
                if (ii++ % 1000 == 0)
                {
                    try
                    {
                        Debug.WriteLine("Recalculating Equipment Block Assignment - Index: " + ii);
                        ImportUtility.AddImportMapForProgress(dbContext, _oldTableProgress, ii.ToString(), BcBidImport.SigId, _newTable);
                        dbContext.SaveChangesForImport();
                    }
                    catch (Exception e)
                    {
                        performContext.WriteLine("Error saving data " + e.Message);
                    }
                }
            }
        }

        // ************************************************************
        // save final set of updates
        // ************************************************************
        try
        {
            performContext.WriteLine("*** Recalculating Equipment Block Assignment is Done ***");
            Debug.WriteLine("Recalculating Equipment Block Assignment is Done");

            ImportUtility.AddImportMapForProgress(dbContext, _oldTableProgress, BcBidImport.SigId.ToString(), BcBidImport.SigId, _newTable);
            dbContext.SaveChangesForImport();
        }
        catch (Exception e)
        {
            string temp = string.Format("Error saving data (Record: {0}): {1}", ii, e.Message);
            performContext.WriteLine(temp);
            throw new DataException(temp);
        }
    }
    catch (Exception e)
    {
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
        throw;
    }
}
static public void Import(PerformContext performContext, DbAppContext dbContext, string fileLocation, string systemId)
{
    // check the start point - if startPoint == sigId the import of this file has already completed
    int startPoint = ImportUtility.CheckInterMapForStartPoint(dbContext, oldTable_Progress, BCBidImport.sigId);

    if (startPoint == BCBidImport.sigId)    // all records in the xml file were imported by a previous run
    {
        performContext.WriteLine("*** Importing " + xmlFileName + " is complete from the former process ***");
        return;
    }

    try
    {
        string rootAttr = "ArrayOf" + oldTable;

        // create progress indicator
        performContext.WriteLine("Processing " + oldTable);
        var progress = performContext.WriteProgressBar();
        progress.SetValue(0);

        // create serializer and deserialize the xml file
        XmlSerializer ser = new XmlSerializer(typeof(EquipType[]), new XmlRootAttribute(rootAttr));
        MemoryStream memoryStream = ImportUtility.memoryStreamGenerator(xmlFileName, oldTable, fileLocation, rootAttr);
        HETSAPI.Import.EquipType[] legacyItems = (HETSAPI.Import.EquipType[])ser.Deserialize(memoryStream);

        int ii = 0;

        foreach (var item in legacyItems.WithProgress(progress))
        {
            // see if we have this one already
            ImportMap importMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == oldTable && x.OldKey == item.Equip_Type_Id.ToString());

            if (importMap == null) // new entry
            {
                if (item.Equip_Type_Id > 0)
                {
                    Models.EquipmentType instance = null;
                    CopyToInstance(performContext, dbContext, item, ref instance, systemId);
                    ImportUtility.AddImportMap(dbContext, oldTable, item.Equip_Type_Id.ToString(), newTable, instance.Id);
                }
            }
            else // update
            {
                Models.EquipmentType instance = dbContext.EquipmentTypes.FirstOrDefault(x => x.Id == importMap.NewKey);

                if (instance == null) // record was deleted
                {
                    CopyToInstance(performContext, dbContext, item, ref instance, systemId);

                    // update the import map
                    importMap.NewKey = instance.Id;
                    dbContext.ImportMaps.Update(importMap);
                }
                else // ordinary update
                {
                    CopyToInstance(performContext, dbContext, item, ref instance, systemId);

                    // touch the import map
                    importMap.LastUpdateTimestamp = DateTime.UtcNow;
                    dbContext.ImportMaps.Update(importMap);
                }
            }

            // save changes to the database periodically to avoid writing too frequently
            if (ii++ % 500 == 0)
            {
                try
                {
                    ImportUtility.AddImportMap_For_Progress(dbContext, oldTable_Progress, ii.ToString(), BCBidImport.sigId);
                    int iResult = dbContext.SaveChangesForImport();
                }
                catch (Exception e)
                {
                    // intentionally swallow save errors so the import can continue; keep the message for debugging
                    string iStr = e.ToString();
                }
            }
        }

        try
        {
            performContext.WriteLine("*** Importing " + xmlFileName + " is Done ***");
            ImportUtility.AddImportMap_For_Progress(dbContext, oldTable_Progress, BCBidImport.sigId.ToString(), BCBidImport.sigId);
            int iResult = dbContext.SaveChangesForImport();
        }
        catch (Exception e)
        {
            string iStr = e.ToString();
        }
    }
    catch (Exception e)
    {
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
    }
}
/// <summary>
/// Import from the Equip_Usage.xml file (populates three tables)
/// </summary>
/// <param name="performContext"></param>
/// <param name="dbContext"></param>
/// <param name="fileLocation"></param>
/// <param name="systemId"></param>
static public void Import(PerformContext performContext, DbContextOptionsBuilder<DbAppContext> options, DbAppContext dbContext, string fileLocation, string systemId)
{
    // check the start point - if startPoint == sigId the import of this file has already completed
    int startPoint = ImportUtility.CheckInterMapForStartPoint(dbContext, oldTable_Progress, BCBidImport.sigId);

    if (startPoint == BCBidImport.sigId)    // all records in the xml file were imported by a previous run
    {
        performContext.WriteLine("*** Importing " + xmlFileName + " is complete from the former process ***");
        return;
    }

    string rootAttr = "ArrayOf" + oldTable;

    // create progress indicator
    performContext.WriteLine("Processing " + oldTable);
    var progress = performContext.WriteProgressBar();
    progress.SetValue(0);

    // create serializer and deserialize the xml file
    XmlSerializer ser = new XmlSerializer(typeof(EquipUsage[]), new XmlRootAttribute(rootAttr));
    MemoryStream memoryStream = ImportUtility.memoryStreamGenerator(xmlFileName, oldTable, fileLocation, rootAttr);
    HETSAPI.Import.EquipUsage[] legacyItems = (HETSAPI.Import.EquipUsage[])ser.Deserialize(memoryStream);

    // load equipment once up front to avoid a database query in every iteration
    List<Models.Equipment> equips = dbContext.Equipments
        .Include(x => x.DumpTruck)
        .Include(x => x.DistrictEquipmentType)
        .ToList();

    int ii = startPoint;

    // skip the portion already processed
    if (startPoint > 0)
    {
        legacyItems = legacyItems.Skip(ii).ToArray();
    }

    foreach (var item in legacyItems.WithProgress(progress))
    {
        // see if we have this one already - the conversion key combines equipment, project and service area,
        // and oldKeyAll adds the worked date to keep it unique per day
        string oldKey = (item.Equip_Id ?? 0).ToString() + (item.Project_Id ?? 0).ToString() + (item.Service_Area_Id ?? 0).ToString();
        string workedDate = item.Worked_Dt.Trim().Substring(0, 10);
        string note = oldKey + "-" + workedDate.Substring(0, 4);
        string oldKeyAll = oldKey + "-" + workedDate.Substring(0, 10);

        ImportMap importMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == oldTable && x.OldKey == oldKeyAll);

        if (importMap == null) // new entry
        {
            if (item.Equip_Id > 0)
            {
                Models.RentalAgreement rentalAgreement = dbContext.RentalAgreements.FirstOrDefault(x => x.Note == note);
                CopyToTimeRecorded(performContext, dbContext, item, ref rentalAgreement, note, workedDate, equips, systemId);
                ImportUtility.AddImportMap(dbContext, oldTable, oldKeyAll, newTable, rentalAgreement.Id);
            }
        }
        else // update
        {
            Models.RentalAgreement rentalAgreement = dbContext.RentalAgreements.FirstOrDefault(x => x.Id == importMap.NewKey);

            if (rentalAgreement == null) // record was deleted
            {
                CopyToTimeRecorded(performContext, dbContext, item, ref rentalAgreement, note, workedDate, equips, systemId);

                // update the import map
                importMap.NewKey = rentalAgreement.Id;
                dbContext.ImportMaps.Update(importMap);
            }
            else // ordinary update
            {
                CopyToTimeRecorded(performContext, dbContext, item, ref rentalAgreement, note, workedDate, equips, systemId);

                // touch the import map
                importMap.LastUpdateTimestamp = DateTime.UtcNow;
                dbContext.ImportMaps.Update(importMap);
            }
        }

        // save changes to the database periodically to avoid writing too frequently
        if (++ii % 1000 == 0)
        {
            try
            {
                ImportUtility.AddImportMap_For_Progress(dbContext, oldTable_Progress, ii.ToString(), BCBidImport.sigId);
                int iResult = dbContext.SaveChangesForImport();

                // recreate the context after each batch to drop the accumulated change tracker
                options = new DbContextOptionsBuilder<DbAppContext>();
                dbContext = new DbAppContext(null, options.Options);
            }
            catch (Exception e)
            {
                // intentionally swallow save errors so the import can continue; keep the message for debugging
                string iStr = e.ToString();
            }
        }
    }

    try
    {
        performContext.WriteLine("*** Importing " + xmlFileName + " is Done ***");
        ImportUtility.AddImportMap_For_Progress(dbContext, oldTable_Progress, BCBidImport.sigId.ToString(), BCBidImport.sigId);
        int iResult = dbContext.SaveChangesForImport();
    }
    catch (Exception e)
    {
        string iStr = e.ToString();
    }
}
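// The context reset above keeps the Entity Framework change tracker from growing without bound while a
// very large usage file is imported; a single tracked context slows down as entities accumulate. A minimal
// sketch of the same idea as a hypothetical helper (not part of the import code), which also disposes the
// old context rather than just dropping the reference:
private static DbAppContext FlushAndRecreate(DbAppContext current, DbContextOptionsBuilder<DbAppContext> options)
{
    current.SaveChangesForImport();                 // flush any pending inserts/updates
    current.Dispose();                              // release the tracked entities
    return new DbAppContext(null, options.Options); // continue with a fresh, empty change tracker
}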
public static void Obfuscate(PerformContext performContext, DbAppContext dbContext, string sourceLocation, string destinationLocation, string systemId)
{
    // check the start point - if startPoint == sigId the obfuscation of this file has already completed
    int startPoint = ImportUtility.CheckInterMapForStartPoint(dbContext, "Obfuscate_" + OldTableProgress, BcBidImport.SigId, NewTable);

    if (startPoint == BcBidImport.SigId)    // all records in the xml file were processed by a previous run
    {
        performContext.WriteLine("*** Obfuscating " + XmlFileName + " is complete from the former process ***");
        return;
    }

    try
    {
        string rootAttr = "ArrayOf" + OldTable;

        // create progress indicator
        performContext.WriteLine("Processing " + OldTable);
        IProgressBar progress = performContext.WriteProgressBar();
        progress.SetValue(0);

        // create serializer and deserialize the xml file
        XmlSerializer ser = new XmlSerializer(typeof(ImportModels.Owner[]), new XmlRootAttribute(rootAttr));
        MemoryStream memoryStream = ImportUtility.MemoryStreamGenerator(XmlFileName, OldTable, sourceLocation, rootAttr);
        ImportModels.Owner[] legacyItems = (ImportModels.Owner[])ser.Deserialize(memoryStream);

        performContext.WriteLine("Obfuscating owner data");
        progress.SetValue(0);

        int currentOwner = 0;
        List<ImportMapRecord> importMapRecords = new List<ImportMapRecord>();

        foreach (ImportModels.Owner item in legacyItems.WithProgress(progress))
        {
            item.Created_By = systemId;

            if (item.Modified_By != null)
            {
                item.Modified_By = systemId;
            }

            // record the original-to-new value mappings so they can be written to the spreadsheet
            ImportMapRecord importMapRecordOrganization = new ImportMapRecord
            {
                TableName = NewTable,
                MappedColumn = "OrganizationName",
                OriginalValue = item.CGL_Company,
                NewValue = "Company " + currentOwner
            };

            importMapRecords.Add(importMapRecordOrganization);

            ImportMapRecord importMapRecordFirstName = new ImportMapRecord
            {
                TableName = NewTable,
                MappedColumn = "Owner_First_Name",
                OriginalValue = item.Owner_First_Name,
                NewValue = "OwnerFirst" + currentOwner
            };

            importMapRecords.Add(importMapRecordFirstName);

            ImportMapRecord importMapRecordLastName = new ImportMapRecord
            {
                TableName = NewTable,
                MappedColumn = "Owner_Last_Name",
                OriginalValue = item.Owner_Last_Name,
                NewValue = "OwnerLast" + currentOwner
            };

            importMapRecords.Add(importMapRecordLastName);

            ImportMapRecord importMapRecordOwnerCode = new ImportMapRecord
            {
                TableName = NewTable,
                MappedColumn = "Owner_Cd",
                OriginalValue = item.Owner_Cd,
                NewValue = "OO" + currentOwner
            };

            importMapRecords.Add(importMapRecordOwnerCode);

            // replace identifying values with generated ones and scramble free-text fields
            item.Owner_Cd = "OO" + currentOwner;
            item.Owner_First_Name = "OwnerFirst" + currentOwner;
            item.Owner_Last_Name = "OwnerLast" + currentOwner;
            item.Contact_Person = ImportUtility.ScrambleString(item.Contact_Person);
            item.Comment = ImportUtility.ScrambleString(item.Comment);
            item.WCB_Num = ImportUtility.ScrambleString(item.WCB_Num);
            item.CGL_Company = ImportUtility.ScrambleString(item.CGL_Company);
            item.CGL_Policy = ImportUtility.ScrambleString(item.CGL_Policy);

            currentOwner++;
        }

        performContext.WriteLine("Writing " + XmlFileName + " to " + destinationLocation);

        // write out the array
        FileStream fs = ImportUtility.GetObfuscationDestination(XmlFileName, destinationLocation);
        ser.Serialize(fs, legacyItems);
        fs.Close();

        // write out the spreadsheet of import records
        ImportUtility.WriteImportRecordsToExcel(destinationLocation, importMapRecords, OldTable);
    }
    catch (Exception e)
    {
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
        throw;
    }
}
static public void Import(PerformContext performContext, DbAppContext dbContext, string fileLocation, string systemId)
{
    // check the start point - if startPoint == sigId the import of this file has already completed
    int startPoint = ImportUtility.CheckInterMapForStartPoint(dbContext, oldTable_Progress, BCBidImport.sigId);

    if (startPoint == BCBidImport.sigId)    // all records in the xml file were imported by a previous run
    {
        performContext.WriteLine("*** Importing " + xmlFileName + " is complete from the former process ***");
        return;
    }

    try
    {
        string rootAttr = "ArrayOf" + oldTable;

        // create progress indicator
        performContext.WriteLine("Processing " + oldTable);
        var progress = performContext.WriteProgressBar();
        progress.SetValue(0);

        // create serializer and deserialize the xml file
        XmlSerializer ser = new XmlSerializer(typeof(EquipType[]), new XmlRootAttribute(rootAttr));
        MemoryStream memoryStream = ImportUtility.memoryStreamGenerator(xmlFileName, oldTable, fileLocation, rootAttr);
        HETSAPI.Import.EquipType[] legacyItems = (HETSAPI.Import.EquipType[])ser.Deserialize(memoryStream);

        int ii = startPoint;

        // skip the portion already processed
        if (startPoint > 0)
        {
            legacyItems = legacyItems.Skip(ii).ToArray();
        }

        foreach (var item in legacyItems.WithProgress(progress))
        {
            string serviceAreaName = "";

            // see if we have this one already
            ImportMap importMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == oldTable && x.OldKey == item.Equip_Type_Id.ToString());

            // parse the rental rate, falling back to a nominal value if the legacy data is not numeric
            float equip_Rental_rate_No;
            try
            {
                equip_Rental_rate_No = (float)Decimal.Parse(item.Equip_Rental_Rate_No, System.Globalization.NumberStyles.Any);
            }
            catch (Exception e)
            {
                equip_Rental_rate_No = (float)0.1;
            }

            // truncate the description to fit the destination column
            string description;
            try
            {
                description = item.Equip_Type_Desc.Length >= 225 ? item.Equip_Type_Desc.Substring(0, 225) : item.Equip_Type_Desc;
            }
            catch (Exception e)
            {
                description = "";
            }

            if (importMap == null) // new entry
            {
                if (item.Equip_Type_Id > 0)
                {
                    Models.DistrictEquipmentType instance = null;
                    serviceAreaName = CopyToInstance(performContext, dbContext, item, ref instance, systemId, equip_Rental_rate_No, description);
                    AddingDistrictEquipmentTypeInstance(dbContext, item, instance, equip_Rental_rate_No, description, serviceAreaName, true);
                }
            }
            else // update
            {
                Models.DistrictEquipmentType instance = dbContext.DistrictEquipmentTypes.FirstOrDefault(x => x.Id == importMap.NewKey);

                if (instance == null) // record was deleted
                {
                    serviceAreaName = CopyToInstance(performContext, dbContext, item, ref instance, systemId, equip_Rental_rate_No, description);
                    AddingDistrictEquipmentTypeInstance(dbContext, item, instance, equip_Rental_rate_No, description, serviceAreaName, false);

                    // update the import map
                    importMap.NewKey = instance.Id;
                    dbContext.ImportMaps.Update(importMap);
                }
                else // ordinary update
                {
                    serviceAreaName = CopyToInstance(performContext, dbContext, item, ref instance, systemId, equip_Rental_rate_No, description);
                    AddingDistrictEquipmentTypeInstance(dbContext, item, instance, equip_Rental_rate_No, description, serviceAreaName, false);

                    // touch the import map
                    importMap.LastUpdateTimestamp = DateTime.UtcNow;
                    dbContext.ImportMaps.Update(importMap);
                }
            }

            // save changes to the database periodically to avoid writing too frequently
            if (++ii % 250 == 0)
            {
                try
                {
                    ImportUtility.AddImportMap_For_Progress(dbContext, oldTable_Progress, ii.ToString(), BCBidImport.sigId);
                    int iResult = dbContext.SaveChangesForImport();
                }
                catch (Exception e)
                {
                    // intentionally swallow save errors so the import can continue; keep the message for debugging
                    string iStr = e.ToString();
                }
            }
        }

        try
        {
            performContext.WriteLine("*** Importing " + xmlFileName + " is Done ***");
            ImportUtility.AddImportMap_For_Progress(dbContext, oldTable_Progress, BCBidImport.sigId.ToString(), BCBidImport.sigId);
            int iResult = dbContext.SaveChangesForImport();
        }
        catch (Exception e)
        {
            string iStr = e.ToString();
        }
    }
    catch (Exception e)
    {
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
    }
}
/// <summary>
/// Import Equipment Types
/// </summary>
/// <param name="performContext"></param>
/// <param name="dbContext"></param>
/// <param name="fileLocation"></param>
/// <param name="systemId"></param>
public static void Import(PerformContext performContext, DbAppContext dbContext, string fileLocation, string systemId)
{
    // check the start point - if startPoint == sigId the import of this file has already completed
    int startPoint = ImportUtility.CheckInterMapForStartPoint(dbContext, OldTableProgress, BcBidImport.SigId, NewTable);

    if (startPoint == BcBidImport.SigId)    // all records in the xml file were imported by a previous run
    {
        performContext.WriteLine("*** Importing " + XmlFileName + " is complete from the former process ***");
        return;
    }

    // manage the id value for new equipment type records
    int maxEquipTypeIndex = 0;

    if (dbContext.EquipmentTypes.Any())
    {
        maxEquipTypeIndex = dbContext.EquipmentTypes.Max(x => x.Id);
    }

    try
    {
        string rootAttr = "ArrayOf" + OldTable;

        // create progress indicator
        performContext.WriteLine("Processing " + OldTable);
        IProgressBar progress = performContext.WriteProgressBar();
        progress.SetValue(0);

        // create serializer and deserialize the xml file
        XmlSerializer ser = new XmlSerializer(typeof(EquipType[]), new XmlRootAttribute(rootAttr));
        MemoryStream memoryStream = ImportUtility.MemoryStreamGenerator(XmlFileName, OldTable, fileLocation, rootAttr);
        EquipType[] legacyItems = (EquipType[])ser.Deserialize(memoryStream);

        int ii = startPoint;

        // skip the portion already processed
        if (startPoint > 0)
        {
            legacyItems = legacyItems.Skip(ii).ToArray();
        }

        Debug.WriteLine("Importing EquipmentType Data. Total Records: " + legacyItems.Length);

        foreach (EquipType item in legacyItems.WithProgress(progress))
        {
            // see if we have this one already
            ImportMap importMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == OldTable && x.OldKey == item.Equip_Type_Id.ToString());

            // new entry (only import new records of Equipment Type)
            if (importMap == null && item.Equip_Type_Id > 0)
            {
                EquipmentType equipType = null;
                CopyToInstance(dbContext, item, ref equipType, systemId, ref maxEquipTypeIndex);

                if (equipType != null)
                {
                    ImportUtility.AddImportMap(dbContext, OldTable, item.Equip_Type_Id.ToString(), NewTable, equipType.Id);
                }
            }

            // save changes to the database periodically to avoid writing too frequently
            if (ii++ % 500 == 0)
            {
                try
                {
                    ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, ii.ToString(), BcBidImport.SigId, NewTable);
                    dbContext.SaveChangesForImport();
                }
                catch (Exception e)
                {
                    performContext.WriteLine("Error saving data " + e.Message);
                }
            }
        }

        try
        {
            performContext.WriteLine("*** Importing " + XmlFileName + " is Done ***");
            ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, BcBidImport.SigId.ToString(), BcBidImport.SigId, NewTable);
            dbContext.SaveChangesForImport();
        }
        catch (Exception e)
        {
            string temp = string.Format("Error saving data (EquipmentTypeIndex: {0}): {1}", maxEquipTypeIndex, e.Message);
            performContext.WriteLine(temp);
            throw new DataException(temp);
        }
    }
    catch (Exception e)
    {
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
        throw;
    }
}
/// <summary>
/// Import Users
/// </summary>
/// <param name="performContext"></param>
/// <param name="dbContext"></param>
/// <param name="fileLocation"></param>
/// <param name="systemId"></param>
public static void Import(PerformContext performContext, DbAppContext dbContext, string fileLocation, string systemId)
{
    // check the start point - if startPoint == sigId the import of this file has already completed
    int startPoint = ImportUtility.CheckInterMapForStartPoint(dbContext, OldTableProgress, BCBidImport.SigId);

    if (startPoint == BCBidImport.SigId)    // all records in the xml file were imported by a previous run
    {
        performContext.WriteLine("*** Importing " + XmlFileName + " is complete from the former process ***");
        return;
    }

    try
    {
        string rootAttr = "ArrayOf" + OldTable;

        // create progress indicator
        performContext.WriteLine("Processing " + OldTable);
        IProgressBar progress = performContext.WriteProgressBar();
        progress.SetValue(0);

        // create serializer and deserialize the xml file
        XmlSerializer ser = new XmlSerializer(typeof(UserHets[]), new XmlRootAttribute(rootAttr));
        MemoryStream memoryStream = ImportUtility.MemoryStreamGenerator(XmlFileName, OldTable, fileLocation, rootAttr);
        UserHets[] legacyItems = (UserHets[])ser.Deserialize(memoryStream);

        int ii = startPoint;

        // skip the portion already processed
        if (startPoint > 0)
        {
            legacyItems = legacyItems.Skip(ii).ToArray();
        }

        foreach (UserHets item in legacyItems.WithProgress(progress))
        {
            // see if we have this one already
            ImportMap importMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == OldTable && x.OldKey == item.Popt_Id.ToString());

            User instance = dbContext.Users.FirstOrDefault(x => item.User_Cd.ToUpper().IndexOf(x.SmUserId.ToUpper(), StringComparison.Ordinal) >= 0);

            if (instance == null)
            {
                CopyToInstance(dbContext, item, ref instance, systemId);

                // new entry
                if (importMap == null && instance != null)
                {
                    ImportUtility.AddImportMap(dbContext, OldTable, item.Popt_Id.ToString(), NewTable, instance.Id);
                }

                ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, (++ii).ToString(), BCBidImport.SigId);
                dbContext.SaveChangesForImport();
            }
        }

        try
        {
            performContext.WriteLine("*** Importing " + XmlFileName + " is Done ***");
            ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, BCBidImport.SigId.ToString(), BCBidImport.SigId);
            dbContext.SaveChangesForImport();
        }
        catch (Exception e)
        {
            performContext.WriteLine("Error saving data " + e.Message);
        }
    }
    catch (Exception e)
    {
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
    }
}