/// <summary>
/// Import Local Areas
/// </summary>
/// <param name="performContext">Hangfire job context used for console output and the progress bar</param>
/// <param name="dbContext">database context the imported records are written to</param>
/// <param name="fileLocation">folder holding the legacy xml export file</param>
/// <param name="systemId">user id recorded as creator/updater of imported records</param>
public static void Import(PerformContext performContext, DbAppContext dbContext, string fileLocation, string systemId)
{
    // check the start point. If startPoint == sigId then it is already completed
    int startPoint = ImportUtility.CheckInterMapForStartPoint(dbContext, OldTableProgress, BcBidImport.SigId, NewTable);

    if (startPoint == BcBidImport.SigId)    // this means the import job it has done today is complete for all the records in the xml file.
    {
        performContext.WriteLine("*** Importing " + XmlFileName + " is complete from the former process ***");
        return;
    }

    try
    {
        // the xml document root is "ArrayOf<legacy table name>"
        string rootAttr = "ArrayOf" + OldTable;

        // create Processer progress indicator
        performContext.WriteLine("Processing " + OldTable);
        IProgressBar progress = performContext.WriteProgressBar();
        progress.SetValue(0);

        // create serializer and serialize xml file
        XmlSerializer ser = new XmlSerializer(typeof(Area[]), new XmlRootAttribute(rootAttr));
        MemoryStream memoryStream = ImportUtility.MemoryStreamGenerator(XmlFileName, OldTable, fileLocation, rootAttr);
        Area[] legacyItems = (Area[])ser.Deserialize(memoryStream);

        int ii = startPoint;

        // skip the portion already processed on an earlier (interrupted) run
        if (startPoint > 0)
        {
            legacyItems = legacyItems.Skip(ii).ToArray();
        }

        Debug.WriteLine("Importing LocalArea Data. Total Records: " + legacyItems.Length);

        foreach (Area item in legacyItems.WithProgress(progress))
        {
            // see if we have this one already (matched via the ImportMaps cross-reference table)
            ImportMap importMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == OldTable && x.OldKey == item.Area_Id.ToString());

            // new entry only - already-mapped records are skipped (there is no update path here)
            if (importMap == null && item.Area_Id > 0)
            {
                LocalArea localArea = null;
                CopyToInstance(dbContext, item, ref localArea, systemId);
                ImportUtility.AddImportMap(dbContext, OldTable, item.Area_Id.ToString(), NewTable, localArea.Id);
            }
        }

        // mark the whole file complete and persist everything in one save
        performContext.WriteLine("*** Importing " + XmlFileName + " is Done ***");
        ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, BcBidImport.SigId.ToString(), BcBidImport.SigId, NewTable);
        dbContext.SaveChangesForImport();
    }
    catch (Exception e)
    {
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
        throw;
    }
}
/// <summary>
/// Records where the last import stopped for a specific table.
/// The NewTable entry of the Import_Map row is always today's date (BCBidImport.todayDate) for
/// identification purposes, so the restart point only reflects what has been done today, not in the past.
/// </summary>
/// <param name="dbContext">database context holding the Import_Map table</param>
/// <param name="oldTable">progress-row key, e.g. "Owner_Progress", identifying the import's progress entry</param>
/// <param name="oldKey">record index where the import stopped last time; equal to the signature id means complete</param>
/// <param name="newKey">constant used only to identify the progress entry for this table</param>
public static void AddImportMap_For_Progress(DbAppContext dbContext, string oldTable, string oldKey, int newKey)
{
    List<Models.ImportMap> importMapList = dbContext.ImportMaps
        .Where(x => x.OldTable == oldTable && x.NewKey == newKey)
        .ToList();

    if (importMapList.Count == 0)
    {
        // no progress row yet - create one
        ImportUtility.AddImportMap(dbContext, oldTable, oldKey, BCBidImport.todayDate, newKey);
        return;
    }

    // Sometimes multiple progress entries exist for the same xml file import.
    // Keep exactly ONE row (the one that is furthest along) and delete the rest.
    // BUG FIX: the previous version updated EVERY row whose OldKey equalled the max,
    // so duplicate rows sharing the max value were never removed.
    int maxProgressCount = importMapList.Max(t => int.Parse(t.OldKey));
    bool keptOne = false;

    foreach (ImportMap importMap in importMapList)
    {
        if (!keptOne && importMap.OldKey == maxProgressCount.ToString())
        {
            // advance the surviving row to the furthest point reached
            importMap.NewTable = BCBidImport.todayDate;
            importMap.OldKey = Math.Max(int.Parse(oldKey), maxProgressCount).ToString();
            importMap.LastUpdateTimestamp = DateTime.Now;
            dbContext.ImportMaps.Update(importMap);
            keptOne = true;
        }
        else
        {
            // extra duplicate progress entry - remove it
            dbContext.ImportMaps.Remove(importMap);
        }
    }
}
/// <summary>
/// Insert District_Equipment_Type or not according to the rule laid out by HETS-365.
/// </summary>
/// <param name="dbContext">database context</param>
/// <param name="oldObject">legacy equipment-type record being imported</param>
/// <param name="instance">candidate DistrictEquipmentType to insert (name is extended in place)</param>
/// <param name="equipRentalRateNo">legacy blue book section number used for fuzzy matching</param>
/// <param name="description">legacy description appended to the district equipment name</param>
/// <param name="serviceAreaName">service area name used to disambiguate in step 3</param>
/// <param name="addImportMaps">when true, record an Import_Map cross-reference for the result</param>
private static void AddingDistrictEquipmentTypeInstance(DbAppContext dbContext, EquipType oldObject, DistrictEquipmentType instance, float equipRentalRateNo, string description, string serviceAreaName, bool addImportMaps)
{
    // add the instance (according to the rule of HETS-365)
    // candidates: same district, and the portion of the stored name before the
    // delimiter contains this instance's name
    List<DistrictEquipmentType> disEquipTypelist = dbContext.DistrictEquipmentTypes
        .Where(x => x.DistrictId == instance.DistrictId)
        .Where(x => x.DistrictEquipmentName.Substring(0, Math.Max(0, x.DistrictEquipmentName.IndexOf(Delim, StringComparison.Ordinal)))
            .IndexOf(instance.DistrictEquipmentName, StringComparison.Ordinal) >= 0)
        .Include(x => x.EquipmentType)
        .ToList();

    // HETS-365 Step 1: no candidate exists - insert a new record
    if (disEquipTypelist.Count == 0)
    {
        instance.DistrictEquipmentName += Delim + description;
        dbContext.DistrictEquipmentTypes.Add(instance);

        if (addImportMaps)
        {
            ImportUtility.AddImportMap(dbContext, OldTable, oldObject.Equip_Type_Id.ToString(), NewTable, instance.Id);
        }
    }
    else // HETS-365 Step 2: candidates exist - try to map to one of them
    {
        // candidates whose blue book section is within the allowed tolerance
        // (null sections are treated as 0.1 so they effectively never match)
        List<DistrictEquipmentType> list1 = disEquipTypelist
            .FindAll(x => Math.Abs((x.EquipmentType.BlueBookSection ?? 0.1) - equipRentalRateNo) <= ErrowAllowed);

        // HETS-365 Step 2.1: map to the lowest-id blue-book match
        if (list1.Count > 0 && addImportMaps)
        {
            ImportUtility.AddImportMap(dbContext, OldTable, oldObject.Equip_Type_Id.ToString(), NewTable, list1.OrderBy(x => x.Id).FirstOrDefault().Id);
        }

        // check if XML.Description matches any of the HETS.Descriptions
        // (the description part is the text after the delimiter in the stored name)
        List<DistrictEquipmentType> list2 = disEquipTypelist
            .FindAll(x => x.DistrictEquipmentName.Substring(x.DistrictEquipmentName.IndexOf(Delim, StringComparison.Ordinal) + Delim.Length)
                .IndexOf(description, StringComparison.Ordinal) >= 0);

        // HETS-365 Step 2.2: map to the lowest-id description match
        // NOTE(review): when both list1 and list2 are non-empty, TWO import maps are
        // added for the same legacy id - confirm this is intended
        if (list2.Count > 0 && addImportMaps)
        {
            ImportUtility.AddImportMap(dbContext, OldTable, oldObject.Equip_Type_Id.ToString(), NewTable, list2.OrderBy(x => x.Id).FirstOrDefault().Id);
        }

        // HETS-365 Step 3: no match at all - insert with service area + description suffix
        if (list1.Count == 0 && list2.Count == 0)
        {
            instance.DistrictEquipmentName += Delim0 + serviceAreaName + Delim + description;
            dbContext.DistrictEquipmentTypes.Add(instance);

            if (addImportMaps)
            {
                ImportUtility.AddImportMap(dbContext, OldTable, oldObject.Equip_Type_Id.ToString(), NewTable, instance.Id);
            }
        }
    }
}
/// <summary>
/// Import existing Cities
/// </summary>
/// <param name="performContext">Hangfire job context used for console output and the progress bar</param>
/// <param name="dbContext">database context the imported records are written to</param>
/// <param name="fileLocation">folder holding the legacy xml export file</param>
/// <param name="systemId">user id recorded as creator/updater of imported records</param>
private static void ImportCities(PerformContext performContext, DbAppContext dbContext, string fileLocation, string systemId)
{
    // check the start point. If startPoint == sigId then it is already completed
    int startPoint = ImportUtility.CheckInterMapForStartPoint(dbContext, OldTableProgress, BcBidImport.SigId, NewTable);

    if (startPoint == BcBidImport.SigId)    // this means the import job it has done today is complete for all the records in the xml file.
    {
        performContext.WriteLine("*** Importing " + XmlFileName + " is complete from the former process ***");
        return;
    }

    // track the current max city id so new records can be assigned sequential ids
    // (guarded with Any() because Max() throws on an empty table)
    int maxCityIndex = 0;

    if (dbContext.Cities.Any())
    {
        maxCityIndex = dbContext.Cities.Max(x => x.Id);
    }

    try
    {
        // the xml document root is "ArrayOf<legacy table name>"
        string rootAttr = "ArrayOf" + OldTable;

        performContext.WriteLine("Processing " + OldTable);
        IProgressBar progress = performContext.WriteProgressBar();
        progress.SetValue(0);

        // create serializer and serialize xml file
        XmlSerializer ser = new XmlSerializer(typeof(HetsCity[]), new XmlRootAttribute(rootAttr));
        MemoryStream memoryStream = ImportUtility.MemoryStreamGenerator(XmlFileName, OldTable, fileLocation, rootAttr);
        HetsCity[] legacyItems = (HetsCity[])ser.Deserialize(memoryStream);

        foreach (HetsCity item in legacyItems.WithProgress(progress))
        {
            // see if we have this one already (matched via the ImportMaps cross-reference table)
            ImportMap importMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == OldTable && x.OldKey == item.City_Id.ToString());

            // new entry only - already-mapped records are skipped
            if (importMap == null)
            {
                City city = null;
                CopyToInstance(dbContext, item, ref city, systemId, ref maxCityIndex);
                ImportUtility.AddImportMap(dbContext, OldTable, item.City_Id.ToString(), NewTable, city.Id);
            }
        }

        // mark the whole file complete and persist everything in one save
        performContext.WriteLine("*** Importing " + XmlFileName + " is Done ***");
        ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, BcBidImport.SigId.ToString(), BcBidImport.SigId, NewTable);
        dbContext.SaveChangesForImport();
    }
    catch (Exception e)
    {
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
        throw;
    }
}
/// <summary>
/// Records where the last import stopped for a specific table.
/// The NewTable entry of the Import_Map row is always today's date (BCBidImport.todayDate) for
/// identification purposes, so the restart point only reflects what has been done today, not in the past.
/// </summary>
/// <param name="dbContext">database context holding the Import_Map table</param>
/// <param name="oldTable">progress-row key, e.g. "Owner_Progress", identifying the import's progress entry</param>
/// <param name="oldKey">record index where the import stopped last time; equal to the signature id means complete</param>
/// <param name="newKey">constant used only to identify the progress entry for this table</param>
public static void AddImportMap_For_Progress(DbAppContext dbContext, string oldTable, string oldKey, int newKey)
{
    Models.ImportMap progressEntry = dbContext.ImportMaps
        .FirstOrDefault(x => x.OldTable == oldTable && x.NewKey == newKey);

    // no progress row yet for this table - create it and return
    if (progressEntry == null)
    {
        ImportUtility.AddImportMap(dbContext, oldTable, oldKey, BCBidImport.todayDate, newKey);
        return;
    }

    // otherwise advance the existing row to the latest restart point
    progressEntry.OldKey = oldKey;
    progressEntry.LastUpdateTimestamp = DateTime.Now;
    dbContext.ImportMaps.Update(progressEntry);
}
/// <summary>
/// Insert District_Equipment_Type or not according to the rule laid out by HETS-365.
/// </summary>
/// <param name="dbContext">database context</param>
/// <param name="oldObject">legacy equipment-type record being imported</param>
/// <param name="instance">candidate DistrictEquipmentType to insert (name is extended in place)</param>
/// <param name="equip_Rental_rate_No">legacy blue book section number used for fuzzy matching</param>
/// <param name="description">legacy description appended to the district equipment name</param>
/// <param name="serviceAreaName">service area name used to disambiguate in step 3</param>
/// <param name="addImportMaps">when true, record an Import_Map cross-reference for the result</param>
static private void AddingDistrictEquipmentTypeInstance(DbAppContext dbContext, HETSAPI.Import.EquipType oldObject, Models.DistrictEquipmentType instance, float equip_Rental_rate_No, string description, string serviceAreaName, bool addImportMaps)
{
    // Add the instance according to the rule of HETS-365.
    // Candidates: same district, and the portion of the stored name before the
    // delimiter contains this instance's name.
    var disEquipTypelist = dbContext.DistrictEquipmentTypes
        .Where(x => x.DistrictId == instance.DistrictId)
        .Where(x => x.DistrictEquipmentName.Substring(0, Math.Max(0, x.DistrictEquipmentName.IndexOf(delim))).IndexOf(instance.DistrictEquipmentName) >= 0)
        .Include(x => x.EquipmentType)
        .ToList();

    if (disEquipTypelist.Count == 0) //HETS-365 Step 1: no candidate - insert a new record
    {
        instance.DistrictEquipmentName += delim + description;
        dbContext.DistrictEquipmentTypes.Add(instance);

        if (addImportMaps)
        {
            ImportUtility.AddImportMap(dbContext, oldTable, oldObject.Equip_Type_Id.ToString(), newTable, instance.Id);
        }
    }
    else //HETS-365 Step 2: candidates exist - try to map to one of them
    {
        // candidates whose blue book section is within the allowed tolerance
        // (null sections are treated as 0.1 so they effectively never match)
        var list1 = disEquipTypelist
            .FindAll(x => Math.Abs((x.EquipmentType.BlueBookSection ?? 0.1) - equip_Rental_rate_No) <= errowAllowed);

        if (list1.Count > 0 && addImportMaps) //HETS-365 Step 2.1: map to the lowest-id blue-book match
        {
            ImportUtility.AddImportMap(dbContext, oldTable, oldObject.Equip_Type_Id.ToString(), newTable, list1.OrderBy(x => x.Id).FirstOrDefault().Id);
        }

        // Check if XML.Description matches any of the HETS.Descriptions.
        // BUG FIX: use >= 0 instead of > 0 so a description matching at the very
        // start of the stored description part is also recognized (the sibling
        // implementation of this method uses >= 0).
        var list2 = disEquipTypelist.FindAll(x => x.DistrictEquipmentName.Substring(x.DistrictEquipmentName.IndexOf(delim) + delim.Length).IndexOf(description) >= 0);

        if (list2.Count > 0 && addImportMaps) //HETS-365 Step 2.2: map to the lowest-id description match
        {
            ImportUtility.AddImportMap(dbContext, oldTable, oldObject.Equip_Type_Id.ToString(), newTable, list2.OrderBy(x => x.Id).FirstOrDefault().Id);
        }

        if (list1.Count == 0 && list2.Count == 0) //HETS-365 Step 3: no match at all - insert with suffix
        {
            instance.DistrictEquipmentName += delim0 + serviceAreaName + delim + description;
            dbContext.DistrictEquipmentTypes.Add(instance);

            if (addImportMaps)
            {
                ImportUtility.AddImportMap(dbContext, oldTable, oldObject.Equip_Type_Id.ToString(), newTable, instance.Id);
            }
        }
    }
}
/// <summary>
/// Import Service Areas
/// </summary>
/// <param name="performContext">Hangfire job context used for console output and the progress bar</param>
/// <param name="dbContext">database context the imported records are written to</param>
/// <param name="fileLocation">folder holding the legacy xml export file</param>
/// <param name="systemId">user id recorded as creator/updater of imported records</param>
public static void Import(PerformContext performContext, DbAppContext dbContext, string fileLocation, string systemId)
{
    // completion marker is date-stamped, so this importer reruns at most once per day
    string completed = DateTime.Now.ToString("d") + "-" + "Completed";
    ImportMap importMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == OldTable && x.OldKey == completed && x.NewKey == SigId);

    if (importMap != null)
    {
        // already completed today - nothing to do
        return;
    }

    try
    {
        // the xml document root is "ArrayOf<legacy table name>"
        string rootAttr = "ArrayOf" + OldTable;

        performContext.WriteLine("Processing Service Areas");
        IProgressBar progress = performContext.WriteProgressBar();
        progress.SetValue(0);

        // create serializer and serialize xml file
        XmlSerializer ser = new XmlSerializer(typeof(ServiceArea[]), new XmlRootAttribute(rootAttr));
        MemoryStream memoryStream = ImportUtility.MemoryStreamGenerator(XmlFileName, OldTable, fileLocation, rootAttr);
        ServiceArea[] legacyItems = (ServiceArea[])ser.Deserialize(memoryStream);

        foreach (ServiceArea item in legacyItems.WithProgress(progress))
        {
            // see if we have this one already (matched via the ImportMaps cross-reference table)
            importMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == OldTable && x.OldKey == item.Service_Area_Id.ToString());

            // also try to match an existing service area by (trimmed) name
            Models.ServiceArea serviceArea = dbContext.ServiceAreas.FirstOrDefault(x => x.Name == item.Service_Area_Desc.Trim());

            if (serviceArea == null)
            {
                serviceArea = new Models.ServiceArea();
            }

            // new entry
            if (importMap == null)
            {
                if (item.Service_Area_Cd > 0)
                {
                    CopyToInstance(performContext, dbContext, item, ref serviceArea, systemId);
                    ImportUtility.AddImportMap(dbContext, OldTable, item.Service_Area_Id.ToString(), NewTable, serviceArea.Id);
                }
            }
            else // update
            {
                // record was deleted (map exists but no matching service area by name)
                if (serviceArea.Name == null)
                {
                    CopyToInstance(performContext, dbContext, item, ref serviceArea, systemId);

                    // update the import map to point at the re-created record
                    importMap.NewKey = serviceArea.Id;
                    dbContext.ImportMaps.Update(importMap);
                    dbContext.SaveChangesForImport();
                }
                else // ordinary update
                {
                    CopyToInstance(performContext, dbContext, item, ref serviceArea, systemId);

                    // touch the import map so the record shows as freshly synced
                    importMap.LastUpdateTimestamp = DateTime.UtcNow;
                    dbContext.ImportMaps.Update(importMap);
                    dbContext.SaveChangesForImport();
                }
            }
        }

        performContext.WriteLine("*** Importing " + XmlFileName + " is Done ***");
        ImportUtility.AddImportMap(dbContext, OldTable, completed, NewTable, SigId);
    }
    catch (Exception e)
    {
        // NOTE(review): unlike sibling importers this swallows the exception
        // (no rethrow), so the job reports success after logging - confirm intended
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
    }
}
/// <summary>
/// Import Owner Records
/// </summary>
/// <param name="performContext">Hangfire job context used for console output and the progress bar</param>
/// <param name="dbContext">database context the imported records are written to</param>
/// <param name="fileLocation">folder holding the legacy xml export file</param>
/// <param name="systemId">user id recorded as creator/updater of imported records</param>
public static void Import(PerformContext performContext, DbAppContext dbContext, string fileLocation, string systemId)
{
    // check the start point. If startPoint == sigId then it is already completed
    int startPoint = ImportUtility.CheckInterMapForStartPoint(dbContext, OldTableProgress, BCBidImport.SigId);

    if (startPoint == BCBidImport.SigId)    // this means the import job it has done today is complete for all the records in the xml file.
    {
        performContext.WriteLine("*** Importing " + XmlFileName + " is complete from the former process ***");
        return;
    }

    List<Owner> data = new List<Owner>();

    // track the current max ids so new records can be assigned sequential ids.
    // BUG FIX: guard with Any() - Max() throws InvalidOperationException on an
    // empty table (same guard the Cities importer already uses).
    int maxOwnerIndex = dbContext.Owners.Any() ? dbContext.Owners.Max(x => x.Id) : 0;
    int maxContactIndex = dbContext.Contacts.Any() ? dbContext.Contacts.Max(x => x.Id) : 0;

    try
    {
        // the xml document root is "ArrayOf<legacy table name>"
        string rootAttr = "ArrayOf" + OldTable;

        // create Processer progress indicator
        performContext.WriteLine("Processing " + OldTable);
        IProgressBar progress = performContext.WriteProgressBar();
        progress.SetValue(0);

        // create serializer and serialize xml file
        XmlSerializer ser = new XmlSerializer(typeof(ImportModels.Owner[]), new XmlRootAttribute(rootAttr));
        MemoryStream memoryStream = ImportUtility.MemoryStreamGenerator(XmlFileName, OldTable, fileLocation, rootAttr);
        ImportModels.Owner[] legacyItems = (ImportModels.Owner[])ser.Deserialize(memoryStream);

        int ii = startPoint;

        // skip the portion already processed on an earlier (interrupted) run
        if (startPoint > 0)
        {
            legacyItems = legacyItems.Skip(ii).ToArray();
        }

        foreach (ImportModels.Owner item in legacyItems.WithProgress(progress))
        {
            // see if we have this one already (matched via the ImportMaps cross-reference table)
            ImportMap importMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == OldTable && x.OldKey == item.Popt_Id.ToString());

            // new entry
            if (importMap == null)
            {
                Owner owner = null;
                CopyToInstance(dbContext, item, ref owner, systemId, ref maxOwnerIndex, ref maxContactIndex);
                data.Add(owner);
                ImportUtility.AddImportMap(dbContext, OldTable, item.Popt_Id.ToString(), NewTable, owner.Id);
            }
            else // update
            {
                Owner owner = dbContext.Owners.FirstOrDefault(x => x.Id == importMap.NewKey);

                if (owner == null) // record was deleted
                {
                    CopyToInstance(dbContext, item, ref owner, systemId, ref maxOwnerIndex, ref maxContactIndex);

                    // update the import map to point at the re-created record
                    importMap.NewKey = owner.Id;
                    dbContext.ImportMaps.Update(importMap);
                }
                else // ordinary update.
                {
                    CopyToInstance(dbContext, item, ref owner, systemId, ref maxOwnerIndex, ref maxContactIndex);

                    // touch the import map so the record shows as freshly synced
                    importMap.LastUpdateTimestamp = DateTime.UtcNow;
                    dbContext.ImportMaps.Update(importMap);
                }
            }

            // save change to database periodically to avoid frequent writing to the database
            if (++ii % 500 == 0)
            {
                try
                {
                    ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, ii.ToString(), BCBidImport.SigId);
                    dbContext.SaveChangesForImport();
                }
                catch (Exception e)
                {
                    performContext.WriteLine("Error saving data " + e.Message);
                }
            }
        }

        try
        {
            // mark the whole file complete and persist the remaining records
            performContext.WriteLine("*** Importing " + XmlFileName + " is Done ***");
            ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, BCBidImport.SigId.ToString(), BCBidImport.SigId);
            dbContext.SaveChangesForImport();
        }
        catch (Exception e)
        {
            performContext.WriteLine("Error saving data " + e.Message);
        }
    }
    catch (Exception e)
    {
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
    }
}
/// <summary>
/// Import Districts
/// </summary>
/// <param name="performContext">Hangfire job context used for console output and the progress bar</param>
/// <param name="dbContext">database context the imported records are written to</param>
/// <param name="fileLocation">folder holding the legacy xml export file</param>
/// <param name="systemId">user id recorded as creator/updater of imported records</param>
static public void Import(PerformContext performContext, DbAppContext dbContext, string fileLocation, string systemId)
{
    // completion marker is date-stamped, so this importer reruns at most once per day
    string completed = DateTime.Now.ToString("d") + "-" + "Completed";
    ImportMap importMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == oldTable && x.OldKey == completed && x.NewKey == sigId);

    if (importMap != null)
    {
        performContext.WriteLine("*** Importing " + xmlFileName + " is complete from the former process ***");
        return;
    }

    try
    {
        // the xml document root is "ArrayOf<legacy table name>"
        string rootAttr = "ArrayOf" + oldTable;

        performContext.WriteLine("Processing " + oldTable);
        var progress = performContext.WriteProgressBar();
        progress.SetValue(0);

        // create serializer and serialize xml file
        XmlSerializer ser = new XmlSerializer(typeof(HETS_District[]), new XmlRootAttribute(rootAttr));
        MemoryStream memoryStream = ImportUtility.memoryStreamGenerator(xmlFileName, oldTable, fileLocation, rootAttr);
        HETSAPI.Import.HETS_District[] legacyItems = (HETSAPI.Import.HETS_District[])ser.Deserialize(memoryStream);

        foreach (var item in legacyItems.WithProgress(progress))
        {
            // see if we have this one already (matched via the ImportMaps cross-reference table)
            importMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == oldTable && x.OldKey == item.District_Id.ToString());

            if (importMap == null) // new entry
            {
                Models.District dis = null;
                CopyToInstance(performContext, dbContext, item, ref dis, systemId);
                ImportUtility.AddImportMap(dbContext, oldTable, item.District_Id.ToString(), newTable, dis.Id);
            }
            else // update
            {
                Models.District dis = dbContext.Districts.FirstOrDefault(x => x.Id == importMap.NewKey);

                if (dis == null) // record was deleted
                {
                    CopyToInstance(performContext, dbContext, item, ref dis, systemId);

                    // update the import map to point at the re-created record
                    importMap.NewKey = dis.Id;
                    dbContext.ImportMaps.Update(importMap);
                    dbContext.SaveChangesForImport();
                }
                else // ordinary update.
                {
                    CopyToInstance(performContext, dbContext, item, ref dis, systemId);

                    // touch the import map so the record shows as freshly synced
                    importMap.LastUpdateTimestamp = DateTime.UtcNow;
                    dbContext.ImportMaps.Update(importMap);
                    dbContext.SaveChangesForImport();
                }
            }
        }

        // record the date-stamped completion marker
        performContext.WriteLine("*** Done ***");
        ImportUtility.AddImportMap(dbContext, oldTable, completed, newTable, sigId);
    }
    catch (Exception e)
    {
        // NOTE(review): exception is swallowed (no rethrow), unlike some sibling
        // importers - confirm intended
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
    }
}
/// <summary>
/// Import Users
/// </summary>
/// <param name="performContext">Hangfire job context used for console output and the progress bar</param>
/// <param name="dbContext">database context the imported records are written to</param>
/// <param name="fileLocation">folder holding the legacy xml export file</param>
/// <param name="systemId">user id recorded as creator/updater of imported records</param>
public static void Import(PerformContext performContext, DbAppContext dbContext, string fileLocation, string systemId)
{
    // check the start point. If startPoint == sigId then it is already completed
    int startPoint = ImportUtility.CheckInterMapForStartPoint(dbContext, OldTableProgress, BcBidImport.SigId, NewTable);

    if (startPoint == BcBidImport.SigId)    // this means the import job it has done today is complete for all the records in the xml file.
    {
        performContext.WriteLine("*** Importing " + XmlFileName + " is complete from the former process ***");
        return;
    }

    // manage the id value for new user records
    // (guarded with Any() because Max() throws on an empty table)
    int maxUserIndex = 0;

    if (dbContext.Users.Any())
    {
        maxUserIndex = dbContext.Users.Max(x => x.Id);
    }

    try
    {
        // the xml document root is "ArrayOf<legacy table name>"
        string rootAttr = "ArrayOf" + OldTable;

        // create Processer progress indicator
        performContext.WriteLine("Processing " + OldTable);
        IProgressBar progress = performContext.WriteProgressBar();
        progress.SetValue(0);

        // create serializer and serialize xml file
        XmlSerializer ser = new XmlSerializer(typeof(UserHets[]), new XmlRootAttribute(rootAttr));
        MemoryStream memoryStream = ImportUtility.MemoryStreamGenerator(XmlFileName, OldTable, fileLocation, rootAttr);
        UserHets[] legacyItems = (UserHets[])ser.Deserialize(memoryStream);

        int ii = startPoint;

        // skip the portion already processed on an earlier (interrupted) run
        if (startPoint > 0)
        {
            legacyItems = legacyItems.Skip(ii).ToArray();
        }

        // create an array of names using the created by and modified by values in the data
        // (used later to back-fill first/last names on new user records)
        performContext.WriteLine("Extracting first and last names");
        progress.SetValue(0);

        Dictionary<string, string> firstNames = new Dictionary<string, string>();
        Dictionary<string, string> lastNames = new Dictionary<string, string>();

        foreach (UserHets item in legacyItems.WithProgress(progress))
        {
            string name = item.Created_By;
            GetNameParts(name, ref firstNames, ref lastNames);

            name = item.Modified_By;
            GetNameParts(name, ref firstNames, ref lastNames);
        }

        // import the data
        performContext.WriteLine("Importing User Data");
        progress.SetValue(0);

        foreach (UserHets item in legacyItems.WithProgress(progress))
        {
            // see if we have this one already (matched via the ImportMaps cross-reference table)
            ImportMap importMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == OldTable && x.OldKey == item.Popt_Id.ToString());

            // new entry only - already-mapped records are skipped
            if (importMap == null && item.Popt_Id != null)
            {
                string username = NormalizeUserCode(item.User_Cd);
                string firstName = GetNamePart(username, firstNames);
                string lastName = GetNamePart(username, lastNames);
                username = username.ToLower();

                User instance = dbContext.Users.FirstOrDefault(x => x.SmUserId == username);

                // if the user exists - move to the next record
                if (instance != null)
                {
                    continue;
                }

                CopyToInstance(dbContext, item, ref instance, systemId, username, firstName, lastName, ref maxUserIndex);

                // instance can remain null if CopyToInstance declined to create the user
                if (instance != null)
                {
                    ImportUtility.AddImportMap(dbContext, OldTable, item.Popt_Id, NewTable, instance.Id);
                }
            }
        }

        try
        {
            // mark the whole file complete and persist everything in one save
            performContext.WriteLine("*** Importing " + XmlFileName + " is Done ***");
            ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, BcBidImport.SigId.ToString(), BcBidImport.SigId, NewTable);
            dbContext.SaveChangesForImport();
        }
        catch (Exception e)
        {
            string temp = string.Format("Error saving data (UserIndex: {0}): {1}", maxUserIndex, e.Message);
            performContext.WriteLine(temp);
            throw new DataException(temp);
        }
    }
    catch (Exception e)
    {
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
        throw;
    }
}
/// <summary>
/// Import Rotation List
/// </summary>
/// <param name="performContext">Hangfire job context used for console output and the progress bar</param>
/// <param name="dbContext">database context the imported records are written to</param>
/// <param name="fileLocation">folder holding the legacy xml export file</param>
/// <param name="systemId">user id recorded as creator/updater of imported records</param>
public static void Import(PerformContext performContext, DbAppContext dbContext, string fileLocation, string systemId)
{
    // check the start point. If startPoint == sigId then it is already completed
    int startPoint = ImportUtility.CheckInterMapForStartPoint(dbContext, OldTableProgress, BCBidImport.SigId);

    if (startPoint == BCBidImport.SigId)    // this means the import job it has done today is complete for all the records in the xml file.
    {
        performContext.WriteLine("*** Importing " + XmlFileName + " is complete from the former process ***");
        return;
    }

    try
    {
        // the xml document root is "ArrayOf<legacy table name>"
        string rootAttr = "ArrayOf" + OldTable;

        // create Processer progress indicator
        performContext.WriteLine("Processing " + OldTable);
        IProgressBar progress = performContext.WriteProgressBar();
        progress.SetValue(0);

        // create serializer and serialize xml file
        XmlSerializer ser = new XmlSerializer(typeof(Block[]), new XmlRootAttribute(rootAttr));
        MemoryStream memoryStream = ImportUtility.MemoryStreamGenerator(XmlFileName, OldTable, fileLocation, rootAttr);
        Block[] legacyItems = (Block[])ser.Deserialize(memoryStream);

        int ii = startPoint;

        // skip the portion already processed on an earlier (interrupted) run
        if (startPoint > 0)
        {
            legacyItems = legacyItems.Skip(ii).ToArray();
        }

        foreach (Block item in legacyItems.WithProgress(progress))
        {
            int areaId = item.Area_Id ?? 0;
            int equipmentTypeId = item.Equip_Type_Id ?? 0;
            // Block_Num arrives as a float-formatted string (e.g. "1.0")
            int blockNum = Convert.ToInt32(float.Parse(item.Block_Num ?? "0.0"));

            // this is for conversion record hope this is unique
            // (synthetic legacy key composed from area, equipment type and block number)
            string oldUniqueId = ((areaId * 10000 + equipmentTypeId) * 100 + blockNum).ToString();

            // see if we have this one already (matched via the ImportMaps cross-reference table)
            ImportMap importMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == OldTable && x.OldKey == oldUniqueId);

            // new entry
            if (importMap == null)
            {
                if (areaId > 0)
                {
                    LocalAreaRotationList instance = null;
                    CopyToInstance(dbContext, item, ref instance, systemId);
                    ImportUtility.AddImportMap(dbContext, OldTable, oldUniqueId, NewTable, instance.Id);
                }
            }
            else // update
            {
                LocalAreaRotationList instance = dbContext.LocalAreaRotationLists.FirstOrDefault(x => x.Id == importMap.NewKey);

                // record was deleted
                if (instance == null)
                {
                    CopyToInstance(dbContext, item, ref instance, systemId);

                    // update the import map to point at the re-created record
                    importMap.NewKey = instance.Id;
                    dbContext.ImportMaps.Update(importMap);
                }
                else // ordinary update
                {
                    CopyToInstance(dbContext, item, ref instance, systemId);

                    // touch the import map so the record shows as freshly synced
                    importMap.LastUpdateTimestamp = DateTime.UtcNow;
                    dbContext.ImportMaps.Update(importMap);
                }
            }

            // save change to database periodically to avoid frequent writing to the database
            if (++ii % 500 == 0)
            {
                try
                {
                    ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, ii.ToString(), BCBidImport.SigId);
                    dbContext.SaveChangesForImport();
                }
                catch (Exception e)
                {
                    performContext.WriteLine("Error saving data " + e.Message);
                }
            }
        }

        try
        {
            // mark the whole file complete and persist the remaining records
            performContext.WriteLine("*** Importing " + XmlFileName + " is Done ***");
            ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, BCBidImport.SigId.ToString(), BCBidImport.SigId);
            dbContext.SaveChangesForImport();
        }
        catch (Exception e)
        {
            performContext.WriteLine("Error saving data " + e.Message);
        }
    }
    catch (Exception e)
    {
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
    }
}
/// <summary>
/// Import Local Areas
/// </summary>
/// <param name="performContext">Hangfire job context used for console output and the progress bar</param>
/// <param name="dbContext">database context the imported records are written to</param>
/// <param name="fileLocation">folder holding the legacy xml export file</param>
/// <param name="systemId">user id recorded as creator/updater of imported records</param>
public static void Import(PerformContext performContext, DbAppContext dbContext, string fileLocation, string systemId)
{
    // check the start point. If startPoint == sigId then it is already completed
    int startPoint = ImportUtility.CheckInterMapForStartPoint(dbContext, OldTableProgress, BCBidImport.SigId);

    if (startPoint == BCBidImport.SigId)    // this means the import job it has done today is complete for all the records in the xml file.
    {
        performContext.WriteLine("*** Importing " + XmlFileName + " is complete from the former process ***");
        return;
    }

    try
    {
        // the xml document root is "ArrayOf<legacy table name>"
        string rootAttr = "ArrayOf" + OldTable;

        // create Processer progress indicator
        performContext.WriteLine("Processing " + OldTable);
        IProgressBar progress = performContext.WriteProgressBar();
        progress.SetValue(0);

        // create serializer and serialize xml file
        XmlSerializer ser = new XmlSerializer(typeof(ImportModels.Area[]), new XmlRootAttribute(rootAttr));
        MemoryStream memoryStream = ImportUtility.MemoryStreamGenerator(XmlFileName, OldTable, fileLocation, rootAttr);
        ImportModels.Area[] legacyItems = (ImportModels.Area[])ser.Deserialize(memoryStream);

        int ii = startPoint;

        // skip the portion already processed on an earlier (interrupted) run
        if (startPoint > 0)
        {
            legacyItems = legacyItems.Skip(ii).ToArray();
        }

        foreach (Area item in legacyItems.WithProgress(progress))
        {
            LocalArea localArea = null;

            // see if we have this one already (matched via the ImportMaps cross-reference table)
            ImportMap importMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == OldTable && x.OldKey == item.Area_Id.ToString());

            // also try to match an existing local area by case-insensitive name
            // NOTE(review): String.Equals with StringComparison likely cannot be
            // translated to SQL by EF and may evaluate client-side - confirm
            if (dbContext.LocalAreas.Count(x => String.Equals(x.Name, item.Area_Desc.Trim(), StringComparison.CurrentCultureIgnoreCase)) > 0)
            {
                localArea = dbContext.LocalAreas.FirstOrDefault(x => x.Name.ToUpper() == item.Area_Desc.Trim().ToUpper());
            }

            // new entry
            // NOTE(review): when importMap != null but no name match exists, this
            // branch adds a second import map for the same legacy key - confirm intended
            if (importMap == null || dbContext.LocalAreas.Count(x => String.Equals(x.Name, item.Area_Desc.Trim(), StringComparison.CurrentCultureIgnoreCase)) == 0)
            {
                if (item.Area_Id > 0)
                {
                    CopyToInstance(dbContext, item, ref localArea, systemId);
                    ImportUtility.AddImportMap(dbContext, OldTable, item.Area_Id.ToString(), NewTable, localArea.Id);
                }
            }
            else // update
            {
                localArea = dbContext.LocalAreas.FirstOrDefault(x => x.Id == importMap.NewKey);

                // record was deleted
                if (localArea == null)
                {
                    CopyToInstance(dbContext, item, ref localArea, systemId);

                    // update the import map to point at the re-created record
                    importMap.NewKey = localArea.Id;
                    dbContext.ImportMaps.Update(importMap);
                }
                else // ordinary update
                {
                    CopyToInstance(dbContext, item, ref localArea, systemId);

                    // touch the import map so the record shows as freshly synced
                    importMap.LastUpdateTimestamp = DateTime.UtcNow;
                    dbContext.ImportMaps.Update(importMap);
                }
            }

            // save change to database periodically to avoid frequent writing to the database
            if (++ii % 250 == 0)
            {
                try
                {
                    ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, ii.ToString(), BCBidImport.SigId);
                    dbContext.SaveChangesForImport();
                }
                catch (Exception e)
                {
                    performContext.WriteLine("Error saving data " + e.Message);
                }
            }
        }

        try
        {
            // mark the whole file complete and persist the remaining records
            performContext.WriteLine("*** Importing " + XmlFileName + " is Done ***");
            ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, BCBidImport.SigId.ToString(), BCBidImport.SigId);
            dbContext.SaveChangesForImport();
        }
        catch (Exception e)
        {
            performContext.WriteLine("Error saving data " + e.Message);
        }
    }
    catch (Exception e)
    {
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
    }
}
/// <summary>
/// Import HETS users from the legacy xml file.
/// Only creates users that do not already exist (matched by SmUserId substring of User_Cd);
/// existing users are left untouched.
/// </summary>
/// <param name="performContext">Hangfire context used for console logging and the progress bar</param>
/// <param name="dbContext">Database context the imported records are written to</param>
/// <param name="fileLocation">Folder containing the legacy xml files</param>
/// <param name="systemId">User id stamped on created records (passed through to CopyToInstance)</param>
public static void Import(PerformContext performContext, DbAppContext dbContext, string fileLocation, string systemId)
{
    // Check the start point. If startPoint == sigId then it is already completed
    int startPoint = ImportUtility.CheckInterMapForStartPoint(dbContext, oldTable_Progress, BCBidImport.sigId);

    if (startPoint == BCBidImport.sigId)    // This means the import job it has done today is complete for all the records in the xml file.
    {
        performContext.WriteLine("*** Importing " + xmlFileName + " is complete from the former process ***");
        return;
    }

    try
    {
        string rootAttr = "ArrayOf" + oldTable;

        // Create Processer progress indicator
        performContext.WriteLine("Processing " + oldTable);
        var progress = performContext.WriteProgressBar();
        progress.SetValue(0);

        // create serializer and serialize xml file
        XmlSerializer ser = new XmlSerializer(typeof(User_HETS[]), new XmlRootAttribute(rootAttr));
        MemoryStream memoryStream = ImportUtility.memoryStreamGenerator(xmlFileName, oldTable, fileLocation, rootAttr);
        HETSAPI.Import.User_HETS[] legacyItems = (HETSAPI.Import.User_HETS[])ser.Deserialize(memoryStream);

        int ii = startPoint;

        // Skip the portion already processed
        if (startPoint > 0)
        {
            legacyItems = legacyItems.Skip(ii).ToArray();
        }

        foreach (var item in legacyItems.WithProgress(progress))
        {
            // see if we have this one already.
            ImportMap importMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == oldTable && x.OldKey == item.Popt_Id.ToString());

            // match an existing user whose SmUserId appears inside the legacy User_Cd
            Models.User instance = dbContext.Users.FirstOrDefault(x => item.User_Cd.ToUpper().IndexOf(x.SmUserId.ToUpper()) >= 0);

            if (instance == null)
            {
                CopyToInstance(performContext, dbContext, item, ref instance, systemId);

                // new entry - CopyToInstance may decline to create a record, hence the null check
                if (importMap == null && instance != null)
                {
                    ImportUtility.AddImportMap(dbContext, oldTable, item.Popt_Id.ToString(), newTable, instance.Id);
                }

                // progress is recorded (and saved) per created user
                ImportUtility.AddImportMap_For_Progress(dbContext, oldTable_Progress, (++ii).ToString(), BCBidImport.sigId);
                dbContext.SaveChangesForImport();
            }
        }
    }
    catch (Exception e)
    {
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
    }

    try
    {
        performContext.WriteLine("*** Importing " + xmlFileName + " is Done ***");
        ImportUtility.AddImportMap_For_Progress(dbContext, oldTable_Progress, BCBidImport.sigId.ToString(), BCBidImport.sigId);
        dbContext.SaveChangesForImport();
    }
    catch (Exception e)
    {
        // FIX: previously swallowed silently (string iStr = e.ToString();)
        performContext.WriteLine("Error saving data " + e.Message);
    }
}
/// <summary>
/// Import from Equip_Usage.xml file to Three tables:
/// </summary>
/// <param name="performContext">Hangfire context used for console logging and the progress bar</param>
/// <param name="options">Options builder used to recreate the DbContext periodically (resets the change tracker)</param>
/// <param name="dbContext">Database context the imported records are written to</param>
/// <param name="fileLocation">Folder containing the legacy xml files</param>
/// <param name="systemId">User id stamped on created/updated records (passed through to CopyToTimeRecorded)</param>
public static void Import(PerformContext performContext, DbContextOptionsBuilder<DbAppContext> options, DbAppContext dbContext, string fileLocation, string systemId)
{
    // Check the start point. If startPoint == sigId then it is already completed
    int startPoint = ImportUtility.CheckInterMapForStartPoint(dbContext, oldTable_Progress, BCBidImport.sigId);

    if (startPoint == BCBidImport.sigId)    // This means the import job it has done today is complete for all the records in the xml file.
    {
        performContext.WriteLine("*** Importing " + xmlFileName + " is complete from the former process ***");
        return;
    }

    string rootAttr = "ArrayOf" + oldTable;

    // Create Processer progress indicator
    performContext.WriteLine("Processing " + oldTable);
    var progress = performContext.WriteProgressBar();
    progress.SetValue(0);

    // create serializer and serialize xml file
    XmlSerializer ser = new XmlSerializer(typeof(EquipUsage[]), new XmlRootAttribute(rootAttr));
    MemoryStream memoryStream = ImportUtility.memoryStreamGenerator(xmlFileName, oldTable, fileLocation, rootAttr);
    HETSAPI.Import.EquipUsage[] legacyItems = (HETSAPI.Import.EquipUsage[])ser.Deserialize(memoryStream);

    // Use this list to save a trip to query database in each iteration
    List<Models.Equipment> equips = dbContext.Equipments
        .Include(x => x.DumpTruck)
        .Include(x => x.DistrictEquipmentType)
        .ToList();

    int ii = startPoint;

    if (startPoint > 0)    // Skip the portion already processed
    {
        legacyItems = legacyItems.Skip(ii).ToArray();
    }

    foreach (var item in legacyItems.WithProgress(progress))
    {
        // see if we have this one already - the legacy key is the concatenation of the three ids
        string oldKey = (item.Equip_Id ?? 0).ToString() + (item.Project_Id ?? 0).ToString() + (item.Service_Area_Id ?? 0).ToString();

        // NOTE(review): assumes Worked_Dt is a non-null string with at least 10 characters
        // (yyyy-MM-dd prefix) — the original code made the same unguarded assumption; confirm against source data.
        string workedDate = item.Worked_Dt.Trim().Substring(0, 10);
        string note = oldKey + "-" + workedDate.Substring(0, 4);
        string oldKeyAll = oldKey + "-" + workedDate.Substring(0, 10);

        ImportMap importMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == oldTable && x.OldKey == oldKeyAll);

        if (importMap == null) // new entry
        {
            if (item.Equip_Id > 0)
            {
                Models.RentalAgreement rentalAgreement = dbContext.RentalAgreements.FirstOrDefault(x => x.Note == note);
                CopyToTimeRecorded(performContext, dbContext, item, ref rentalAgreement, note, workedDate, equips, systemId);
                ImportUtility.AddImportMap(dbContext, oldTable, oldKeyAll, newTable, rentalAgreement.Id);
            }
        }
        else // update
        {
            Models.RentalAgreement rentalAgreement = dbContext.RentalAgreements.FirstOrDefault(x => x.Id == importMap.NewKey);

            if (rentalAgreement == null) // record was deleted
            {
                CopyToTimeRecorded(performContext, dbContext, item, ref rentalAgreement, note, workedDate, equips, systemId);

                // update the import map.
                importMap.NewKey = rentalAgreement.Id;
                dbContext.ImportMaps.Update(importMap);
            }
            else // ordinary update.
            {
                CopyToTimeRecorded(performContext, dbContext, item, ref rentalAgreement, note, workedDate, equips, systemId);

                // touch the import map.
                importMap.LastUpdateTimestamp = DateTime.UtcNow;
                dbContext.ImportMaps.Update(importMap);
            }
        }

        if (++ii % 1000 == 0) // Save change to database once a while to avoid frequent writing to the database.
        {
            try
            {
                ImportUtility.AddImportMap_For_Progress(dbContext, oldTable_Progress, ii.ToString(), BCBidImport.sigId);
                dbContext.SaveChangesForImport();

                // recreate the context to drop the accumulated change tracker and keep memory flat
                options = new DbContextOptionsBuilder<DbAppContext>();
                dbContext = new DbAppContext(null, options.Options);
            }
            catch (Exception e)
            {
                // FIX: previously swallowed silently (string iStr = e.ToString();)
                performContext.WriteLine("Error saving data " + e.Message);
            }
        }
    }

    try
    {
        performContext.WriteLine("*** Importing " + xmlFileName + " is Done ***");
        ImportUtility.AddImportMap_For_Progress(dbContext, oldTable_Progress, BCBidImport.sigId.ToString(), BCBidImport.sigId);
        dbContext.SaveChangesForImport();
    }
    catch (Exception e)
    {
        // FIX: previously swallowed silently (string iStr = e.ToString();)
        performContext.WriteLine("Error saving data " + e.Message);
    }
}
/// <summary>
/// Import Equipment Types
/// </summary>
/// <param name="performContext">Hangfire context used for console logging and the progress bar</param>
/// <param name="dbContext">Database context the imported records are written to</param>
/// <param name="fileLocation">Folder containing the legacy xml files</param>
/// <param name="systemId">User id stamped on created records (passed through to CopyToInstance)</param>
public static void Import(PerformContext performContext, DbAppContext dbContext, string fileLocation, string systemId)
{
    // if the recorded start point already equals the signature id, today's run is complete
    int resumeIndex = ImportUtility.CheckInterMapForStartPoint(dbContext, OldTableProgress, BcBidImport.SigId, NewTable);

    if (resumeIndex == BcBidImport.SigId)
    {
        performContext.WriteLine("*** Importing " + XmlFileName + " is complete from the former process ***");
        return;
    }

    // seed the running max id so CopyToInstance can assign fresh ids (passed by ref)
    int maxEquipTypeIndex = dbContext.EquipmentTypes.Any()
        ? dbContext.EquipmentTypes.Max(x => x.Id)
        : 0;

    try
    {
        string rootAttr = "ArrayOf" + OldTable;

        // progress indicator for the Hangfire console
        performContext.WriteLine("Processing " + OldTable);
        IProgressBar progressBar = performContext.WriteProgressBar();
        progressBar.SetValue(0);

        // deserialize the legacy xml payload
        XmlSerializer serializer = new XmlSerializer(typeof(EquipType[]), new XmlRootAttribute(rootAttr));
        MemoryStream stream = ImportUtility.MemoryStreamGenerator(XmlFileName, OldTable, fileLocation, rootAttr);
        EquipType[] records = (EquipType[])serializer.Deserialize(stream);

        int counter = resumeIndex;

        // resume mid-file when a previous run saved progress
        if (resumeIndex > 0)
        {
            records = records.Skip(counter).ToArray();
        }

        Debug.WriteLine("Importing EquipmentType Data. Total Records: " + records.Length);

        foreach (EquipType record in records.WithProgress(progressBar))
        {
            // already mapped?
            ImportMap existingMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == OldTable && x.OldKey == record.Equip_Type_Id.ToString());

            // only brand-new Equipment Type records are imported
            if (existingMap == null && record.Equip_Type_Id > 0)
            {
                EquipmentType mapped = null;
                CopyToInstance(dbContext, record, ref mapped, systemId, ref maxEquipTypeIndex);

                if (mapped != null)
                {
                    ImportUtility.AddImportMap(dbContext, OldTable, record.Equip_Type_Id.ToString(), NewTable, mapped.Id);
                }
            }

            // flush to the database every 500 records so progress survives a crash
            // (test-then-increment preserves the original ii++ semantics: the saved
            // progress value is the already-incremented counter)
            bool timeToSave = counter % 500 == 0;
            counter++;

            if (timeToSave)
            {
                try
                {
                    ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, counter.ToString(), BcBidImport.SigId, NewTable);
                    dbContext.SaveChangesForImport();
                }
                catch (Exception e)
                {
                    performContext.WriteLine("Error saving data " + e.Message);
                }
            }
        }

        try
        {
            performContext.WriteLine("*** Importing " + XmlFileName + " is Done ***");
            ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, BcBidImport.SigId.ToString(), BcBidImport.SigId, NewTable);
            dbContext.SaveChangesForImport();
        }
        catch (Exception e)
        {
            string temp = string.Format("Error saving data (EquipmentTypeIndex: {0}): {1}", maxEquipTypeIndex, e.Message);
            performContext.WriteLine(temp);
            throw new DataException(temp);
        }
    }
    catch (Exception e)
    {
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
        throw;
    }
}
/// <summary>
/// Import Rotations
/// </summary>
/// <param name="performContext">Hangfire context used for console logging and the progress bar</param>
/// <param name="dbContext">Database context the imported records are written to</param>
/// <param name="fileLocation">Folder containing the legacy xml files</param>
/// <param name="systemId">User id associated with the import run (currently unused by this method body)</param>
public static void Import(PerformContext performContext, DbAppContext dbContext, string fileLocation, string systemId)
{
    // if the recorded start point already equals the signature id, today's run is complete
    int resumeIndex = ImportUtility.CheckInterMapForStartPoint(dbContext, OldTableProgress, BCBidImport.SigId);

    if (resumeIndex == BCBidImport.SigId)
    {
        performContext.WriteLine("*** Importing " + XmlFileName + " is complete from the former process ***");
        return;
    }

    try
    {
        string rootAttr = "ArrayOf" + OldTable;

        // progress indicator for the Hangfire console
        performContext.WriteLine("Processing " + OldTable);
        IProgressBar progressBar = performContext.WriteProgressBar();
        progressBar.SetValue(0);

        // deserialize the legacy xml payload
        XmlSerializer serializer = new XmlSerializer(typeof(RotationDoc[]), new XmlRootAttribute(rootAttr));
        MemoryStream stream = ImportUtility.MemoryStreamGenerator(XmlFileName, OldTable, fileLocation, rootAttr);
        RotationDoc[] records = (RotationDoc[])serializer.Deserialize(stream);

        // pre-load equipment once so each record doesn't re-query the database
        List<Equipment> equipmentList = dbContext.Equipments
            .Include(x => x.DumpTruck)
            .Include(x => x.DistrictEquipmentType)
            .ToList();

        int counter = resumeIndex;

        // resume mid-file when a previous run saved progress
        if (resumeIndex > 0)
        {
            records = records.Skip(counter).ToArray();
        }

        foreach (RotationDoc record in records.WithProgress(progressBar))
        {
            // the legacy key is the concatenation of equipment id and the two dates
            string legacyKey = record.Equip_Id + record.Note_Dt + record.Created_Dt;

            ImportMap existingMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == OldTable && x.OldKey == legacyKey);

            if (existingMap == null)
            {
                // brand new record
                Note note = null;
                CopyToInstance(dbContext, record, ref note, equipmentList);
                ImportUtility.AddImportMap(dbContext, OldTable, legacyKey, NewTable, note.Id);
            }
            else
            {
                Note note = dbContext.Notes.FirstOrDefault(x => x.Id == existingMap.NewKey);

                if (note == null)
                {
                    // mapped record was deleted - re-create it and repoint the map
                    CopyToInstance(dbContext, record, ref note, equipmentList);
                    existingMap.NewKey = note.Id;
                    dbContext.ImportMaps.Update(existingMap);
                }
                else
                {
                    // ordinary update - refresh the record and touch the map timestamp
                    CopyToInstance(dbContext, record, ref note, equipmentList);
                    existingMap.LastUpdateTimestamp = DateTime.UtcNow;
                    dbContext.ImportMaps.Update(existingMap);
                }
            }

            // flush to the database every 1000 records so progress survives a crash
            counter++;
            if (counter % 1000 == 0)
            {
                try
                {
                    ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, counter.ToString(), BCBidImport.SigId);
                    dbContext.SaveChangesForImport();
                }
                catch (Exception e)
                {
                    performContext.WriteLine("Error saving data " + e.Message);
                }
            }
        }

        try
        {
            performContext.WriteLine("*** Importing " + XmlFileName + " is Done ***");
            ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, BCBidImport.SigId.ToString(), BCBidImport.SigId);
            dbContext.SaveChangesForImport();
        }
        catch (Exception e)
        {
            performContext.WriteLine("Error saving data " + e.Message);
        }
    }
    catch (Exception e)
    {
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
    }
}
/// <summary>
/// Import Equipment Types from the legacy xml file (creates new records and refreshes existing ones).
/// </summary>
/// <param name="performContext">Hangfire context used for console logging and the progress bar</param>
/// <param name="dbContext">Database context the imported records are written to</param>
/// <param name="fileLocation">Folder containing the legacy xml files</param>
/// <param name="systemId">User id stamped on created/updated records (passed through to CopyToInstance)</param>
public static void Import(PerformContext performContext, DbAppContext dbContext, string fileLocation, string systemId)
{
    // Check the start point. If startPoint == sigId then it is already completed
    int startPoint = ImportUtility.CheckInterMapForStartPoint(dbContext, oldTable_Progress, BCBidImport.sigId);

    if (startPoint == BCBidImport.sigId)    // This means the import job it has done today is complete for all the records in the xml file.
    {
        performContext.WriteLine("*** Importing " + xmlFileName + " is complete from the former process ***");
        return;
    }

    try
    {
        string rootAttr = "ArrayOf" + oldTable;

        // Create Processer progress indicator
        performContext.WriteLine("Processing " + oldTable);
        var progress = performContext.WriteProgressBar();
        progress.SetValue(0);

        // create serializer and serialize xml file
        XmlSerializer ser = new XmlSerializer(typeof(EquipType[]), new XmlRootAttribute(rootAttr));
        MemoryStream memoryStream = ImportUtility.memoryStreamGenerator(xmlFileName, oldTable, fileLocation, rootAttr);
        HETSAPI.Import.EquipType[] legacyItems = (HETSAPI.Import.EquipType[])ser.Deserialize(memoryStream);

        int ii = 0;

        foreach (var item in legacyItems.WithProgress(progress))
        {
            // see if we have this one already.
            ImportMap importMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == oldTable && x.OldKey == item.Equip_Type_Id.ToString());

            if (importMap == null) // new entry
            {
                if (item.Equip_Type_Id > 0)
                {
                    Models.EquipmentType instance = null;
                    CopyToInstance(performContext, dbContext, item, ref instance, systemId);
                    ImportUtility.AddImportMap(dbContext, oldTable, item.Equip_Type_Id.ToString(), newTable, instance.Id);
                }
            }
            else // update
            {
                Models.EquipmentType instance = dbContext.EquipmentTypes.FirstOrDefault(x => x.Id == importMap.NewKey);

                if (instance == null) // record was deleted
                {
                    CopyToInstance(performContext, dbContext, item, ref instance, systemId);

                    // update the import map.
                    importMap.NewKey = instance.Id;
                    dbContext.ImportMaps.Update(importMap);
                }
                else // ordinary update.
                {
                    CopyToInstance(performContext, dbContext, item, ref instance, systemId);

                    // touch the import map.
                    importMap.LastUpdateTimestamp = DateTime.UtcNow;
                    dbContext.ImportMaps.Update(importMap);
                }
            }

            if (ii++ % 500 == 0) // Save change to database once a while to avoid frequent writing to the database.
            {
                try
                {
                    ImportUtility.AddImportMap_For_Progress(dbContext, oldTable_Progress, ii.ToString(), BCBidImport.sigId);
                    dbContext.SaveChangesForImport();
                }
                catch (Exception e)
                {
                    // FIX: previously swallowed silently (string iStr = e.ToString();)
                    performContext.WriteLine("Error saving data " + e.Message);
                }
            }
        }

        try
        {
            performContext.WriteLine("*** Importing " + xmlFileName + " is Done ***");
            ImportUtility.AddImportMap_For_Progress(dbContext, oldTable_Progress, BCBidImport.sigId.ToString(), BCBidImport.sigId);
            dbContext.SaveChangesForImport();
        }
        catch (Exception e)
        {
            // FIX: previously swallowed silently (string iStr = e.ToString();)
            performContext.WriteLine("Error saving data " + e.Message);
        }
    }
    catch (Exception e)
    {
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
    }
}
/// <summary>
/// Import Regions
/// </summary>
/// <param name="performContext">Hangfire context used for console logging and the progress bar</param>
/// <param name="dbContext">Database context the imported records are written to</param>
/// <param name="fileLocation">Folder containing the legacy xml files</param>
/// <param name="systemId">User id stamped on created/updated records (passed through to CopyToInstance)</param>
public static void Import(PerformContext performContext, DbAppContext dbContext, string fileLocation, string systemId)
{
    // check the start point. If startPoint == sigId then it is already completed
    // (the completion marker embeds today's date so the job can rerun on a later day)
    string completed = DateTime.Now.ToString("d") + "-" + "Completed";
    ImportMap importMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == OldTable && x.OldKey == completed && x.NewKey == SigId);

    if (importMap != null)
    {
        performContext.WriteLine("*** Importing " + XmlFileName + " is complete from the former process ***");
        return;
    }

    try
    {
        string rootAttr = "ArrayOf" + OldTable;

        performContext.WriteLine("Processing " + OldTable);
        IProgressBar progress = performContext.WriteProgressBar();
        progress.SetValue(0);

        // create serializer and serialize xml file
        XmlSerializer ser = new XmlSerializer(typeof(HetsRegion[]), new XmlRootAttribute(rootAttr));
        MemoryStream memoryStream = ImportUtility.MemoryStreamGenerator(XmlFileName, OldTable, fileLocation, rootAttr);
        HetsRegion[] legacyItems = (HetsRegion[])ser.Deserialize(memoryStream);

        foreach (var item in legacyItems.WithProgress(progress))
        {
            // see if we have this one already
            importMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == OldTable && x.OldKey == item.Region_Id.ToString());

            // BUGFIX: the existence check previously queried dbContext.LocalAreas for a
            // *region* name (copy-paste from the LocalArea importer) and then fetched
            // from dbContext.Regions. Query Regions directly; FirstOrDefault already
            // yields null when no match exists, so the separate Count() pre-check
            // (a second database round-trip) is unnecessary.
            Region reg = dbContext.Regions.FirstOrDefault(x => String.Equals(x.Name, item.Name.Trim(), StringComparison.CurrentCultureIgnoreCase));

            // new entry
            if (importMap == null && reg == null)
            {
                CopyToInstance(performContext, dbContext, item, ref reg, systemId);
                ImportUtility.AddImportMap(dbContext, OldTable, item.Region_Id.ToString(), NewTable, reg.Id);
            }
            else // update
            {
                // record was deleted (map exists but the region row is gone)
                if (reg == null)
                {
                    CopyToInstance(performContext, dbContext, item, ref reg, systemId);

                    // update the import map
                    importMap.NewKey = reg.Id;
                    dbContext.ImportMaps.Update(importMap);
                    dbContext.SaveChangesForImport();
                }
                else // ordinary update.
                {
                    CopyToInstance(performContext, dbContext, item, ref reg, systemId);

                    // touch the import map (it may be null here when the region was
                    // matched by name without an existing map)
                    if (importMap != null)
                    {
                        importMap.LastUpdateTimestamp = DateTime.UtcNow;
                        dbContext.ImportMaps.Update(importMap);
                    }
                    dbContext.SaveChangesForImport();
                }
            }
        }

        performContext.WriteLine("*** Importing " + XmlFileName + " is Done ***");
        ImportUtility.AddImportMap(dbContext, OldTable, completed, NewTable, SigId);
    }
    catch (Exception e)
    {
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
    }
}
/// <summary>
/// Import Users
/// </summary>
/// <param name="performContext">Hangfire context used for console logging and the progress bar</param>
/// <param name="dbContext">Database context the imported records are written to</param>
/// <param name="fileLocation">Folder containing the legacy xml files</param>
/// <param name="systemId">User id stamped on created records (passed through to CopyToInstance)</param>
public static void Import(PerformContext performContext, DbAppContext dbContext, string fileLocation, string systemId)
{
    // if the recorded start point already equals the signature id, today's run is complete
    int resumeIndex = ImportUtility.CheckInterMapForStartPoint(dbContext, OldTableProgress, BCBidImport.SigId);

    if (resumeIndex == BCBidImport.SigId)
    {
        performContext.WriteLine("*** Importing " + XmlFileName + " is complete from the former process ***");
        return;
    }

    try
    {
        string rootAttr = "ArrayOf" + OldTable;

        // progress indicator for the Hangfire console
        performContext.WriteLine("Processing " + OldTable);
        IProgressBar progressBar = performContext.WriteProgressBar();
        progressBar.SetValue(0);

        // deserialize the legacy xml payload
        XmlSerializer serializer = new XmlSerializer(typeof(UserHets[]), new XmlRootAttribute(rootAttr));
        MemoryStream stream = ImportUtility.MemoryStreamGenerator(XmlFileName, OldTable, fileLocation, rootAttr);
        UserHets[] records = (UserHets[])serializer.Deserialize(stream);

        int counter = resumeIndex;

        // resume mid-file when a previous run saved progress
        if (resumeIndex > 0)
        {
            records = records.Skip(counter).ToArray();
        }

        foreach (UserHets record in records.WithProgress(progressBar))
        {
            // already mapped?
            ImportMap existingMap = dbContext.ImportMaps.FirstOrDefault(x => x.OldTable == OldTable && x.OldKey == record.Popt_Id.ToString());

            // match an existing user whose SmUserId appears inside the legacy User_Cd
            User user = dbContext.Users.FirstOrDefault(x => record.User_Cd.ToUpper().IndexOf(x.SmUserId.ToUpper(), StringComparison.Ordinal) >= 0);

            // only create users that do not exist yet; existing users are left untouched
            if (user == null)
            {
                CopyToInstance(dbContext, record, ref user, systemId);

                // new entry - CopyToInstance may decline to create a record, hence the null check
                if (existingMap == null && user != null)
                {
                    ImportUtility.AddImportMap(dbContext, OldTable, record.Popt_Id.ToString(), NewTable, user.Id);
                }

                // progress is recorded (and saved) per created user
                counter++;
                ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, counter.ToString(), BCBidImport.SigId);
                dbContext.SaveChangesForImport();
            }
        }

        try
        {
            performContext.WriteLine("*** Importing " + XmlFileName + " is Done ***");
            ImportUtility.AddImportMapForProgress(dbContext, OldTableProgress, BCBidImport.SigId.ToString(), BCBidImport.SigId);
            dbContext.SaveChangesForImport();
        }
        catch (Exception e)
        {
            performContext.WriteLine("Error saving data " + e.Message);
        }
    }
    catch (Exception e)
    {
        performContext.WriteLine("*** ERROR ***");
        performContext.WriteLine(e.ToString());
    }
}