/// <summary>
/// Imports LARS Learning Aim Validity data from the Access extract into [dbo].[LearningAimValidity]
/// via the [dbo].[Import_LearningAimValidity] staging table, and records a MetadataUpload audit row
/// (added to <paramref name="db"/> but not saved here — the caller is expected to SaveChanges).
/// </summary>
/// <param name="conn">An open SQL Server connection.</param>
/// <param name="userId">User id recorded on the MetadataUpload audit row.</param>
/// <param name="db">EF context used for before/after row counts and the audit row.</param>
private static void ImportValidity(SqlConnection conn, String userId, ProviderPortalEntities db)
{
    MetadataUpload metadataUpload = new MetadataUpload
    {
        MetadataUploadTypeId = (int)Constants.MetadataUploadType.LearningAimValidity,
        CreatedByUserId = userId,
        CreatedDateTimeUtc = DateTime.UtcNow,
        FileName = Path.GetFileName(mdbFilename),
        FileSizeInBytes = (int)mdbFileSize,
        RowsBefore = db.LearningAimValidities.Count()
    };

    Stopwatch sw = Stopwatch.StartNew();

    // Import Validity: clear the staging table before loading the new extract.
    using (SqlCommand truncate = new SqlCommand("TRUNCATE TABLE [dbo].[Import_LearningAimValidity];", conn))
    {
        truncate.ExecuteNonQuery();
    }

    // Read the validity rows from the Access source and bulk copy them into staging.
    using (DataTable dt = OpenDataTable("SELECT LearnAimRef AS LearningAimRefId, ValidityCategory, StartDate, EndDate, LastNewStartDate FROM Core_LARS_Validity;"))
    {
        BulkImportData(conn, dt, "[dbo].[Import_LearningAimValidity]");
    }

    // Upsert staging into the live table, keyed on (LearningAimRefId, ValidityCategory).
    using (SqlCommand merge = new SqlCommand("MERGE [dbo].[LearningAimValidity] dest USING [dbo].[Import_LearningAimValidity] source ON dest.LearningAimRefId = source.LearningAimRefId AND dest.ValidityCategory = source.ValidityCategory WHEN MATCHED THEN UPDATE SET dest.StartDate = source.StartDate, dest.EndDate = source.EndDate, dest.LastNewStartDate = source.LastNewStartDate WHEN NOT MATCHED THEN INSERT (LearningAimRefId, ValidityCategory, StartDate, EndDate, LastNewStartDate) VALUES (source.LearningAimRefId, source.ValidityCategory, source.StartDate, source.EndDate, source.LastNewStartDate);", conn))
    {
        merge.ExecuteNonQuery();
    }

    sw.Stop();
    metadataUpload.DurationInMilliseconds = (int)sw.ElapsedMilliseconds;
    metadataUpload.RowsAfter = db.LearningAimValidities.Count();
    db.MetadataUploads.Add(metadataUpload);
}
/// <summary>
/// Imports LARS Awarding Organisation lookup data from the Access extract into
/// [dbo].[LearningAimAwardOrg] via the [dbo].[Import_LearningAimAwardOrg] staging table, and
/// records a MetadataUpload audit row (added to <paramref name="db"/> but not saved here).
/// </summary>
/// <param name="conn">An open SQL Server connection.</param>
/// <param name="userId">User id recorded on the MetadataUpload audit row.</param>
/// <param name="db">EF context used for before/after row counts and the audit row.</param>
private static void ImportAwardingOrganisation(SqlConnection conn, String userId, ProviderPortalEntities db)
{
    MetadataUpload metadataUpload = new MetadataUpload
    {
        MetadataUploadTypeId = (int)Constants.MetadataUploadType.LearningAimAwardOrg,
        CreatedByUserId = userId,
        CreatedDateTimeUtc = DateTime.UtcNow,
        FileName = Path.GetFileName(mdbFilename),
        FileSizeInBytes = (int)mdbFileSize,
        RowsBefore = db.LearningAimAwardOrgs.Count()
    };

    Stopwatch sw = Stopwatch.StartNew();

    // Import Awarding Organisation: clear the staging table before loading the new extract.
    using (SqlCommand truncate = new SqlCommand("TRUNCATE TABLE [dbo].[Import_LearningAimAwardOrg];", conn))
    {
        truncate.ExecuteNonQuery();
    }

    // Read the lookup rows from the Access source and bulk copy them into staging.
    using (DataTable dt = OpenDataTable("SELECT AwardOrgCode AS LearningAimAwardOrgCode, AwardOrgName FROM CoreReference_LARS_AwardOrgCode_Lookup;"))
    {
        BulkImportData(conn, dt, "[dbo].[Import_LearningAimAwardOrg]");
    }

    // Upsert staging into the live table, keyed on LearningAimAwardOrgCode.
    using (SqlCommand merge = new SqlCommand("MERGE [dbo].[LearningAimAwardOrg] dest USING [dbo].[Import_LearningAimAwardOrg] source ON dest.LearningAimAwardOrgCode = source.LearningAimAwardOrgCode WHEN MATCHED THEN UPDATE SET dest.AwardOrgName = source.AwardOrgName WHEN NOT MATCHED THEN INSERT (LearningAimAwardOrgCode, AwardOrgName) VALUES (source.LearningAimAwardOrgCode, source.AwardOrgName);", conn))
    {
        merge.ExecuteNonQuery();
    }

    sw.Stop();
    metadataUpload.DurationInMilliseconds = (int)sw.ElapsedMilliseconds;
    metadataUpload.RowsAfter = db.LearningAimAwardOrgs.Count();
    db.MetadataUploads.Add(metadataUpload);
}
/// <summary>
/// Imports LARS Learning Aims from the Access extract into [dbo].[LearningAim] via the
/// [dbo].[Import_LearningAim] staging table. Rows present in the import are upserted with
/// RecordStatusId = 2 (live); rows missing from the import are flagged RecordStatusId = 4
/// (deleted). A MetadataUpload audit row is added to <paramref name="db"/> but not saved here.
/// The Access Switch() maps NotionalNVQLevelv2 codes (X/E/H and '1'..'8') onto the portal's
/// QualificationLevelId values.
/// </summary>
/// <param name="conn">An open SQL Server connection.</param>
/// <param name="userId">User id recorded on the MetadataUpload audit row.</param>
/// <param name="db">EF context used for before/after row counts and the audit row.</param>
private static void ImportLearningAims(SqlConnection conn, String userId, ProviderPortalEntities db)
{
    MetadataUpload metadataUpload = new MetadataUpload
    {
        MetadataUploadTypeId = (int)Constants.MetadataUploadType.LearningAim,
        CreatedByUserId = userId,
        CreatedDateTimeUtc = DateTime.UtcNow,
        FileName = Path.GetFileName(mdbFilename),
        FileSizeInBytes = (int)mdbFileSize,
        RowsBefore = db.LearningAims.Count()
    };

    Stopwatch sw = Stopwatch.StartNew();

    // Import Learning Aims: clear the staging table before loading the new extract.
    using (SqlCommand truncate = new SqlCommand("TRUNCATE TABLE [dbo].[Import_LearningAim];", conn))
    {
        truncate.ExecuteNonQuery();
    }

    // Read current (not yet expired) learning aims from the Access source into staging.
    // 'NUL' LearnDirect codes are converted to real NULLs on the way in.
    using (DataTable dt = OpenDataTable("SELECT LearnAimRef AS LearningAimRefId, LearnAimRefTitle AS LearningAimTitle, AwardOrgCode AS LearningAimAwardOrgCode, 0 AS IndependentLivingSkills, IIF(LearnDirectClassSystemCode1 = 'NUL', NULL, LearnDirectClassSystemCode1) AS LDCS1, IIF(LearnDirectClassSystemCode2 = 'NUL', NULL, LearnDirectClassSystemCode2) AS LDCS2, IIF(LearnDirectClassSystemCode3 = 'NUL', NULL, LearnDirectClassSystemCode3) AS LDCS3, Switch (NotionalNVQLevelv2 = 'X', '11', NotionalNVQLevelv2 = 'E', '10', NotionalNVQLevelv2 = '1', '1', NotionalNVQLevelv2 BETWEEN '2' AND '8', NotionalNVQLevelv2, NotionalNVQLevelv2 = 'H', '9') AS QualificationLevelId FROM Core_LARS_LearningDelivery WHERE (EffectiveTo IS NULL OR EffectiveTo > Now);"))
    {
        BulkImportData(conn, dt, "[dbo].[Import_LearningAim]");
    }

    // Upsert staging into the live table, keyed on LearningAimRefId; new rows get an empty
    // Qualification and RecordStatusId = 2.
    using (SqlCommand merge = new SqlCommand("MERGE [dbo].[LearningAim] dest USING [dbo].[Import_LearningAim] source ON dest.LearningAimRefId = source.LearningAimRefId WHEN MATCHED THEN UPDATE SET dest.LearningAimTitle = source.LearningAimTitle, dest.LearningAimAwardOrgCode = source.LearningAimAwardOrgCode, dest.IndependentLivingSkills = source.IndependentLivingSkills, dest.LearnDirectClassSystemCode1 = source.LDCS1, dest.LearnDirectClassSystemCode2 = source.LDCS2, dest.LearnDirectClassSystemCode3 = source.LDCS3, dest.QualificationLevelId = CAST(source.QualificationLevelId AS INT), dest.RecordStatusId = 2 WHEN NOT MATCHED THEN INSERT (LearningAimRefId, Qualification, LearningAimTitle, LearningAimAwardOrgCode, IndependentLivingSkills, LearnDirectClassSystemCode1, LearnDirectClassSystemCode2, LearnDirectClassSystemCode3, QualificationLevelId, RecordStatusId) VALUES (source.LearningAimRefId, '', source.LearningAimTitle, source.LearningAimAwardOrgCode, source.IndependentLivingSkills, source.LDCS1, source.LDCS2, source.LDCS3, CAST(source.QualificationLevelId AS INT), 2);", conn))
    {
        merge.ExecuteNonQuery();
    }

    // Set status to delete (4) for items not in the import table.
    using (SqlCommand flagDeleted = new SqlCommand("UPDATE [dbo].[LearningAim] SET RecordStatusId = 4 WHERE LearningAimRefId NOT IN (SELECT LearningAimRefId FROM [dbo].[Import_LearningAim]);", conn))
    {
        flagDeleted.ExecuteNonQuery();
    }

    sw.Stop();
    metadataUpload.DurationInMilliseconds = (int)sw.ElapsedMilliseconds;
    metadataUpload.RowsAfter = db.LearningAims.Count();
    db.MetadataUploads.Add(metadataUpload);
}
/// <summary>
/// Populates <paramref name="model"/> with the automation in-progress flag for the LARS import
/// task and the details (who, when, which file) of the most recent Learning Aim metadata upload.
/// Fix: DateTime.SpecifyKind returns a new value rather than mutating its argument; the original
/// code discarded the result, so LastUploadDateTimeUtc never actually got DateTimeKind.Utc.
/// </summary>
/// <param name="model">View model to populate; untouched fields are left as-is.</param>
private void GetLastUploadDetails(LARSUploadModel model)
{
    String taskName = AutomationController.AutomatedTaskName.LARSImport.ToString();
    AutomatedTask at = db.AutomatedTasks.Where(x => x.TaskName == taskName).FirstOrDefault();
    if (at != null)
    {
        model.IsAutomationInProgress = at.InProgress;
    }

    MetadataUpload dataUpload = db.MetadataUploads
        .Where(m => m.MetadataUploadTypeId == (Int32)Constants.MetadataUploadType.LearningAim)
        .OrderByDescending(m => m.CreatedDateTimeUtc)
        .FirstOrDefault();
    if (dataUpload != null)
    {
        if (dataUpload.AspNetUser == null)
        {
            // Navigation property not loaded/linked - fall back to a direct lookup by id.
            AspNetUser user = db.AspNetUsers.Find(dataUpload.CreatedByUserId);
            if (user != null)
            {
                model.LastUploadedBy = user.Name;
            }
        }
        else
        {
            model.LastUploadedBy = dataUpload.AspNetUser.Name ?? dataUpload.AspNetUser.Email;
        }

        model.LastUploadDateTimeUtc = dataUpload.CreatedDateTimeUtc;
        // FIX: assign the result - SpecifyKind does not modify the value in place.
        model.LastUploadDateTimeUtc = DateTime.SpecifyKind(model.LastUploadDateTimeUtc.Value, DateTimeKind.Utc);
        model.LastUploadFileName = dataUpload.FileName;
    }
}
/// <summary>
/// Populates <paramref name="model"/> with details of the most recent AddressBase metadata upload.
/// The stored FileName may contain several names separated by ';'; they are joined with
/// "&lt;br /&gt;" for display.
/// Fixes: (1) DateTime.SpecifyKind returns a new value rather than mutating its argument; the
/// original code discarded the result. (2) The blanket catch {} around the FileName split only
/// ever guarded against a null FileName, so it is replaced with an explicit null check.
/// </summary>
/// <param name="model">View model to populate; untouched fields are left as-is.</param>
private void GetLastUploadDetails(UploadAddressBaseModel model)
{
    MetadataUpload dataUpload = db.MetadataUploads
        .Where(m => m.MetadataUploadTypeId == (Int32)Constants.MetadataUploadType.AddressBase)
        .OrderByDescending(m => m.CreatedDateTimeUtc)
        .FirstOrDefault();
    if (dataUpload != null)
    {
        if (dataUpload.AspNetUser == null)
        {
            // Navigation property not loaded/linked - fall back to a direct lookup by id.
            AspNetUser user = db.AspNetUsers.Find(dataUpload.CreatedByUserId);
            if (user != null)
            {
                model.LastUploadedBy = user.Name;
            }
        }
        else
        {
            model.LastUploadedBy = dataUpload.AspNetUser.Name;
        }

        model.LastUploadDateTimeUtc = dataUpload.CreatedDateTimeUtc;
        // FIX: assign the result - SpecifyKind does not modify the value in place.
        model.LastUploadDateTimeUtc = DateTime.SpecifyKind(model.LastUploadDateTimeUtc.Value, DateTimeKind.Utc);
        if (dataUpload.FileName != null)
        {
            model.LastUploadFileName = String.Join("<br />", dataUpload.FileName.Split(';'));
        }
    }
}
/// <summary>
/// Imports LearnDirect classification lookup data from the Access extract into
/// [dbo].[LearnDirectClassification] via the [dbo].[Import_LearnDirectClassification] staging
/// table, then rebuilds the LDCS hierarchy via the [dbo].[up_UpdateLDCSHierarchy] stored
/// procedure. A MetadataUpload audit row is added to <paramref name="db"/> but not saved here.
/// </summary>
/// <param name="conn">An open SQL Server connection.</param>
/// <param name="userId">User id recorded on the MetadataUpload audit row.</param>
/// <param name="db">EF context used for before/after row counts and the audit row.</param>
private static void ImportClassifications(SqlConnection conn, String userId, ProviderPortalEntities db)
{
    MetadataUpload metadataUpload = new MetadataUpload
    {
        MetadataUploadTypeId = (int)Constants.MetadataUploadType.LearnDirectClassification,
        CreatedByUserId = userId,
        CreatedDateTimeUtc = DateTime.UtcNow,
        FileName = Path.GetFileName(mdbFilename),
        FileSizeInBytes = (int)mdbFileSize,
        RowsBefore = db.LearnDirectClassifications.Count()
    };

    Stopwatch sw = Stopwatch.StartNew();

    // Import Classifications: clear the staging table before loading the new extract.
    using (SqlCommand truncate = new SqlCommand("TRUNCATE TABLE [dbo].[Import_LearnDirectClassification];", conn))
    {
        truncate.ExecuteNonQuery();
    }

    // Read the classification lookup rows from the Access source and bulk copy into staging.
    using (DataTable dt = OpenDataTable("SELECT LearnDirectClassSystemCode AS LearnDirectClassificationRef, LearnDirectClassSystemCodeDesc FROM CoreReference_LARS_LearnDirectClassSystemCode_Lookup;"))
    {
        BulkImportData(conn, dt, "[dbo].[Import_LearnDirectClassification]");
    }

    // Upsert staging into the live table, keyed on LearnDirectClassificationRef.
    using (SqlCommand merge = new SqlCommand("MERGE [dbo].[LearnDirectClassification] dest USING [dbo].[Import_LearnDirectClassification] source ON dest.LearnDirectClassificationRef = source.LearnDirectClassificationRef WHEN MATCHED THEN UPDATE SET dest.LearnDirectClassSystemCodeDesc = source.LearnDirectClassSystemCodeDesc WHEN NOT MATCHED THEN INSERT (LearnDirectClassificationRef, LearnDirectClassSystemCodeDesc) VALUES (source.LearnDirectClassificationRef, source.LearnDirectClassSystemCodeDesc);", conn))
    {
        merge.ExecuteNonQuery();
    }

    // Update LDCS Hierarchy (parent/child links) from the freshly merged classifications.
    using (SqlCommand commSP = new SqlCommand("[dbo].[up_UpdateLDCSHierarchy]", conn) { CommandType = CommandType.StoredProcedure })
    {
        commSP.ExecuteNonQuery();
    }

    sw.Stop();
    metadataUpload.DurationInMilliseconds = (int)sw.ElapsedMilliseconds;
    metadataUpload.RowsAfter = db.LearnDirectClassifications.Count();
    db.MetadataUploads.Add(metadataUpload);
}
/// <summary>
/// Populates <paramref name="model"/> with details of the most recent CodePoint metadata upload.
/// Fix: DateTime.SpecifyKind returns a new value rather than mutating its argument; the original
/// code discarded the result, so LastUploadDateTimeUtc never actually got DateTimeKind.Utc.
/// </summary>
/// <param name="model">View model to populate; untouched fields are left as-is.</param>
private void GetLastUploadDetails(CodePointUploadModel model)
{
    MetadataUpload dataUpload = db.MetadataUploads
        .Where(m => m.MetadataUploadTypeId == (Int32)Constants.MetadataUploadType.CodePoint)
        .OrderByDescending(m => m.CreatedDateTimeUtc)
        .FirstOrDefault();
    if (dataUpload != null)
    {
        if (dataUpload.AspNetUser == null)
        {
            // Navigation property not loaded/linked - fall back to a direct lookup by id.
            AspNetUser user = db.AspNetUsers.Find(dataUpload.CreatedByUserId);
            if (user != null)
            {
                model.LastUploadedBy = user.Name;
            }
        }
        else
        {
            model.LastUploadedBy = dataUpload.AspNetUser.Name;
        }

        model.LastUploadDateTimeUtc = dataUpload.CreatedDateTimeUtc;
        // FIX: assign the result - SpecifyKind does not modify the value in place.
        model.LastUploadDateTimeUtc = DateTime.SpecifyKind(model.LastUploadDateTimeUtc.Value, DateTimeKind.Utc);
        model.LastUploadFileName = dataUpload.FileName;
    }
}
/// <summary>
/// Imports LARS Frameworks, ProgTypes, Sector Subject Area tier 1/2 lookups, Standards and
/// Standard Sector Codes from the Access extract via their staging tables, merges them into the
/// live tables, flags rows absent from the extract as deleted (RecordStatusId = 4), and records
/// one MetadataUpload audit row per data set (added to <paramref name="db"/> but not saved here).
/// Fix: the ProgType MERGE previously assigned dest.EffectiveTo = dest.EffectiveTo (a no-op), so
/// EffectiveTo changes in the source were never applied; it now copies source.EffectiveTo.
/// </summary>
/// <param name="conn">An open SQL Server connection.</param>
/// <param name="userId">User id recorded on the MetadataUpload audit rows.</param>
/// <param name="db">EF context used for before/after row counts and the audit rows.</param>
private static void ImportFrameworksAndStandards(SqlConnection conn, String userId, ProviderPortalEntities db)
{
    MetadataUpload mduFrameworks = new MetadataUpload { MetadataUploadTypeId = (int)Constants.MetadataUploadType.Frameworks, CreatedByUserId = userId, CreatedDateTimeUtc = DateTime.UtcNow, FileName = Path.GetFileName(mdbFilename), FileSizeInBytes = (int)mdbFileSize, RowsBefore = db.Frameworks.Count() };
    MetadataUpload mduProgTypes = new MetadataUpload { MetadataUploadTypeId = (int)Constants.MetadataUploadType.ProgTypes, CreatedByUserId = userId, CreatedDateTimeUtc = DateTime.UtcNow, FileName = Path.GetFileName(mdbFilename), FileSizeInBytes = (int)mdbFileSize, RowsBefore = db.ProgTypes.Count() };
    MetadataUpload mduSectorSubjectAreaTier1 = new MetadataUpload { MetadataUploadTypeId = (int)Constants.MetadataUploadType.SectorSubjectAreaTier1, CreatedByUserId = userId, CreatedDateTimeUtc = DateTime.UtcNow, FileName = Path.GetFileName(mdbFilename), FileSizeInBytes = (int)mdbFileSize, RowsBefore = db.SectorSubjectAreaTier1.Count() };
    MetadataUpload mduSectorSubjectAreaTier2 = new MetadataUpload { MetadataUploadTypeId = (int)Constants.MetadataUploadType.SectorSubjectAreaTier2, CreatedByUserId = userId, CreatedDateTimeUtc = DateTime.UtcNow, FileName = Path.GetFileName(mdbFilename), FileSizeInBytes = (int)mdbFileSize, RowsBefore = db.SectorSubjectAreaTier2.Count() };

    Stopwatch sw = Stopwatch.StartNew();

    // Import Frameworks: clear staging, then load current apprenticeship frameworks only
    // (ProgType 2/3/20/21/22/23, FworkCode >= 400, not yet expired).
    // NOTE(review): the Access select aliases PathwayName as PwayName while the merge below reads
    // source.PathwayName - presumably BulkImportData maps columns by ordinal; verify.
    using (SqlCommand truncateFramework = new SqlCommand("TRUNCATE TABLE [dbo].[Import_Framework];", conn))
    {
        truncateFramework.ExecuteNonQuery();
    }
    using (DataTable dtFramework = OpenDataTable("SELECT FWorkCode AS FrameworkCode, ProgType, PwayCode AS PathwayCode, LTrim(RTrim(Left(PathwayName, 2000))) AS PwayName, LTrim(RTrim(Left(NasTitle, 2000))) AS NasTitle, EffectiveFrom, EffectiveTo, SectorSubjectAreaTier1, SectorSubjectAreaTier2 FROM Core_LARS_Framework WHERE ProgType IN (2, 3, 20, 21, 22, 23) AND FworkCode >= 400 AND (EffectiveTo IS NULL OR EffectiveTo >= Now);"))
    {
        BulkImportData(conn, dtFramework, "[dbo].[Import_Framework]");
    }

    // Import Sector Subject Area Tier 1s into staging.
    using (SqlCommand truncateTier1 = new SqlCommand("TRUNCATE TABLE [dbo].[Import_SectorSubjectAreaTier1];", conn))
    {
        truncateTier1.ExecuteNonQuery();
    }
    using (DataTable dtTier1 = OpenDataTable("SELECT SectorSubjectAreaTier1 AS SectorSubjectAreaTier1Id, LTrim(RTrim(SectorSubjectAreaTier1Desc)) AS SectorSubjectAreaTier1Desc, LTrim(RTrim(SectorSubjectAreaTier1Desc2)) AS SectorSubjectAreaTier1Desc2, EffectiveFrom, EffectiveTo FROM CoreReference_LARS_SectorSubjectAreaTier1_Lookup;"))
    {
        BulkImportData(conn, dtTier1, "[dbo].[Import_SectorSubjectAreaTier1]");
    }

    // Import Sector Subject Area Tier 2s into staging.
    using (SqlCommand truncateTier2 = new SqlCommand("TRUNCATE TABLE [dbo].[Import_SectorSubjectAreaTier2];", conn))
    {
        truncateTier2.ExecuteNonQuery();
    }
    using (DataTable dtTier2 = OpenDataTable("SELECT SectorSubjectAreaTier2 AS SectorSubjectAreaTier2Id, LTrim(RTrim(SectorSubjectAreaTier2Desc)) AS SectorSubjectAreaTier2Desc, LTrim(RTrim(SectorSubjectAreaTier2Desc2)) AS SectorSubjectAreaTier2Desc2, EffectiveFrom, EffectiveTo FROM CoreReference_LARS_SectorSubjectAreaTier2_Lookup;"))
    {
        BulkImportData(conn, dtTier2, "[dbo].[Import_SectorSubjectAreaTier2]");
    }

    // Import ProgTypes into staging.
    using (SqlCommand truncateProgType = new SqlCommand("TRUNCATE TABLE [dbo].[Import_ProgType];", conn))
    {
        truncateProgType.ExecuteNonQuery();
    }
    using (DataTable dtProgType = OpenDataTable("SELECT ProgType AS ProgTypeId, ProgTypeDesc, ProgTypeDesc2, EffectiveFrom, EffectiveTo FROM CoreReference_LARS_ProgType_Lookup;"))
    {
        BulkImportData(conn, dtProgType, "[dbo].[Import_ProgType]");
    }

    // Merge Data: upsert each staging table into its live counterpart.
    using (SqlCommand mergeTier1 = new SqlCommand("MERGE [dbo].[SectorSubjectAreaTier1] dest USING [dbo].[Import_SectorSubjectAreaTier1] source ON dest.SectorSubjectAreaTier1Id = source.SectorSubjectAreaTier1Id WHEN MATCHED THEN UPDATE SET dest.SectorSubjectAreaTier1Desc = source.SectorSubjectAreaTier1Desc, dest.SectorSubjectAreaTier1Desc2 = source.SectorSubjectAreaTier1Desc2, dest.EffectiveFrom = source.EffectiveFrom, dest.EffectiveTo = source.EffectiveTo, dest.ModifiedDateTimeUtc = GetUtcDate() WHEN NOT MATCHED THEN INSERT (SectorSubjectAreaTier1Id, SectorSubjectAreaTier1Desc, SectorSubjectAreaTier1Desc2, EffectiveFrom, EffectiveTo, CreatedDateTimeUtc) VALUES (source.SectorSubjectAreaTier1Id, source.SectorSubjectAreaTier1Desc, source.SectorSubjectAreaTier1Desc2, source.EffectiveFrom, source.EffectiveTo, GetUtcDate());", conn))
    {
        mergeTier1.ExecuteNonQuery();
    }
    using (SqlCommand mergeTier2 = new SqlCommand("MERGE [dbo].[SectorSubjectAreaTier2] dest USING [dbo].[Import_SectorSubjectAreaTier2] source ON dest.SectorSubjectAreaTier2Id = source.SectorSubjectAreaTier2Id WHEN MATCHED THEN UPDATE SET dest.SectorSubjectAreaTier2Desc = source.SectorSubjectAreaTier2Desc, dest.SectorSubjectAreaTier2Desc2 = source.SectorSubjectAreaTier2Desc2, dest.EffectiveFrom = source.EffectiveFrom, dest.EffectiveTo = source.EffectiveTo, dest.ModifiedDateTimeUtc = GetUtcDate() WHEN NOT MATCHED THEN INSERT (SectorSubjectAreaTier2Id, SectorSubjectAreaTier2Desc, SectorSubjectAreaTier2Desc2, EffectiveFrom, EffectiveTo, CreatedDateTimeUtc) VALUES (source.SectorSubjectAreaTier2Id, source.SectorSubjectAreaTier2Desc, source.SectorSubjectAreaTier2Desc2, source.EffectiveFrom, source.EffectiveTo, GetUtcDate());", conn))
    {
        mergeTier2.ExecuteNonQuery();
    }
    // FIX: dest.EffectiveTo previously self-assigned (dest.EffectiveTo = dest.EffectiveTo),
    // silently dropping source EffectiveTo updates. Now copies source.EffectiveTo.
    using (SqlCommand mergeProgType = new SqlCommand("MERGE [dbo].[ProgType] dest USING [dbo].[Import_ProgType] source ON dest.ProgTypeId = source.ProgTypeId WHEN MATCHED THEN UPDATE SET dest.ProgTypeDesc = LTrim(RTrim(source.ProgTypeDesc)), dest.ProgTypeDesc2 = LTrim(RTrim(source.ProgTypeDesc2)), dest.EffectiveFrom = source.EffectiveFrom, dest.EffectiveTo = source.EffectiveTo, dest.ModifiedDateTimeUtc = GetUtcDate() WHEN NOT MATCHED THEN INSERT (ProgTypeId, ProgTypeDesc, ProgTypeDesc2, EffectiveFrom, EffectiveTo, CreatedDateTimeUtc) VALUES (source.ProgTypeId, LTrim(RTrim(source.ProgTypeDesc)), LTrim(RTrim(source.ProgTypeDesc2)), source.EffectiveFrom, source.EffectiveTo, GetUtcDate());", conn))
    {
        mergeProgType.ExecuteNonQuery();
    }
    // Normalise whitespace on any pre-existing ProgType rows as well.
    using (SqlCommand trimProgType = new SqlCommand("UPDATE [dbo].[ProgType] SET ProgTypeDesc = LTrim(RTrim(ProgTypeDesc)), ProgTypeDesc2 = LTrim(RTrim(ProgTypeDesc2));", conn))
    {
        trimProgType.ExecuteNonQuery();
    }
    using (SqlCommand mergeFramework = new SqlCommand("MERGE [dbo].[Framework] dest USING [dbo].[Import_Framework] source ON dest.FrameworkCode = source.FrameworkCode AND dest.ProgType = source.ProgType AND dest.PathwayCode = source.PathwayCode WHEN MATCHED THEN UPDATE SET dest.PathwayName = source.PathwayName, dest.NasTitle = source.NasTitle, dest.EffectiveFrom = source.EffectiveFrom, dest.EffectiveTo = source.EffectiveTo, dest.SectorSubjectAreaTier1 = source.SectorSubjectAreaTier1, dest.SectorSubjectAreaTier2 = source.SectorSubjectAreaTier2, dest.ModifiedDateTimeUtc = GetUtcDate(), dest.RecordStatusId = 2 WHEN NOT MATCHED THEN INSERT (FrameworkCode, ProgType, PathwayCode, PathwayName, NasTitle, EffectiveFrom, EffectiveTo, SectorSubjectAreaTier1, SectorSubjectAreaTier2, CreatedDateTimeUtc, RecordStatusId) VALUES (source.FrameworkCode, source.ProgType, source.PathwayCode, source.PathwayName, source.NasTitle, source.EffectiveFrom, source.EffectiveTo, source.SectorSubjectAreaTier1, source.SectorSubjectAreaTier2, GetUtcDate(), 2);", conn))
    {
        mergeFramework.ExecuteNonQuery();
    }
    // Delete Data Not In This File: flag frameworks absent from the import as deleted.
    using (SqlCommand flagFrameworkDeleted = new SqlCommand("UPDATE [dbo].[Framework] SET RecordStatusId = 4 WHERE CAST(FrameworkCode AS VARCHAR) + '~' + CAST(ProgType AS VARCHAR) + '~' + CAST(PathwayCode AS VARCHAR) NOT IN (SELECT CAST(FrameworkCode AS VARCHAR) + '~' + CAST(ProgType AS VARCHAR) + '~' + CAST(PathwayCode AS VARCHAR) FROM [dbo].[Import_Framework]);", conn))
    {
        flagFrameworkDeleted.ExecuteNonQuery();
    }

    sw.Stop();
    // One audit row per data set; they share a single timing for this phase.
    mduFrameworks.DurationInMilliseconds = (int)sw.ElapsedMilliseconds;
    mduFrameworks.RowsAfter = db.Frameworks.Count();
    db.MetadataUploads.Add(mduFrameworks);
    mduProgTypes.DurationInMilliseconds = (int)sw.ElapsedMilliseconds;
    mduProgTypes.RowsAfter = db.ProgTypes.Count();
    db.MetadataUploads.Add(mduProgTypes);
    mduSectorSubjectAreaTier1.DurationInMilliseconds = (int)sw.ElapsedMilliseconds;
    mduSectorSubjectAreaTier1.RowsAfter = db.SectorSubjectAreaTier1.Count();
    db.MetadataUploads.Add(mduSectorSubjectAreaTier1);
    mduSectorSubjectAreaTier2.DurationInMilliseconds = (int)sw.ElapsedMilliseconds;
    mduSectorSubjectAreaTier2.RowsAfter = db.SectorSubjectAreaTier2.Count();
    db.MetadataUploads.Add(mduSectorSubjectAreaTier2);

    // Import Standards
    MetadataUpload mduStandards = new MetadataUpload { MetadataUploadTypeId = (int)Constants.MetadataUploadType.Standards, CreatedByUserId = userId, CreatedDateTimeUtc = DateTime.UtcNow, FileName = Path.GetFileName(mdbFilename), FileSizeInBytes = (int)mdbFileSize, RowsBefore = db.Standards.Count() };
    MetadataUpload mduStandardSectorCodes = new MetadataUpload { MetadataUploadTypeId = (int)Constants.MetadataUploadType.StandardSectorCodes, CreatedByUserId = userId, CreatedDateTimeUtc = DateTime.UtcNow, FileName = Path.GetFileName(mdbFilename), FileSizeInBytes = (int)mdbFileSize, RowsBefore = db.StandardSectorCodes.Count() };

    sw = Stopwatch.StartNew();

    // Import Standards: clear staging, then load only the latest version of each current standard.
    // NOTE(review): the select aliases UrlLink as Url while the merge reads source.UrlLink -
    // presumably BulkImportData maps columns by ordinal; verify.
    using (SqlCommand truncateStandard = new SqlCommand("TRUNCATE TABLE [dbo].[Import_Standard];", conn))
    {
        truncateStandard.ExecuteNonQuery();
    }
    using (DataTable dtStandard = OpenDataTable("SELECT StandardCode, Version, LTrim(RTrim(StandardName)) AS StandardName, LTrim(RTrim(StandardSectorCode)) AS StandardSectorCode, EffectiveFrom, EffectiveTo, LTrim(RTrim(Left(UrlLink, 1000))) AS Url, SectorSubjectAreaTier1, SectorSubjectAreaTier2, NotionalEndLevel, OtherBodyApprovalRequired FROM Core_LARS_Standard WHERE StandardCode >= 1 AND (EffectiveTo IS NULL OR EffectiveTo >= Now) AND CStr(StandardCode) + '~' + CStr(Version) IN (SELECT CStr(StandardCode) + '~' + CStr(MaxVersion) FROM (SELECT StandardCode, max(Version) AS MaxVersion FROM Core_LARS_Standard GROUP BY StandardCode));"))
    {
        BulkImportData(conn, dtStandard, "[dbo].[Import_Standard]");
    }

    // Import Standard Sector Codes into staging.
    using (SqlCommand truncateSectorCode = new SqlCommand("TRUNCATE TABLE [dbo].[Import_StandardSectorCode];", conn))
    {
        truncateSectorCode.ExecuteNonQuery();
    }
    using (DataTable dtSectorCode = OpenDataTable("SELECT LTrim(RTrim(StandardSectorCode)) AS StandardSectorCodeId, LTrim(RTrim(StandardSectorCodeDesc)) AS StandardSectorCodeDesc, LTrim(RTrim(StandardSectorCodeDesc2)) AS StandardSectorCodeDesc2, EffectiveFrom, EffectiveTo FROM CoreReference_LARS_StandardSectorCode_Lookup;"))
    {
        BulkImportData(conn, dtSectorCode, "[dbo].[Import_StandardSectorCode]");
    }

    // Merge Data: upsert sector codes then standards.
    using (SqlCommand mergeSectorCode = new SqlCommand("MERGE [dbo].[StandardSectorCode] dest USING [dbo].[Import_StandardSectorCode] source ON dest.StandardSectorCodeId = source.StandardSectorCodeId WHEN MATCHED THEN UPDATE SET dest.StandardSectorCodeDesc = source.StandardSectorCodeDesc, dest.StandardSectorCodeDesc2 = source.StandardSectorCodeDesc2, dest.EffectiveFrom = source.EffectiveFrom, dest.EffectiveTo = source.EffectiveTo, dest.ModifiedDateTimeUtc = GetUtcDate() WHEN NOT MATCHED THEN INSERT (StandardSectorCodeId, StandardSectorCodeDesc, StandardSectorCodeDesc2, EffectiveFrom, EffectiveTo, CreatedDateTimeUtc) VALUES (source.StandardSectorCodeId, source.StandardSectorCodeDesc, source.StandardSectorCodeDesc2, source.EffectiveFrom, source.EffectiveTo, GetUtcDate());", conn))
    {
        mergeSectorCode.ExecuteNonQuery();
    }
    using (SqlCommand mergeStandard = new SqlCommand("MERGE [dbo].[Standard] dest USING [dbo].[Import_Standard] source ON dest.StandardCode = source.StandardCode AND dest.Version = source.Version WHEN MATCHED THEN UPDATE SET dest.StandardName = source.StandardName, dest.StandardSectorCode = source.StandardSectorCode, dest.EffectiveFrom = source.EffectiveFrom, dest.EffectiveTo = source.EffectiveTo, dest.UrlLink = source.UrlLink, dest.ModifiedDateTimeUtc = GetUtcDate(), dest.RecordStatusId = 2, dest.NotionalEndLevel = source.NotionalEndLevel, dest.OtherBodyApprovalRequired = source.OtherBodyApprovalRequired WHEN NOT MATCHED THEN INSERT (StandardCode, Version, StandardName, StandardSectorCode, EffectiveFrom, EffectiveTo, UrlLink, CreatedDateTimeUtc, RecordStatusId, NotionalEndLevel, OtherBodyApprovalRequired) VALUES (source.StandardCode, source.Version, source.StandardName, source.StandardSectorCode, source.EffectiveFrom, source.EffectiveTo, source.UrlLink, GetUtcDate(), 2, source.NotionalEndLevel, source.OtherBodyApprovalRequired);", conn))
    {
        mergeStandard.ExecuteNonQuery();
    }
    // Delete Data Not In This File: flag standards absent from the import as deleted.
    using (SqlCommand flagStandardDeleted = new SqlCommand("UPDATE [dbo].[Standard] SET RecordStatusId = 4 WHERE CAST(StandardCode AS VARCHAR) + '~' + CAST(Version AS VARCHAR) NOT IN (SELECT CAST(StandardCode AS VARCHAR) + '~' + CAST(Version AS VARCHAR) FROM [dbo].[Import_Standard]);", conn))
    {
        flagStandardDeleted.ExecuteNonQuery();
    }

    sw.Stop();
    mduStandards.DurationInMilliseconds = (int)sw.ElapsedMilliseconds;
    mduStandards.RowsAfter = db.Standards.Count();
    db.MetadataUploads.Add(mduStandards);
    mduStandardSectorCodes.DurationInMilliseconds = (int)sw.ElapsedMilliseconds;
    mduStandardSectorCodes.RowsAfter = db.StandardSectorCodes.Count();
    db.MetadataUploads.Add(mduStandardSectorCodes);
}
/// <summary>
/// Handles the Code Point upload POST. Validates the uploaded ZIP, extracts its CSV files into
/// the configured folder, then starts a background thread that converts each row's OS
/// easting/northing to lat/lng and bulk-imports it into [dbo].[GeoLocation] inside a single
/// transaction, reporting progress and honouring a user-requested cancellation flag.
/// </summary>
/// <param name="model">Upload model carrying the posted ZIP file.</param>
/// <returns>Redirect to Index when validation passed; otherwise the view with model errors.</returns>
public ActionResult Index(CodePointUploadModel model)
{
    if (ModelState.IsValid)
    {
        try
        {
            String[] validFileTypes = { ".zip" };
            Boolean validFileType = false;
            String CodePointFolder = Constants.ConfigSettings.CodePointUploadVirtualDirectoryName;
            // Strip a single trailing backslash so Path.Combine below behaves consistently.
            if (CodePointFolder.EndsWith(@"\"))
            {
                CodePointFolder = CodePointFolder.Substring(0, CodePointFolder.Length - 1);
            }
            // Check if config setting is valid.
            // NOTE(review): on failure this only adds a model error and falls through; a valid
            // ZIP would still be saved into the bad folder and fail in the outer catch - verify.
            if (String.IsNullOrEmpty(CodePointFolder) || !Directory.Exists(CodePointFolder))
            {
                ModelState.AddModelError("", AppGlobal.Language.GetText(this, "CodePointFolderNotConfigured", "Configuration setting VirtualDirectoryNameForStoringCodePointFiles is not set or is incorrect"));
            }
            // Only .zip uploads are accepted.
            foreach (String fileType in validFileTypes)
            {
                if (model.File.FileName.ToLower().EndsWith(fileType))
                {
                    validFileType = true;
                    break;
                }
            }
            if (!validFileType)
            {
                ModelState.AddModelError("File", AppGlobal.Language.GetText(this, "ZIPFilesOnly", "Please upload a ZIP file"));
            }
            else
            {
                String ZIPFile = Path.Combine(CodePointFolder, "CodePoint.zip");
                // Save the zip file
                model.File.SaveAs(ZIPFile);
                // Unzip all the CSV files (skipping the Code-Point column-headers file).
                using (ZipArchive za = ZipFile.OpenRead(ZIPFile))
                {
                    foreach (ZipArchiveEntry entry in za.Entries.Where(entry => entry.Name.ToLower().EndsWith(".csv")).Where(entry => entry.Name.ToLower() != "code-point_open_column_headers.csv"))
                    {
                        entry.ExtractToFile(Path.Combine(CodePointFolder, entry.Name), true);
                    }
                }
                // Delete the zip file
                System.IO.File.Delete(ZIPFile);
                // Audit row; RowsAfter/duration are filled in by the background thread on success.
                MetadataUpload metadataUpload = new MetadataUpload { MetadataUploadTypeId = (int)Constants.MetadataUploadType.CodePoint, CreatedByUserId = Permission.GetCurrentUserGuid().ToString(), CreatedDateTimeUtc = DateTime.UtcNow, FileName = model.File.FileName, FileSizeInBytes = model.File.ContentLength, RowsBefore = db.GeoLocations.Count() };
                var sw = new Stopwatch();
                sw.Start();
                // Import the new data
                String[] csvFiles = Directory.GetFiles(CodePointFolder, "*.csv");
                if (csvFiles.GetLength(0) == 0)
                {
                    ModelState.AddModelError("", AppGlobal.Language.GetText(this, "UnableToFindCSVFile", "Unable to find any CSV files to import"));
                    DeleteProgressMessage();
                }
                else
                {
                    AddOrReplaceProgressMessage(AppGlobal.Language.GetText(this, "StartingImport", "Starting Import..."));
                    Boolean cancellingImport = false;
                    // Resolve localised progress/result messages up front - the background thread
                    // must not touch the HTTP-context-bound language lookup.
                    String importingMessageText = AppGlobal.Language.GetText(this, "ImportingFileXOfY", "Importing file {0} of {1}...");
                    String mergingMessageText = AppGlobal.Language.GetText(this, "MergeData", "Merging Data...");
                    String removingTempDataMessageText = AppGlobal.Language.GetText(this, "RemovingTemporaryData", "Removing Temporary Data...");
                    String importSuccessfulMessageText = AppGlobal.Language.GetText(this, "ImportSuccessful", "Code Point Data Successfully Imported");
                    String importCancelledMessageText = AppGlobal.Language.GetText(this, "ImportCancelled", "Code Point Data Import Cancelled");
                    String importErrorMessageText = AppGlobal.Language.GetText(this, "ImportError", "Error Importing Code Point Data : {0}");
                    // Fire-and-forget worker; the request returns immediately while this runs.
                    new Thread(() =>
                    {
                        try
                        {
                            // Fresh context - the controller's db is tied to the request lifetime.
                            ProviderPortalEntities _db = new ProviderPortalEntities();
                            // Open the database
                            using (SqlConnection conn = new SqlConnection(_db.Database.Connection.ConnectionString))
                            {
                                conn.Open();
                                // Whole import runs in one transaction so cancellation can roll back.
                                using (SqlTransaction transaction = conn.BeginTransaction())
                                {
                                    // Truncate the temporary import table just in case there's still data in there.
                                    SqlCommand comm = new SqlCommand("TRUNCATE TABLE [dbo].[Import_GeoLocation];", conn, transaction);
                                    comm.ExecuteNonQuery();
                                    // Setup the DataTable used as the bulk-copy buffer (one per CSV file).
                                    DataTable dt = new DataTable();
                                    dt.Columns.Add(new DataColumn { ColumnName = "Postcode", AllowDBNull = false, DataType = typeof(String), MaxLength = 8 });
                                    dt.Columns.Add(new DataColumn { ColumnName = "Lat", AllowDBNull = false, DataType = typeof(Decimal) });
                                    dt.Columns.Add(new DataColumn { ColumnName = "Lng", AllowDBNull = false, DataType = typeof(Decimal) });
                                    dt.Columns.Add(new DataColumn { ColumnName = "Northing", AllowDBNull = false, DataType = typeof(Decimal) });
                                    dt.Columns.Add(new DataColumn { ColumnName = "Easting", AllowDBNull = false, DataType = typeof(Decimal) });
                                    Int32 i = 1;
                                    foreach (String csvFile in csvFiles)
                                    {
                                        // Check if we have stopped the import
                                        if (IsCancellingImport(new ProviderPortalEntities()))
                                        {
                                            break;
                                        }
                                        // Remove all the rows from the previous file's buffer.
                                        dt.Clear();
                                        // Write the progress message
                                        AddOrReplaceProgressMessage(_db, String.Format(importingMessageText, i++, csvFiles.GetLength(0)));
                                        // Import the CSV file (headerless Code-Point format: postcode, ?, easting, northing, ...).
                                        using (CsvReader csv = new CsvReader(new StreamReader(csvFile)))
                                        {
                                            const Int32 POSTCODE = 0;
                                            const Int32 EASTING = 2;
                                            const Int32 NORTHING = 3;
                                            csv.Configuration.HasHeaderRecord = false;
                                            while (csv.Read())
                                            {
                                                String Postcode = CorrectPostcode(csv[POSTCODE]);
                                                Double Northing = Convert.ToDouble(csv[NORTHING]);
                                                Double Easting = Convert.ToDouble(csv[EASTING]);
                                                // Convert OS grid references to WGS84 lat/lng.
                                                LatLon latlon = LatLonConversions.ConvertOSToLatLon(Easting, Northing);
                                                const Int32 decimalPlaces = 6;
                                                // No space in the postcode: insert one before the
                                                // final three characters (the inward code).
                                                if (Postcode.IndexOf(" ") == -1)
                                                {
                                                    Postcode = Postcode.Substring(0, Postcode.Length - 3) + " " + Postcode.Substring(Postcode.Length - 3, 3);
                                                }
                                                DataRow dr = dt.NewRow();
                                                dr["Postcode"] = Postcode;
                                                dr["Lat"] = Math.Round(latlon.Latitude, decimalPlaces);
                                                dr["Lng"] = Math.Round(latlon.Longitude, decimalPlaces);
                                                dr["Northing"] = Northing;
                                                dr["Easting"] = Easting;
                                                dt.Rows.Add(dr);
                                                // Every 100 rows, check whether we are cancelling the import
                                                if (csv.Row % 100 == 0 && IsCancellingImport(new ProviderPortalEntities()))
                                                {
                                                    cancellingImport = true;
                                                    break;
                                                }
                                            }
                                            // Dispose early (inside the using) so the file handle is
                                            // released before the delete below.
                                            csv.Dispose();
                                            // Delete the file to tidy up space as quickly as possible
                                            try
                                            {
                                                System.IO.File.Delete(csvFile);
                                            }
                                            catch { }
                                        }
                                        if (!cancellingImport)
                                        {
                                            // Copy the data to the Import_GeoLocation Table
                                            BulkImportData(conn, dt, transaction);
                                        }
                                    }
                                    cancellingImport = IsCancellingImport(new ProviderPortalEntities());
                                    if (!cancellingImport)
                                    {
                                        // Merge the data into the GeoLocation Table
                                        AddOrReplaceProgressMessage(_db, mergingMessageText);
                                        comm = new SqlCommand("MERGE [dbo].[GeoLocation] dest USING [dbo].[Import_GeoLocation] source ON dest.Postcode = source.Postcode WHEN MATCHED THEN UPDATE SET dest.Lat = source.Lat, dest.Lng = source.Lng, dest.Northing = source.Northing, dest.Easting = source.Easting WHEN NOT MATCHED THEN INSERT (Postcode, Lat, Lng, Northing, Easting) VALUES (source.Postcode, source.Lat, source.Lng, source.Northing, source.Easting);", conn, transaction) { CommandTimeout = 3600 /* 1 Hour */ };
                                        comm.ExecuteNonQuery();
                                        // Update any Address Rows that don't currently have any Latitude or Longitude
                                        try
                                        {
                                            comm = new SqlCommand("UPDATE Address SET Address.Latitude = GeoLocation.Lat, Address.Longitude = GeoLocation.lng FROM Address INNER JOIN GeoLocation ON Address.Postcode = GeoLocation.Postcode WHERE Address.Latitude IS NULL AND GeoLocation.Lat IS NOT NULL;", conn, transaction) { CommandTimeout = 3600 /* 1 Hour */ };
                                            comm.ExecuteNonQuery();
                                        }
                                        catch {}
                                    }
                                    // Truncate the temporary import table
                                    if (!cancellingImport)
                                    {
                                        AddOrReplaceProgressMessage(_db, removingTempDataMessageText);
                                        comm = new SqlCommand("TRUNCATE TABLE [dbo].[Import_GeoLocation];", conn, transaction);
                                        comm.ExecuteNonQuery();
                                    }
                                    if (!IsCancellingImport(new ProviderPortalEntities()))
                                    {
                                        // Commit the transaction
                                        transaction.Commit();
                                        #region Update After Row Counts
                                        // Add the current row count to MetadataUpload
                                        // Save timings
                                        sw.Stop();
                                        metadataUpload.DurationInMilliseconds = (int)sw.ElapsedMilliseconds;
                                        metadataUpload.RowsAfter = _db.GeoLocations.Count();
                                        _db.MetadataUploads.Add(metadataUpload);
                                        _db.SaveChanges();
                                        #endregion
                                    }
                                    else
                                    {
                                        // Rollback the transaction
                                        try
                                        {
                                            transaction.Rollback();
                                            _db.Dispose();
                                        }
                                        catch { }
                                    }
                                    // Close the database
                                    conn.Close();
                                }
                            }
                            // Delete all the uploaded and expanded files
                            try
                            {
                                foreach (FileInfo file in new DirectoryInfo(CodePointFolder).GetFiles())
                                {
                                    file.Delete();
                                }
                            }
                            catch { }
                            // Write Success or Cancelled message
                            AddOrReplaceProgressMessage(_db, cancellingImport ? importCancelledMessageText : importSuccessfulMessageText, true);
                        }
                        catch (Exception ex)
                        {
                            // Report the failure via the progress message, then clean up files.
                            AddOrReplaceProgressMessage(new ProviderPortalEntities(), String.Format(importErrorMessageText, ex.Message), true);
                            // Delete all the uploaded and expanded files
                            try
                            {
                                foreach (FileInfo file in new DirectoryInfo(CodePointFolder).GetFiles())
                                {
                                    file.Delete();
                                }
                            }
                            catch { }
                        }
                    }).Start();
                }
            }
        }
        catch (Exception ex)
        {
            // Create a model error
            ModelState.AddModelError("", ex.Message);
        }
    }
    // No Errors so redirect to index which will show messages
    if (ModelState.IsValid)
    {
        return (RedirectToAction("Index"));
    }
    GetLastUploadDetails(model);
    return (View(model));
}
/// <summary>
/// Imports an uploaded FE Choices CSV file. Every existing FEChoice row is marked
/// for deletion and then re-created/updated from the file, and a MetadataUpload
/// audit record is written with timings and before/after row counts.
/// Expected CSV layout (one header row): UPIN, LearnerDestination,
/// EmployerSatisfaction, LearnerSatisfaction.
/// </summary>
/// <param name="model">The posted upload model; <c>model.File</c> carries the CSV.</param>
/// <returns>The Index view, with a success message or with model errors.</returns>
public ActionResult Index(FEChoicesUploadModel model) {
    if (ModelState.IsValid) {
        // Zero-based column positions within the CSV file.
        const Int32 fieldUPIN = 0;
        const Int32 fieldLearnerDestination = 1;
        const Int32 fieldEmployerSatisfaction = 2;
        const Int32 fieldLearnerSatisfaction = 3;
        try {
            if (model.File == null) {
                // No file was posted at all: report the same "CSV only" message instead
                // of letting a NullReferenceException surface through the catch below.
                ModelState.AddModelError("File", AppGlobal.Language.GetText(this, "CSVFilesOnly", "Please upload a CSV file"));
            } else {
                String[] validFileTypes = { ".csv" };
                Boolean validFileType = false;
                foreach (String fileType in validFileTypes) {
                    // Ordinal case-insensitive comparison instead of ToLower():
                    // culture-safe (e.g. Turkish 'I') and allocation-free.
                    if (model.File.FileName.EndsWith(fileType, StringComparison.OrdinalIgnoreCase)) {
                        validFileType = true;
                        break;
                    }
                }
                if (!validFileType) {
                    ModelState.AddModelError("File", AppGlobal.Language.GetText(this, "CSVFilesOnly", "Please upload a CSV file"));
                } else {
                    // Audit record: who uploaded what, when, and the row counts either side.
                    var metadataUpload = new MetadataUpload {
                        MetadataUploadTypeId = (int)Constants.MetadataUploadType.FEChoices,
                        CreatedByUserId = Permission.GetCurrentUserGuid().ToString(),
                        CreatedDateTimeUtc = DateTime.UtcNow,
                        FileName = model.File.FileName,
                        FileSizeInBytes = model.File.ContentLength,
                        RowsBefore = db.FEChoices.Count()
                    };
                    var sw = new Stopwatch();
                    sw.Start();
                    // Mark every existing row as deleted; rows that re-appear in the file
                    // are resurrected below by flipping their state back to Modified.
                    foreach (FEChoice fe in db.FEChoices.ToList()) {
                        db.Entry(fe).State = EntityState.Deleted;
                    }
                    // Import the new data.
                    using (TextReader csvFile = new StreamReader(model.File.InputStream)) {
                        using (var csvReader = new CsvReader(csvFile)) {
                            csvReader.Configuration.HasHeaderRecord = true;
                            while (csvReader.Read()) {
                                Int32? UPIN = GetIntOrNull(csvReader.GetField<String>(fieldUPIN));
                                if (UPIN != null) {
                                    Double? LearnerDestination = GetDoubleOrNull(csvReader.GetField<String>(fieldLearnerDestination));
                                    Double? EmployerSatisfaction = GetDoubleOrNull(csvReader.GetField<String>(fieldEmployerSatisfaction));
                                    Double? LearnerSatisfaction = GetDoubleOrNull(csvReader.GetField<String>(fieldLearnerSatisfaction));
                                    FEChoice fec = db.FEChoices.Find(UPIN);
                                    if (fec != null) {
                                        // Row existed (and was marked Deleted above) - undelete and update it.
                                        db.Entry(fec).State = EntityState.Modified;
                                    } else {
                                        fec = new FEChoice { UPIN = UPIN.Value };
                                        db.FEChoices.Add(fec);
                                    }
                                    fec.LearnerDestination = LearnerDestination;
                                    fec.LearnerSatisfaction = LearnerSatisfaction;
                                    fec.EmployerSatisfaction = EmployerSatisfaction;
                                    fec.CreatedDateTimeUtc = DateTime.UtcNow;
                                } else if (ModelState.IsValid) {
                                    // UPIN is null - report it once; only the first empty UPIN is flagged.
                                    ModelState.AddModelError("", AppGlobal.Language.GetText(this, "UPINIsNull", "UPIN is empty"));
                                }
                            }
                        }
                    }
                    if (ModelState.IsValid) {
                        sw.Stop();
                        metadataUpload.DurationInMilliseconds = (int)sw.ElapsedMilliseconds;
                        // Persist the data changes first so that RowsAfter reflects the
                        // post-import table: Count() executes SQL and would otherwise
                        // ignore the still-pending tracked deletes/inserts.
                        db.SaveChanges();
                        metadataUpload.RowsAfter = db.FEChoices.Count();
                        db.MetadataUploads.Add(metadataUpload);
                        db.SaveChanges();
                        ViewBag.Message = AppGlobal.Language.GetText(this, "ImportSuccessful", "FE Choices Data Successfully Imported");
                    }
                }
            }
        } catch (Exception ex) {
            // Surface any parse/database failure to the user as a model error.
            ModelState.AddModelError("", ex.Message);
        }
    }
    GetLastUploadDetails(model);
    return (View(model));
}
/// <summary>
/// Starts an AddressBase import: validates the configured upload folder, locates the
/// uploaded *.zip files and spawns a background thread that unzips each archive,
/// stages the CSV rows in [dbo].[Import_AddressBase] via bulk copy, merges them into
/// [dbo].[AddressBase], and records a MetadataUpload audit row. Progress and the
/// final success/cancel/error state are reported through ProgressMessage rows.
/// </summary>
/// <returns>Redirects to Index when the kick-off succeeds; otherwise redisplays Index with model errors.</returns>
public ActionResult StartImport() {
    // Check if import is already in progress.
    ProgressMessage pm = db.ProgressMessages.Find(MessageArea);
    if (pm != null) {
        if (!pm.IsComplete) {
            ModelState.AddModelError("", AppGlobal.Language.GetText(this, "ImportInProgress", "Import Already In Progress"));
        } else {
            // The previous run finished - clear its stale progress message.
            DeleteProgressMessage();
        }
    }
    String AddressBaseFolder = Constants.ConfigSettings.AddressBaseUploadVirtualDirectoryName;
    // Normalise away a single trailing backslash from the configured path.
    if (AddressBaseFolder.EndsWith(@"\")) {
        AddressBaseFolder = AddressBaseFolder.Substring(0, AddressBaseFolder.Length - 1);
    }
    // Check if config setting is valid.
    if (String.IsNullOrEmpty(AddressBaseFolder) || !Directory.Exists(AddressBaseFolder)) {
        ModelState.AddModelError("", AppGlobal.Language.GetText(this, "AddressBaseFolderNotConfigured", "Configuration setting VirtualDirectoryNameForStoringAddressBaseFiles is not set or is incorrect"));
        DeleteProgressMessage();
    }
    if (ModelState.IsValid) {
        // Find the uploaded ZIP archives to import.
        String[] zipFiles = Directory.GetFiles(AddressBaseFolder, "*.zip");
        if (zipFiles.GetLength(0) == 0) {
            ModelState.AddModelError("", AppGlobal.Language.GetText(this, "UnableToFindZIPFile", "Unable to find ZIP file(s) to import"));
            DeleteProgressMessage();
        } else {
            AddOrReplaceProgressMessage(AppGlobal.Language.GetText(this, "StartingImport", "Starting Import..."));
            Boolean cancellingImport = false;
            // Pre-resolve the localized progress/result strings on the request thread;
            // they are captured by the worker closure below.
            String importingMessageText = AppGlobal.Language.GetText(this, "ImportingFileXOfY", "Importing file {0} of {1}...");
            String unzippingMessageText = AppGlobal.Language.GetText(this, "UnzippingFileXOfY", "Unzipping file {0} of {1}...");
            String mergingMessageText = AppGlobal.Language.GetText(this, "MergeData", "Merging Data...");
            String removingTempDataMessageText = AppGlobal.Language.GetText(this, "RemovingTemporaryData", "Removing Temporary Data...");
            String importSuccessfulMessageText = AppGlobal.Language.GetText(this, "ImportSuccessful", "Address Base Data Successfully Imported");
            String importCancelledMessageText = AppGlobal.Language.GetText(this, "ImportCancelled", "Address Base Data Import Cancelled");
            String importErrorMessageText = AppGlobal.Language.GetText(this, "ImportError", "Error Importing Address Base : {0}");
            // Captured now for the audit record, while the request context is available.
            String userId = Permission.GetCurrentUserId();
            // Fire-and-forget worker; callers observe progress via ProgressMessage rows.
            new Thread(() => {
                try {
                    ProviderPortalEntities _db = new ProviderPortalEntities();
                    // Zero-based column positions in the AddressBase CSV files.
                    const Int32 UPRN = 0;
                    const Int32 ORGANISATION_NAME = 3;
                    const Int32 DEPARTMENT_NAME = 4;
                    const Int32 PO_BOX_NUMBER = 5;
                    const Int32 SUB_BUILDING_NAME = 6;
                    const Int32 BUILDING_NAME = 7;
                    const Int32 BUILDING_NUMBER = 8;
                    const Int32 DEPENDENT_THOROUGHFARE_NAME = 9;
                    const Int32 THOROUGHFARE_NAME = 10;
                    const Int32 POST_TOWN = 11;
                    const Int32 DOUBLE_DEPENDENT_LOCALITY = 12;
                    const Int32 DEPENDENT_LOCALITY = 13;
                    const Int32 POSTCODE = 14;
                    const Int32 CHANGE_TYPE = 22;
                    const Int32 LATITUDE = 18;
                    const Int32 LONGITUDE = 19;
                    // Total upload size and a semicolon-separated file list for the audit row.
                    var totalSize = 0;
                    var fileNames = String.Empty;
                    foreach (var item in zipFiles) {
                        totalSize += (int)new FileInfo(item).Length;
                        fileNames += Path.GetFileName(item) + ";";
                    }
                    var metadataUpload = new MetadataUpload {
                        MetadataUploadTypeId = (int)Constants.MetadataUploadType.AddressBase,
                        CreatedByUserId = userId,
                        CreatedDateTimeUtc = DateTime.UtcNow,
                        FileName = fileNames.TrimEnd(';'),
                        FileSizeInBytes = totalSize,
                        RowsBefore = _db.AddressBases.Count()
                    };
                    var sw = new Stopwatch();
                    sw.Start();
                    // Open a raw connection on the same database for TRUNCATE/bulk copy/MERGE.
                    SqlConnection conn = new SqlConnection(_db.Database.Connection.ConnectionString);
                    conn.Open();
                    // Truncate the temporary import table just in case there's still data in there.
                    SqlCommand comm = new SqlCommand("TRUNCATE TABLE [dbo].[Import_AddressBase];", conn);
                    comm.ExecuteNonQuery();
                    // Set up the staging DataTable. Column names/types mirror
                    // [dbo].[Import_AddressBase]; MaxLength values match the table's
                    // column sizes so oversized data fails here rather than in SQL Server.
                    DataTable dt = new DataTable();
                    dt.Columns.Add(new DataColumn { ColumnName = "UPRN", AllowDBNull = false, DataType = typeof(Int64) });
                    dt.Columns.Add(new DataColumn { ColumnName = "Postcode", AllowDBNull = false, DataType = typeof(String), MaxLength = 8 });
                    dt.Columns.Add(new DataColumn { ColumnName = "OrganisationName", AllowDBNull = true, DataType = typeof(String), MaxLength = 60 });
                    dt.Columns.Add(new DataColumn { ColumnName = "DepartmentName", AllowDBNull = true, DataType = typeof(String), MaxLength = 60 });
                    dt.Columns.Add(new DataColumn { ColumnName = "POBoxNumber", AllowDBNull = true, DataType = typeof(String), MaxLength = 6 });
                    dt.Columns.Add(new DataColumn { ColumnName = "BuildingName", AllowDBNull = true, DataType = typeof(String), MaxLength = 50 });
                    dt.Columns.Add(new DataColumn { ColumnName = "SubBuildingName", AllowDBNull = true, DataType = typeof(String), MaxLength = 30 });
                    dt.Columns.Add(new DataColumn { ColumnName = "BuildingNumber", AllowDBNull = true, DataType = typeof(Int32) });
                    dt.Columns.Add(new DataColumn { ColumnName = "DependentThoroughfareName", AllowDBNull = true, DataType = typeof(String), MaxLength = 80 });
                    dt.Columns.Add(new DataColumn { ColumnName = "ThoroughfareName", AllowDBNull = true, DataType = typeof(String), MaxLength = 80 });
                    dt.Columns.Add(new DataColumn { ColumnName = "Town", AllowDBNull = true, DataType = typeof(String), MaxLength = 30 });
                    dt.Columns.Add(new DataColumn { ColumnName = "DoubleDependentLocality", AllowDBNull = true, DataType = typeof(String), MaxLength = 35 });
                    dt.Columns.Add(new DataColumn { ColumnName = "DependentLocality", AllowDBNull = true, DataType = typeof(String), MaxLength = 35 });
                    dt.Columns.Add(new DataColumn { ColumnName = "Latitude", AllowDBNull = true, DataType = typeof(Decimal) });
                    dt.Columns.Add(new DataColumn { ColumnName = "Longitude", AllowDBNull = true, DataType = typeof(Decimal) });
                    Int32 i = 1;
                    foreach (String zipFile in zipFiles) {
                        // Write the progress message.
                        AddOrReplaceProgressMessage(_db, String.Format(unzippingMessageText, i, zipFiles.GetLength(0)));
                        // Unzip the archive into the upload folder, then delete it.
                        System.IO.Compression.ZipFile.ExtractToDirectory(zipFile, AddressBaseFolder);
                        TryToDeleteFile(zipFile);
                        String[] csvFiles = Directory.GetFiles(AddressBaseFolder, "*.csv");
                        if (csvFiles.GetLength(0) == 0) {
                            // NOTE(review): this runs on the background thread after the action
                            // has already returned, so the ModelState error is presumably never
                            // shown to the user - confirm intent.
                            ModelState.AddModelError("", AppGlobal.Language.GetText(this, "UnableToFindCSVFile", "Unable to find CSV file to import"));
                            DeleteProgressMessage();
                        }
                        foreach (String csvFile in csvFiles) {
                            // Check if we have stopped the import.
                            if (IsCancellingImport(new ProviderPortalEntities())) {
                                break;
                            }
                            // Remove all the rows staged from the previous file.
                            dt.Clear();
                            // Write the progress message.
                            // NOTE(review): i counts zip files but is incremented once per CSV
                            // file, so "{0} of {1}" can overshoot - confirm intended.
                            AddOrReplaceProgressMessage(_db, String.Format(importingMessageText, i++, zipFiles.GetLength(0)));
                            // Import the CSV file (no header row in AddressBase extracts).
                            using (CsvReader csv = new CsvReader(new StreamReader(csvFile))) {
                                csv.Configuration.HasHeaderRecord = false;
                                while (csv.Read()) {
                                    if (csv[CHANGE_TYPE] == "D") {
                                        // Change type "D" = deletion: removed immediately via EF.
                                        // NOTE(review): lookup uses Convert.ToInt32 while staging
                                        // uses Convert.ToInt64 - a UPRN above Int32.MaxValue would
                                        // throw here; confirm.
                                        AddressBase addressBase = _db.AddressBases.Find(Convert.ToInt32(csv[UPRN]));
                                        if (addressBase != null) {
                                            _db.Entry(addressBase).State = EntityState.Deleted;
                                            _db.SaveChanges();
                                        }
                                    } else {
                                        // Insert/update: stage the row for the bulk copy.
                                        DataRow dr = dt.NewRow();
                                        dr["UPRN"] = Convert.ToInt64(csv[UPRN]);
                                        dr["Postcode"] = csv[POSTCODE];
                                        dr["OrganisationName"] = csv[ORGANISATION_NAME];
                                        dr["DepartmentName"] = csv[DEPARTMENT_NAME];
                                        dr["POBoxNumber"] = csv[PO_BOX_NUMBER];
                                        dr["BuildingName"] = csv[BUILDING_NAME];
                                        dr["SubBuildingName"] = csv[SUB_BUILDING_NAME];
                                        dr["BuildingNumber"] = csv[BUILDING_NUMBER] == "" ? DBNull.Value : (Object)Convert.ToInt32(csv[BUILDING_NUMBER]);
                                        dr["DependentThoroughfareName"] = csv[DEPENDENT_THOROUGHFARE_NAME];
                                        dr["ThoroughfareName"] = csv[THOROUGHFARE_NAME];
                                        dr["Town"] = csv[POST_TOWN];
                                        dr["DoubleDependentLocality"] = csv[DOUBLE_DEPENDENT_LOCALITY];
                                        dr["DependentLocality"] = csv[DEPENDENT_LOCALITY];
                                        dr["Latitude"] = csv[LATITUDE] == "" ? (Object)DBNull.Value : Convert.ToDecimal(csv[LATITUDE]);
                                        dr["Longitude"] = csv[LONGITUDE] == "" ? (Object)DBNull.Value : Convert.ToDecimal(csv[LONGITUDE]);
                                        dt.Rows.Add(dr);
                                    }
                                    // Every 100 rows, check whether we are cancelling the import.
                                    if (csv.Row % 100 == 0 && IsCancellingImport(new ProviderPortalEntities())) {
                                        cancellingImport = true;
                                        break;
                                    }
                                }
                                // Explicit Dispose (redundant inside the using block) releases the
                                // file handle so the delete below can succeed straight away.
                                csv.Dispose();
                                // Delete the file to tidy up space as quickly as possible.
                                TryToDeleteFile(csvFile);
                            }
                            if (!cancellingImport) {
                                // Copy the staged rows to the Import_AddressBase table.
                                BulkImportData(conn, dt);
                            }
                        }
                    }
                    cancellingImport = IsCancellingImport(new ProviderPortalEntities());
                    if (!cancellingImport) {
                        // Merge the staged data into the AddressBase table.
                        // NOTE(review): "dest.SubBuildingname" casing relies on SQL Server's
                        // case-insensitive column name resolution.
                        AddOrReplaceProgressMessage(_db, mergingMessageText);
                        comm = new SqlCommand("MERGE [dbo].[AddressBase] dest USING [dbo].[Import_AddressBase] source ON dest.UPRN = source.UPRN WHEN MATCHED THEN UPDATE SET dest.Postcode = source.Postcode, dest.OrganisationName = source.OrganisationName, dest.DepartmentName = source.DepartmentName, dest.POBoxNumber = source.POBoxNumber, dest.BuildingName = source.BuildingName, dest.SubBuildingname = source.SubBuildingName, dest.BuildingNumber = source.BuildingNumber, dest.DependentThoroughfareName = source.DependentThoroughfareName, dest.ThoroughfareName = source.ThoroughfareName, dest.Town = source.Town, dest.DoubleDependentLocality = source.DoubleDependentLocality, dest.DependentLocality = source.DependentLocality, dest.Latitude = source.Latitude, dest.Longitude = source.Longitude WHEN NOT MATCHED THEN INSERT (UPRN, Postcode, OrganisationName, DepartmentName, POBoxNumber, BuildingName, SubBuildingName, BuildingNumber, DependentThoroughfareName, ThoroughfareName, Town, DoubleDependentLocality, DependentLocality, Latitude, Longitude) VALUES (source.UPRN, source.Postcode, source.OrganisationName, source.DepartmentName, source.POBoxNumber, source.BuildingName, source.SubBuildingName, source.BuildingNumber, source.DependentThoroughfareName, source.ThoroughfareName, source.Town, source.DoubleDependentLocality, source.DependentLocality, source.Latitude, source.Longitude);", conn) { CommandTimeout = 7200 /* 2 Hours */ };
                        comm.ExecuteNonQuery();
                    }
                    // Truncate the temporary import table.
                    if (!cancellingImport) {
                        AddOrReplaceProgressMessage(_db, removingTempDataMessageText);
                    }
                    // NOTE(review): unlike the GeoLocation import, this TRUNCATE runs even when
                    // cancelling (only the progress message above is conditional) - confirm intended.
                    comm = new SqlCommand("TRUNCATE TABLE [dbo].[Import_AddressBase];", conn) { CommandTimeout = 3600 /* 1 Hours */ };
                    comm.ExecuteNonQuery();
                    // Close the database.
                    conn.Close();
                    // Save timings and the audit record (skipped entirely when cancelled).
                    if (!cancellingImport) {
                        sw.Stop();
                        _db.Database.CommandTimeout = 3600; /* 1 Hour */
                        metadataUpload.DurationInMilliseconds = (int)sw.ElapsedMilliseconds;
                        metadataUpload.RowsAfter = _db.AddressBases.Count();
                        _db.MetadataUploads.Add(metadataUpload);
                        _db.SaveChanges();
                    }
                    // Delete all the uploaded and expanded files (if any still exist);
                    // best-effort, failures are deliberately ignored.
                    try {
                        foreach (FileInfo file in new DirectoryInfo(AddressBaseFolder).GetFiles()) {
                            file.Delete();
                        }
                    } catch {}
                    // Write the final Success or Cancelled message (marked complete).
                    AddOrReplaceProgressMessage(_db, cancellingImport ? importCancelledMessageText : importSuccessfulMessageText, true);
                } catch (Exception ex) {
                    // Log the error.
                    AppGlobal.Log.WriteError(String.Format("Error importing AddressBase: Message:{0}, Inner Message:{1}", ex.Message, ex.InnerException != null ? ex.InnerException.Message : ""));
                    // Write the error to display to the user, preferring the inner exception.
                    AddOrReplaceProgressMessage(new ProviderPortalEntities(), String.Format(importErrorMessageText, ex.InnerException != null ? ex.InnerException.Message : ex.Message), true);
                    // Delete all the uploaded and expanded files (if any still exist); best-effort.
                    try {
                        foreach (FileInfo file in new DirectoryInfo(AddressBaseFolder).GetFiles()) {
                            file.Delete();
                        }
                    } catch {}
                }
            }).Start();
        }
    }
    if (ModelState.IsValid) {
        return (RedirectToAction("Index"));
    }
    GetViewData();
    return (View("Index", new UploadAddressBaseModel()));
}