/// <summary>
/// Imports coal quality (program/stage/size-fraction/wash-fraction) results from a delimited
/// text stream into the assay tables, optionally detecting duplicate intervals and, when
/// doImportOverwrite is set, updating the existing results in place.
/// </summary>
internal void AddCoalQualityData(ModelImportStatus mos, Stream fileStream, FormatSpecification.ImportDataMap importMap,
    int batchSize, Action<string, double> UpdateStatus, int approxNumLines, string connectionString,
    Guid NKDProjectID, bool checkForDuplicates, bool doImportOverwrite)
{
    WorkflowProcedureSequenceNumber = 1;
    Guid? lastHoleID = new Guid();
    string lastStage = "";
    decimal lastFromDepth = -999999;
    decimal lastToDepth = -999999;
    bool commitToDB = true;
    DateTime currentUpdateTimestamp = DateTime.UtcNow;

    // First set up an assay group object - we can do this through the EDM.
    using (var entityObj = new NKDC(connectionString, null))
    {
        Guid agGuid = Guid.NewGuid();
        AssayGroup ag = new AssayGroup();
        ag.AssayGroupID = agGuid;
        ag.ProjectID = NKDProjectID;
        ag.AssayGroupName = "Manual import";
        ag.Comment = "From file " + importMap.mapOriginalDataFile;
        ag.Entered = currentUpdateTimestamp;
        ag.VersionUpdated = currentUpdateTimestamp;
        entityObj.AssayGroups.AddObject(ag);
        if (commitToDB) { entityObj.SaveChanges(); }

        // Set up the assay test columns - one of these for each test type.
        Dictionary<ColumnMap, Guid> resultsColumns = new Dictionary<ColumnMap, Guid>();
        Dictionary<Guid, AssayGroupTest> assayGroups = new Dictionary<Guid, AssayGroupTest>();
        ColumnMap cmProgram = null;
        ColumnMap cmStage = null;
        ColumnMap cmSizeFraction = null;
        ColumnMap cmWashFraction = null;
        foreach (ColumnMap cim in importMap.columnMap)
        {
            if (cim.targetColumnName.Trim().StartsWith("[RESULT"))
            {
                // This is a test category.
                resultsColumns.Add(cim, Guid.NewGuid());
            }
            else if (cim.targetColumnName.Trim().StartsWith("[PROGRAM")) { cmProgram = cim; }
            else if (cim.targetColumnName.Trim().StartsWith("[STAGE")) { cmStage = cim; }
            else if (cim.targetColumnName.Trim().StartsWith("[SIZE FRACTION")) { cmSizeFraction = cim; }
            else if (cim.targetColumnName.Trim().StartsWith("[WASH FRACTION")) { cmWashFraction = cim; }
        }

        UpdateStatus("Setting up assay tests ", 2);
        foreach (KeyValuePair<ColumnMap, Guid> kvp in resultsColumns)
        {
            ColumnMap cm = kvp.Key;
            Guid g = kvp.Value;
            AssayGroupTest xt = new AssayGroupTest();
            // Truncate long source column names (AssayTestName appears to be limited to 16 characters).
            string ss1 = "";
            if (cm.sourceColumnName != null && cm.sourceColumnName.Length > 15) { ss1 = cm.sourceColumnName.Substring(0, 16); }
            else { ss1 = cm.sourceColumnName; }
            Guid pid = FindParameter("AssayTypeName", cm.sourceColumnName);
            xt.ParameterID = pid;
            xt.AssayTestName = ss1;
            xt.AssayGroupID = agGuid;
            xt.AssayGroupTestID = g;
            xt.VersionUpdated = currentUpdateTimestamp;
            entityObj.AssayGroupTests.AddObject(xt);
            assayGroups.Add(g, xt);
            if (commitToDB) { entityObj.SaveChanges(); }
        }

        // Iterate through the data lines.
        int ct = 1;
        int linesRead = 0;
        SqlConnection connection = null;
        SqlConnection secondaryConnection = null;

        // Get a connection to the database.
        try
        {
            connection = new SqlConnection(connectionString);
            connection.Open();
            secondaryConnection = new SqlConnection(connectionString);
            secondaryConnection.Open();
            bool hasDuplicateIntervals = false;
            int tb = 0;
            int transactionBatchLimit = batchSize;

            // Open the file stream and read the first line.
            StreamReader sr = null;
            try { sr = new StreamReader(fileStream); }
            catch (Exception ex)
            {
                mos.AddErrorMessage("Error getting data stream for input data:\n" + ex.ToString());
                mos.finalErrorCode = ModelImportStatus.ERROR_LOADING_FILE;
            }

            string line = null;
            float pct = 0;
            float bct = 1;
            int repCount = 0;
            float fNumLines = (float)approxNumLines;
            Dictionary<string, Guid> holeIDLookups = new Dictionary<string, Guid>();
            Dictionary<string, int> columnIDX = new Dictionary<string, int>();
            int fkLookupCount = 0;
            BaseImportTools.PopulateCMapShortcut(importMap, columnIDX);
            ColumnMap headerCmap = importMap.FindItemsByTargetName("HeaderID");
            int seqNum = 1;
            if (sr != null)
            {
                while ((line = sr.ReadLine()) != null)
                {
                    repCount++;
                    pct = ((float)linesRead / (float)approxNumLines) * 100.0f;
                    bct++;
                    linesRead++;
                    if (ct >= importMap.dataStartLine)
                    {
                        // Digest a row of input data.
                        List<string> items = BaseImportTools.ParseTestLine(line, importMap.inputDelimiter);
                        Guid holeID = new Guid();
                        Decimal fromDepth = new Decimal(-9999999999);
                        Decimal toDepth = new Decimal(-9999999999);
                        string sampleNumber = null;
                        string sampleName = null;
                        string labBatchNumber = null;
                        string labsampleNumber = null;

                        // Find mapped values by name.
                        int idxVal = 0;

                        // -- Get the hole ID foreign key relation.
                        bool foundEntry = columnIDX.TryGetValue("HeaderID", out idxVal);
                        bool foundHole = false;
                        string holeName = "";
                        if (foundEntry)
                        {
                            string lookupByName = "HoleName";
                            string lookupValue = items[idxVal];
                            holeName = lookupValue;
                            bool lv = holeIDLookups.ContainsKey(lookupValue);
                            if (!lv)
                            {
                                string headerGUID = ForeignKeyUtils.FindFKValueInOther(lookupValue, headerCmap, secondaryConnection, false, lookupByName, NKDProjectID);
                                if (headerGUID == null)
                                {
                                    // The specified record was not found in the header table:
                                    // report on the issue and skip the line.
                                }
                                else
                                {
                                    foundHole = true;
                                    holeID = new Guid(headerGUID);
                                    holeIDLookups.Add(lookupValue, holeID);
                                    fkLookupCount++;
                                }
                            }
                            else
                            {
                                holeIDLookups.TryGetValue(lookupValue, out holeID);
                                foundHole = true;
                            }
                        }
                        if (!foundHole)
                        {
                            mos.AddErrorMessage("Failed to find hole " + holeName + ". Skipping record at line " + linesRead + ".");
                            mos.finalErrorCode = ModelImportStatus.DATA_CONSISTENCY_ERROR;
                            mos.recordsFailed++;
                            continue;
                        }
                        else
                        {
                            bool hasFrom = false;
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("FromDepth", out idxVal);
                            if (foundEntry)
                            {
                                string ii = items[idxVal];
                                Decimal val = 0;
                                bool isOk = Decimal.TryParse(ii, out val);
                                if (isOk) { fromDepth = val; hasFrom = true; }
                            }
                            bool hasTo = false;
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("ToDepth", out idxVal);
                            if (foundEntry)
                            {
                                string ii = items[idxVal];
                                Decimal val = 0;
                                bool isOk = Decimal.TryParse(ii, out val);
                                if (isOk) { toDepth = val; hasTo = true; }
                            }
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("SampleID", out idxVal);
                            if (foundEntry) { sampleNumber = items[idxVal]; }
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("SampleName", out idxVal);
                            if (foundEntry) { sampleName = items[idxVal]; }
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("LabSampleName", out idxVal);
                            if (foundEntry) { labsampleNumber = items[idxVal]; }
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("LabBatchNumber", out idxVal);
                            if (foundEntry) { labBatchNumber = items[idxVal]; }

                            // Now identify the program, stage, size fraction and wash fraction.
                            string programType = null;
                            if (cmProgram != null) { programType = items[cmProgram.sourceColumnNumber]; }
                            string stage = null;
                            if (cmStage != null) { stage = items[cmStage.sourceColumnNumber]; }
                            string sizeFraction = null;
                            if (cmSizeFraction != null) { sizeFraction = items[cmSizeFraction.sourceColumnNumber]; }
                            string washFraction = null;
                            if (cmWashFraction != null) { washFraction = items[cmWashFraction.sourceColumnNumber]; }

                            IQueryable<AssayGroupSubsample> toUpdate = null;
                            bool isDuplicate = false;
                            var washID = (from o in entityObj.Parameters
                                          where o.ParameterType == "AssayPrecondition" && o.ParameterName == "Wash fraction"
                                          select o.ParameterID).FirstOrDefault();
                            var sizeID = (from o in entityObj.Parameters
                                          where o.ParameterType == "AssayPrecondition" && o.ParameterName == "Size fraction"
                                          select o.ParameterID).FirstOrDefault();
                            if (checkForDuplicates)
                            {
                                if (hasFrom && hasTo)
                                {
                                    // Here we need to check that the interval is not duplicated.
                                    // Guard against unmapped fraction columns before using them in the query.
                                    bool noSizeFraction = sizeFraction == null || sizeFraction.Trim() == "";
                                    bool noWashFraction = washFraction == null || washFraction.Trim() == "";
                                    toUpdate = (from o in entityObj.AssayGroupSubsamples
                                                where o.OriginalSample.HeaderID == holeID
                                                   && o.OriginalSample.FromDepth == fromDepth
                                                   && o.OriginalSample.ToDepth == toDepth
                                                   && o.AssayGroupWorkflowProcedure.WorkflowStateName == stage
                                                   && o.AssayGroupWorkflowProcedure.AssayGroupWorkflow.WorkflowName == programType
                                                   && (noSizeFraction || o.AssayGroupSubsamplePrecondition.Any(f => f.PreconditionName == sizeFraction && f.PreconditionParameterID == sizeID))
                                                   && (noWashFraction || o.AssayGroupSubsamplePrecondition.Any(f => f.PreconditionName == washFraction && f.PreconditionParameterID == washID))
                                                select o);
                                    if (toUpdate.Any()) { isDuplicate = true; }
                                }
                                if (isDuplicate)
                                {
                                    hasDuplicateIntervals = true;
                                    mos.AddWarningMessage("Duplicate interval for hole " + holeName + " at depth " + fromDepth + " to " + toDepth);
                                    UpdateStatus("Duplicate interval at " + holeName + " " + fromDepth + ", " + toDepth, pct);
                                    if (!doImportOverwrite) { mos.recordsFailed++; continue; }
                                    else { foreach (var upd in toUpdate) { upd.Sequence = seqNum; } }
                                }
                            }

                            Sample xs = null;
                            if (isDuplicate) { xs = toUpdate.First().OriginalSample; }
                            else
                            {
                                xs = (from o in entityObj.Samples
                                      where o.HeaderID == holeID && o.FromDepth == fromDepth && o.ToDepth == toDepth
                                      select o).FirstOrDefault();
                                if (xs == null)
                                {
                                    xs = new Sample();
                                    xs.SampleID = Guid.NewGuid();
                                    xs.SampleName = sampleName;
                                    xs.SampleNumber = sampleNumber;
                                    xs.FromDepth = fromDepth;
                                    xs.ToDepth = toDepth;
                                    xs.HeaderID = holeID;
                                    xs.VersionUpdated = currentUpdateTimestamp;
                                    entityObj.Samples.AddObject(xs);
                                }
                            }

                            // See if the interval has changed, whereby we will need to reset the sequence ID.
                            if (holeID != lastHoleID)
                            {
                                if (fromDepth != lastFromDepth && toDepth != lastToDepth)
                                {
                                    // New interval.
                                    WorkflowProcedureSequenceNumber = 1;
                                }
                            }
                            string stageKey = (stage == null) ? "" : stage.Trim();
                            if (!stageKey.Equals(lastStage)) { WorkflowProcedureSequenceNumber = 1; }
                            lastHoleID = holeID;
                            lastFromDepth = fromDepth;
                            lastToDepth = toDepth;
                            lastStage = stageKey;

                            if (!isDuplicate)
                            {
                                AssayGroupWorkflow agWorkflowProgram = GetAssayGroupWorkflow(entityObj, programType, agGuid);
                                AssayGroupWorkflowProcedure agWorkflowStage = GetAssayGroupWorkflowProcedure(entityObj, stage, agWorkflowProgram);
                                AssayGroupSubsample agSS = new AssayGroupSubsample();
                                agSS.AssayGroupID = agGuid;
                                agSS.FromDepth = fromDepth;
                                agSS.ToDepth = toDepth;
                                agSS.Sequence = seqNum;
                                agSS.AssayGroupSubsampleID = Guid.NewGuid();
                                agSS.SampleAntecedentID = xs.SampleID;
                                agSS.OriginalSample = xs;
                                agSS.AssayGroupWorkflowProcedureID = agWorkflowStage.AssayGroupWorkflowProcedureID;
                                agSS.AssayGroupWorkflowProcedure = agWorkflowStage;
                                entityObj.AssayGroupSubsamples.AddObject(agSS);
                                entityObj.SaveChanges();
                                AssayGroupSubsamplePrecondition agSizeFraction = GetAssayGroupPrecondition(entityObj, sizeFraction, "Size fraction", agSS.AssayGroupSubsampleID);
                                AssayGroupSubsamplePrecondition agWashFraction = GetAssayGroupPrecondition(entityObj, washFraction, "Wash fraction", agSS.AssayGroupSubsampleID);
                                toUpdate = (new[] { agSS }).AsQueryable();
                            }
                            if (isDuplicate) { entityObj.SaveChanges(); }

                            foreach (var upd in toUpdate.ToList())
                            {
                                // Now pick out all the mapped values: iterate over all [RESULT] columns.
                                foreach (KeyValuePair<ColumnMap, Guid> kvp in resultsColumns)
                                {
                                    ColumnMap cm = kvp.Key;
                                    Guid g = kvp.Value; // This is the AssayGroupTestID.
                                    AssayGroupTestResult testResult = null;
                                    Decimal result = default(decimal);
                                    string resultText = null;
                                    bool parsedOK = false;
                                    if (items.Count > cm.sourceColumnNumber)
                                    {
                                        parsedOK = Decimal.TryParse(items[cm.sourceColumnNumber], out result);
                                        resultText = items[cm.sourceColumnNumber];
                                    }
                                    else
                                    {
                                        mos.AddWarningMessage("Line " + linesRead + " contains too few columns to read " + cm.sourceColumnName);
                                    }
                                    if (string.IsNullOrWhiteSpace(resultText)) { continue; }
                                    if (!isDuplicate)
                                    {
                                        testResult = new AssayGroupTestResult();
                                        testResult.AssayGroupSubsampleID = upd.AssayGroupSubsampleID;
                                        testResult.AssayGroupTestResultID = Guid.NewGuid();
                                        testResult.AssayGroupTestID = g;
                                        testResult.SampleID = xs.SampleID;
                                        testResult.LabBatchNumber = labBatchNumber;
                                        entityObj.AssayGroupTestResults.AddObject(testResult);
                                        testResult.VersionUpdated = currentUpdateTimestamp;
                                        if (parsedOK) { testResult.LabResult = result; }
                                        testResult.LabResultText = resultText;
                                        mos.recordsAdded++;
                                    }
                                    else
                                    {
                                        // Batch-update the existing result for this subsample and test.
                                        var tempRes = (parsedOK) ? result : default(decimal?);
                                        entityObj.AssayGroupTestResults
                                            .Where(f => f.AssayGroupSubsampleID == upd.AssayGroupSubsampleID
                                                     && f.AssayGroupTest.Parameter.ParameterName == cm.sourceColumnName)
                                            .Update((f) => new AssayGroupTestResult
                                            {
                                                LabResult = tempRes,
                                                LabResultText = resultText,
                                                VersionUpdated = currentUpdateTimestamp
                                            });
                                        mos.recordsUpdated++;
                                    }
                                }
                            }
                            seqNum++;
                            tb++;
                        }
                    }
                    if (commitToDB)
                    {
                        if (tb == transactionBatchLimit)
                        {
                            entityObj.SaveChanges();
                            UpdateStatus("Writing assays to DB (" + ct + " entries)", pct);
                            tb = 0;
                        }
                    }
                    ct++;
                }
                entityObj.SaveChanges();
            }
            if (hasDuplicateIntervals) { mos.finalErrorCode = ModelImportStatus.DATA_CONSISTENCY_ERROR; }
            mos.linesReadFromSource = ct - 1;
            UpdateStatus("Finished writing coal quality data to database.", 0);
        }
        catch (Exception ex)
        {
            UpdateStatus("Error writing qualities to database ", 0);
            mos.AddErrorMessage("Error writing data at line " + linesRead + ":\n" + ex.ToString());
            mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
        }
        finally
        {
            try
            {
                connection.Close();
                secondaryConnection.Close();
                fileStream.Close();
            }
            catch (Exception ex)
            {
                mos.AddErrorMessage("Error closing connection to database:\n" + ex.ToString());
                mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
            }
        }
        mos.linesReadFromSource = linesRead;
    }
}
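/// <summary>
/// Re-applies rejected collar lines as UPDATE statements against existing header records.
/// Invoked from AddCollarData when duplicate holes are found and overwrite is enabled.
/// Note: values are concatenated into the SQL text rather than parameterised, matching
/// the insert path in AddCollarData; the import file is assumed to be trusted input.
/// </summary>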
private void OverwriteCollarRecord(ModelImportStatus mos, List<List<string>> rejectedLines, ImportDataMap importMap,
    string connectionString, Guid NKDProjectID, Action<string, double> UpdateStatus,
    Dictionary<string, string> holeWarningMessages)
{
    SqlConnection connection = null;
    SqlConnection secondaryConnection = null;
    try
    {
        connection = new SqlConnection(connectionString);
        connection.Open();
        secondaryConnection = new SqlConnection(connectionString);
        secondaryConnection.Open();
        int numCommits = 0;
        SqlTransaction trans = connection.BeginTransaction();
        int tb = 0;
        int transactionBatchLimit = 10;
        float bct = 1;
        int repCount = 0;
        float fNumLines = (float)rejectedLines.Count();

        // Get the column containing the hole name.
        ColumnMap cmapHeader = importMap.FindItemsByTargetName("HoleName");
        cmapHeader.importDataType = ImportDataMap.TEXTDATATYPE;
        int headerIDX = cmapHeader.sourceColumnNumber;
        int numberOfHolesAdded = 0;
        int linesRead = 0;
        int ct = 1;

        // Get all of the header IDs in one go before we try the update.
        Dictionary<string, Guid> holeIDLookups = CollarQueries.FindHeaderGuidsForProject(NKDProjectID);

        // This loop makes sure that any GUIDs are properly typed so that a text string
        // for that GUID can be passed into the query.
        foreach (ColumnMap cmap in importMap.columnMap)
        {
            if (cmap.hasFKRelation) { cmap.importDataType = ImportDataMap.TEXTDATATYPE; }
        }

        foreach (List<string> columnData in rejectedLines)
        {
            linesRead++;
            repCount++;
            bct++;
            string statementPart1 = "UPDATE " + importMap.mapTargetPrimaryTable + " ";
            string clauseValues = "";

            // Using the column map, pick out the hole name field and see if it is in the database already.
            string headerNameItem = columnData[headerIDX];
            string headerGUID = "";
            bool lv = holeIDLookups.ContainsKey(headerNameItem);
            if (!lv)
            {
                // No hole ID with this name - should not happen, since these lines were
                // rejected as duplicates of existing holes.
            }
            else
            {
                Guid holeGuid = new Guid();
                holeIDLookups.TryGetValue(headerNameItem, out holeGuid);
                headerGUID = holeGuid.ToString();
            }

            #region map search
            // Now pick out all the mapped values.
            foreach (ColumnMap cmap in importMap.columnMap)
            {
                bool isFKColumn = cmap.hasFKRelation;
                int colID = cmap.sourceColumnNumber;
                string columnValue = cmap.defaultValue;
                if (colID >= 0) { columnValue = columnData[colID]; }
                string targetCol = cmap.targetColumnName;
                // Ignore mapped hole name and project ID columns.
                if (targetCol.Trim().Equals("HoleName") || targetCol.Trim().Equals("ProjectID")) { continue; }
                string targetTable = cmap.targetColumnTable;
                clauseValues += "" + targetTable + "." + targetCol + "=";
                if (isFKColumn)
                {
                    // Go and search for the appropriate value from the foreign key table.
                    string newValue = ForeignKeyUtils.FindFKValueInDictionary(columnValue, cmap, secondaryConnection, true);
                    if (newValue == null) { clauseValues += "NULL,"; }
                    else { clauseValues += "\'" + newValue + "\',"; }
                }
                else
                {
                    if (cmap.importDataType.Equals(ImportDataMap.NUMERICDATATYPE))
                    {
                        if (columnValue.Equals("-") || columnValue.Trim().Length == 0)
                        {
                            if (cmap.defaultValue != null && cmap.defaultValue.Length > 0) { columnValue = cmap.defaultValue; }
                            else { columnValue = "NULL"; }
                        }
                    }
                    else if (cmap.importDataType.Equals(ImportDataMap.TIMESTAMPDATATYPE))
                    {
                        DateTime dtr = new DateTime();
                        bool parsed = DateTime.TryParse(columnValue, out dtr);
                        if (parsed) { columnValue = "\'" + dtr.ToString("yyyy-MM-dd hh:mm:ss tt") + "\'"; }
                        else { columnValue = "NULL"; }
                    }
                    else { columnValue = "\'" + columnValue + "\'"; }
                    clauseValues += columnValue + ",";
                }
            }
            #endregion

            // Trim the final comma from the SET clause.
            clauseValues = clauseValues.Substring(0, clauseValues.Length - 1);
            string commandText = statementPart1 + "SET " + clauseValues +
                " WHERE HeaderID=\'" + headerGUID + "\' AND ProjectID=\'" + NKDProjectID.ToString() + "\';";
            SqlCommand sqc = new SqlCommand(commandText, connection, trans);
            holeWarningMessages[headerNameItem] = "Hole " + headerNameItem + " was overwritten with new data";
            numberOfHolesAdded++;
            if (commitToDB) { sqc.ExecuteNonQuery(); }
            tb++;
            if (tb == transactionBatchLimit)
            {
                // Commit the batch, then renew the transaction.
                if (commitToDB)
                {
                    trans.Commit();
                    numCommits++;
                    trans = connection.BeginTransaction();
                }
                // Reset the counter.
                tb = 0;
            }
            ct++;
        }
        if (tb > 0)
        {
            if (commitToDB) { trans.Commit(); }
            numCommits++;
        }
        mos.recordsUpdated = numberOfHolesAdded;
        UpdateStatus("Finished writing collars to database ", 100.0);
    }
    catch (Exception ex)
    {
        UpdateStatus("Error writing collars to database ", 0);
        mos.AddErrorMessage("Error writing collar data at line " + rejectedLines.Count + ":\n" + ex.ToString());
        mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
    }
    finally
    {
        try
        {
            connection.Close();
            secondaryConnection.Close();
        }
        catch (Exception ex)
        {
            mos.AddErrorMessage("Error closing connection to database:\n" + ex.ToString());
            mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
        }
    }
}
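/// <summary>
/// Imports assay results from a delimited text stream. One AssayGroupTest is created per
/// mapped [ASSAY...] column, and one Sample plus a set of AssayGroupTestResults per data
/// line. Records split across physical lines are re-joined before parsing. Duplicate
/// intervals are skipped unless doImportOverwrite is set, in which case results are
/// attached to the existing sample.
/// </summary>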
internal void AddAssayData(ModelImportStatus mos, Stream fileStream, FormatSpecification.ImportDataMap importMap,
    int batchSize, Action<string, double> UpdateStatus, int approxNumLines, string connectionString,
    Guid NKDProjectID, bool checkForDuplicates, bool doImportOverwrite)
{
    bool commitToDB = true;
    DateTime currentUpdateTimestamp = DateTime.UtcNow;

    // First set up an assay group object - we can do this through the EDM.
    using (var entityObj = new NKDC(connectionString, null))
    {
        Guid agGuid = Guid.NewGuid();
        AssayGroup ag = new AssayGroup();
        ag.AssayGroupID = agGuid;
        ag.ProjectID = NKDProjectID;
        ag.AssayGroupName = "Manual import";
        ag.Comment = "From file " + importMap.mapOriginalDataFile;
        ag.Entered = currentUpdateTimestamp;
        ag.VersionUpdated = currentUpdateTimestamp;
        entityObj.AssayGroups.AddObject(ag);
        if (commitToDB) { entityObj.SaveChanges(); }

        // Set up the assay test columns - one of these for each test type.
        Dictionary<ColumnMap, Guid> resultsColumns = new Dictionary<ColumnMap, Guid>();
        Dictionary<Guid, AssayGroupTest> assayGroups = new Dictionary<Guid, AssayGroupTest>();
        foreach (ColumnMap cim in importMap.columnMap)
        {
            if (cim.targetColumnName.Trim().StartsWith("[ASSAY"))
            {
                // This is a test category.
                resultsColumns.Add(cim, Guid.NewGuid());
            }
        }
        UpdateStatus("Setting up assay tests ", 2);
        foreach (KeyValuePair<ColumnMap, Guid> kvp in resultsColumns)
        {
            ColumnMap cm = kvp.Key;
            Guid g = kvp.Value;
            AssayGroupTest xt = new AssayGroupTest();
            // Truncate long source column names (AssayTestName appears to be limited to 16 characters).
            string ss1 = "";
            if (cm.sourceColumnName != null && cm.sourceColumnName.Length > 15) { ss1 = cm.sourceColumnName.Substring(0, 16); }
            else { ss1 = cm.sourceColumnName; }
            Guid pid = FindParameterForAssayTypeName(cm.sourceColumnName);
            xt.ParameterID = pid;
            xt.AssayTestName = ss1;
            xt.AssayGroupID = agGuid;
            xt.AssayGroupTestID = g;
            xt.VersionUpdated = currentUpdateTimestamp;
            entityObj.AssayGroupTests.AddObject(xt);
            assayGroups.Add(g, xt);
            if (commitToDB) { entityObj.SaveChanges(); }
        }

        // Iterate through the data lines.
        int ct = 1;
        int linesRead = 0;
        SqlConnection connection = null;
        SqlConnection secondaryConnection = null;
        string line = null;

        // Get a connection to the database.
        try
        {
            connection = new SqlConnection(connectionString);
            connection.Open();
            secondaryConnection = new SqlConnection(connectionString);
            secondaryConnection.Open();
            bool hasDuplicateIntervals = false;
            int tb = 0;
            int transactionBatchLimit = batchSize;

            // Open the file stream and read the first line.
            StreamReader sr = null;
            try { sr = new StreamReader(fileStream); }
            catch (Exception ex)
            {
                mos.AddErrorMessage("Error getting data stream for input data:\n" + ex.ToString());
                mos.finalErrorCode = ModelImportStatus.ERROR_LOADING_FILE;
            }
            float pct = 0;
            float bct = 1;
            int repCount = 0;
            float fNumLines = (float)approxNumLines;
            Dictionary<string, Guid> holeIDLookups = new Dictionary<string, Guid>();
            Dictionary<string, int> columnIDX = new Dictionary<string, int>();
            int fkLookupCount = 0;
            BaseImportTools.PopulateCMapShortcut(importMap, columnIDX);
            ColumnMap headerCmap = importMap.FindItemsByTargetName("HeaderID");
            AssayQueries assayQueries = new AssayQueries();
            List<string> items = new List<string>();
            if (sr != null)
            {
                while ((line = sr.ReadLine()) != null)
                {
                    repCount++;
                    pct = ((float)linesRead / (float)approxNumLines) * 100.0f;
                    bct++;
                    linesRead++;
                    if (ct >= importMap.dataStartLine)
                    {
                        // A record may be split across physical lines (e.g. a quoted field
                        // containing a newline), so join parsed fragments until the expected
                        // column count is reached.
                        var append = BaseImportTools.ParseTestLine(line, importMap.inputDelimiter);
                        if (items.Count == 0 || append.Count == importMap.MaxColumns) { items = append; }
                        else if (items.Count < importMap.MaxColumns)
                        {
                            items[items.Count - 1] = items[items.Count - 1] + append[0];
                            items.AddRange(append.Skip(1));
                        }
                        if (items.Count < importMap.MaxColumns)
                        {
                            mos.AddWarningMessage(string.Format("Bad CSV file, attempted to join....{0}", linesRead));
                            continue;
                        }
                        else if (items.Count > importMap.MaxColumns)
                        {
                            mos.AddWarningMessage(string.Format("FAILED! Line {0}. Bad CSV file, attempted to join.", linesRead));
                            items.Clear();
                            continue;
                        }

                        // Digest a row of input data.
                        Guid holeID = new Guid();
                        Decimal? fromDepth = null;
                        Decimal? toDepth = null;
                        string sampleNumber = null;
                        string labBatchNumber = null;
                        string labsampleNumber = null;
                        Decimal? sampleMassKg = null;
                        Decimal? dryMassKg = null;
                        string standardSampleTypeName = null;

                        // Find mapped values by name.
                        int idxVal = 0;

                        // -- Get the hole ID foreign key relation.
                        bool foundEntry = columnIDX.TryGetValue("HeaderID", out idxVal);
                        bool foundHole = false;
                        string holeName = "";
                        if (foundEntry)
                        {
                            string lookupByName = "HoleName";
                            string lookupValue = items[idxVal];
                            holeName = lookupValue;
                            bool lv = holeIDLookups.ContainsKey(lookupValue);
                            if (!lv)
                            {
                                string headerGUID = ForeignKeyUtils.FindFKValueInOther(lookupValue, headerCmap, secondaryConnection, false, lookupByName, NKDProjectID);
                                if (headerGUID == null)
                                {
                                    // The specified record was not found in the header table:
                                    // report on the issue and skip the line.
                                }
                                else
                                {
                                    foundHole = true;
                                    holeID = new Guid(headerGUID);
                                    holeIDLookups.Add(lookupValue, holeID);
                                    fkLookupCount++;
                                }
                            }
                            else
                            {
                                holeIDLookups.TryGetValue(lookupValue, out holeID);
                                foundHole = true;
                            }
                        }
                        if (!foundHole)
                        {
                            mos.AddErrorMessage("Failed to find hole " + holeName + ". Skipping record at line " + linesRead + ".");
                            mos.finalErrorCode = ModelImportStatus.DATA_CONSISTENCY_ERROR;
                            mos.recordsFailed++;
                            // Clear the joined row so the next line starts fresh.
                            items.Clear();
                            continue;
                        }
                        else
                        {
                            bool hasFrom = false;
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("FromDepth", out idxVal);
                            if (foundEntry)
                            {
                                string ii = items[idxVal];
                                Decimal val = 0;
                                bool isOk = Decimal.TryParse(ii, out val);
                                if (isOk) { fromDepth = val; hasFrom = true; }
                            }
                            bool hasTo = false;
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("ToDepth", out idxVal);
                            if (foundEntry)
                            {
                                string ii = items[idxVal];
                                Decimal val = 0;
                                bool isOk = Decimal.TryParse(ii, out val);
                                if (isOk) { toDepth = val; hasTo = true; }
                            }
                            List<Sample> duplicateList = null;
                            bool isDuplicateInterval = false;
                            if (checkForDuplicates)
                            {
                                if (hasFrom && hasTo)
                                {
                                    // Here we need to check that the interval is not duplicated.
                                    duplicateList = assayQueries.CheckForDuplicate(holeID, fromDepth, toDepth);
                                    if (duplicateList.Count > 0) { isDuplicateInterval = true; }
                                }
                                if (isDuplicateInterval)
                                {
                                    hasDuplicateIntervals = true;
                                    mos.AddWarningMessage("Duplicate interval for hole " + holeName + " at depth " + fromDepth + " to " + toDepth);
                                    UpdateStatus("Duplicate interval at " + holeName + " " + fromDepth + ", " + toDepth, pct);
                                    if (!doImportOverwrite)
                                    {
                                        mos.recordsFailed++;
                                        items.Clear();
                                        continue;
                                    }
                                }
                            }
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("SampleNumber", out idxVal);
                            if (foundEntry) { sampleNumber = items[idxVal]; }
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("LabSampleName", out idxVal);
                            if (foundEntry) { labsampleNumber = items[idxVal]; }
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("LabBatchNumber", out idxVal);
                            if (foundEntry) { labBatchNumber = items[idxVal]; }
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("SampleMassKg", out idxVal);
                            if (foundEntry)
                            {
                                Decimal val = 0;
                                if (Decimal.TryParse(items[idxVal], out val)) { sampleMassKg = val; }
                            }
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("DryMassKg", out idxVal);
                            if (foundEntry)
                            {
                                Decimal val = 0;
                                if (Decimal.TryParse(items[idxVal], out val)) { dryMassKg = val; }
                            }
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("StandardSampleTypeName", out idxVal);
                            if (foundEntry) { standardSampleTypeName = items[idxVal]; }

                            Sample xs;
                            if (isDuplicateInterval) { xs = duplicateList.First(); }
                            else
                            {
                                xs = new Sample();
                                xs.SampleID = Guid.NewGuid();
                                xs.FromDepth = fromDepth;
                                xs.ToDepth = toDepth;
                                xs.HeaderID = holeID;
                                xs.VersionUpdated = currentUpdateTimestamp;
                                xs.SampleNumber = sampleNumber;
                                xs.SampleMassKg = sampleMassKg;
                                if (!string.IsNullOrWhiteSpace(standardSampleTypeName))
                                {
                                    var t = entityObj.DictionarySampleTypes
                                        .Select(f => new { f.SampleTypeID, f.StandardSampleTypeName })
                                        .FirstOrDefault(f => f.StandardSampleTypeName == standardSampleTypeName);
                                    if (t != null) { xs.SampleTypeID = t.SampleTypeID; }
                                }
                                xs.DryMassKg = dryMassKg;
                            }

                            // Now pick out all the mapped values: iterate over all [ASSAY RESULT] columns.
                            bool assayUpdated = false; // Note: currently never set; duplicate results are re-added rather than updated.
                            bool assayAdded = false;
                            var results = new List<AssayGroupTestResult>();
                            foreach (KeyValuePair<ColumnMap, Guid> kvp in resultsColumns)
                            {
                                ColumnMap cm = kvp.Key;
                                Guid g = kvp.Value; // This is the AssayGroupTestID.
                                AssayGroupTestResult testResult = new AssayGroupTestResult();
                                testResult.AssayGroupTestResultID = Guid.NewGuid();
                                testResult.AssayGroupTestID = g;
                                testResult.SampleID = xs.SampleID;
                                testResult.VersionUpdated = currentUpdateTimestamp;
                                testResult.LabBatchNumber = labBatchNumber;
                                testResult.LabSampleName = labsampleNumber;
                                Decimal result = new Decimal();
                                if (items.Count > cm.sourceColumnNumber)
                                {
                                    bool parsedOK = Decimal.TryParse(items[cm.sourceColumnNumber], out result);
                                    if (parsedOK) { testResult.LabResult = result; }
                                    testResult.LabResultText = items[cm.sourceColumnNumber];
                                }
                                else
                                {
                                    mos.AddWarningMessage("Line " + linesRead + " contains too few columns to read " + cm.sourceColumnName);
                                }
                                results.Add(testResult);
                                assayAdded = true;
                            }
                            var resultsToSave = (from o in results where !string.IsNullOrWhiteSpace(o.LabResultText) select o);
                            if (!resultsToSave.Any())
                            {
                                items.Clear();
                                continue;
                            }
                            if (!isDuplicateInterval) { entityObj.Samples.AddObject(xs); }
                            foreach (var save in resultsToSave) { entityObj.AssayGroupTestResults.AddObject(save); }
                            if (assayAdded) { mos.recordsAdded++; }
                            if (assayUpdated) { mos.recordsUpdated++; }
                            tb++;
                        }
                    }
                    if (commitToDB)
                    {
                        if (tb == transactionBatchLimit)
                        {
                            entityObj.SaveChanges();
                            UpdateStatus("Writing assays to DB (" + ct + " entries)", pct);
                            tb = 0;
                        }
                    }
                    ct++;
                    items.Clear();
                }
                entityObj.SaveChanges();
            }
            if (hasDuplicateIntervals) { mos.finalErrorCode = ModelImportStatus.DATA_CONSISTENCY_ERROR; }
            mos.linesReadFromSource = ct - 1;
            UpdateStatus("Finished writing assays to database.", 0);
        }
        catch (Exception ex)
        {
            UpdateStatus("Error writing assays to database ", 0);
            mos.AddErrorMessage("Error writing assay data at line " + linesRead + ":\n" + line + "\n\n" + ex.ToString());
            mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
        }
        finally
        {
            try
            {
                connection.Close();
                secondaryConnection.Close();
                fileStream.Close();
            }
            catch (Exception ex)
            {
                mos.AddErrorMessage("Error closing connection to database:\n" + ex.ToString());
                mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
            }
        }
        mos.linesReadFromSource = linesRead;
    }
}
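/// <summary>
/// Imports collar (drill-hole header) records via generated INSERT statements, committed
/// in batches of batchSize. Lines whose hole name repeats within the file are skipped;
/// lines whose hole already exists in the project are collected and, when overwrite is
/// set, replayed through OverwriteCollarRecord as updates.
/// </summary>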
internal void AddCollarData(ModelImportStatus mos, Stream fileStream, FormatSpecification.ImportDataMap importMap,
    int batchSize, Action<string, double> UpdateStatus, int approxNumLines, string connectionString,
    List<string> existingHoleNames, Guid NKDProjectID, bool overwrite)
{
    bool hasDuplicateHoles = false;
    // Iterate through the data lines.
    int ct = 1;
    int linesRead = 0;
    SqlConnection connection = null;
    SqlConnection secondaryConnection = null;
    Dictionary<string, int> uniqueHoleNames = new Dictionary<string, int>();
    List<List<string>> rejectedLines = new List<List<string>>();
    Dictionary<string, string> holeWarningMessages = new Dictionary<string, string>();

    // This loop makes sure that any GUIDs are properly typed so that a text string
    // for that GUID can be passed into the query.
    foreach (ColumnMap cmap in importMap.columnMap)
    {
        if (cmap.hasFKRelation) { cmap.importDataType = ImportDataMap.TEXTDATATYPE; }
    }

    // Get a connection to the database.
    try
    {
        connection = new SqlConnection(connectionString);
        connection.Open();
        secondaryConnection = new SqlConnection(connectionString);
        secondaryConnection.Open();
        int numCommits = 0;
        SqlTransaction trans = connection.BeginTransaction();
        int tb = 0;
        int transactionBatchLimit = batchSize;

        // Open the file stream and read the first line.
        StreamReader sr = null;
        try { sr = new StreamReader(fileStream); }
        catch (Exception ex)
        {
            mos.AddErrorMessage("Error getting data stream for input data:\n" + ex.ToString());
            mos.finalErrorCode = ModelImportStatus.ERROR_LOADING_FILE;
        }
        string line = null;
        float bct = 1;
        int repCount = 0;
        float fNumLines = (float)approxNumLines;

        // Get the column containing the hole name.
        ColumnMap cmapHeader = importMap.FindItemsByTargetName("HoleName");
        cmapHeader.importDataType = ImportDataMap.TEXTDATATYPE;
        int headerIDX = cmapHeader.sourceColumnNumber;
        int numberOfHolesAdded = 0;
        if (sr != null)
        {
            while ((line = sr.ReadLine()) != null)
            {
                repCount++;
                bct++;
                linesRead++;
                if (ct >= importMap.dataStartLine)
                {
                    string statementPart1 = "INSERT INTO " + importMap.mapTargetPrimaryTable + " ";
                    string clauseValues = "";
                    string clauseParameters = "";
                    List<string> items = parseTestLine(line, importMap.inputDelimiter);

                    // Using the column map, pick out the hole name field and see if it is in the database already.
                    string headerNameItem = items[headerIDX];

                    // Check whether this hole name is a duplicate within the file.
                    bool hasHolenameEntryInFile = uniqueHoleNames.ContainsKey(headerNameItem.Trim());
                    if (hasHolenameEntryInFile)
                    {
                        int val = uniqueHoleNames[headerNameItem.Trim()] + 1;
                        uniqueHoleNames[headerNameItem.Trim()] = val;
                        holeWarningMessages.Add(headerNameItem + " (" + val + ")",
                            "Hole: " + headerNameItem + " at line " + linesRead + " already exists in the input file - skipping.");
                        mos.recordsFailed++;
                        continue;
                    }
                    else { uniqueHoleNames.Add(headerNameItem.Trim(), 1); }

                    // Check whether this hole already exists in the database.
                    if (existingHoleNames.Contains(headerNameItem.Trim()))
                    {
                        if (!holeWarningMessages.ContainsKey(headerNameItem))
                        {
                            holeWarningMessages.Add(headerNameItem,
                                "Hole: " + headerNameItem + " already exists in this project. Skipping record at line " + linesRead + ".");
                            if (!overwrite) { mos.recordsFailed++; }
                        }
                        mos.finalErrorCode = ModelImportStatus.DATA_CONSISTENCY_ERROR;
                        hasDuplicateHoles = true;
                        rejectedLines.Add(items);
                        continue;
                    }

                    #region map search
                    // Now pick out all the mapped values.
                    foreach (ColumnMap cmap in importMap.columnMap)
                    {
                        bool isFKColumn = cmap.hasFKRelation;
                        int colID = cmap.sourceColumnNumber;
                        string columnValue = cmap.defaultValue;
                        if (colID >= 0) { columnValue = items[colID]; }
                        string targetCol = cmap.targetColumnName;
                        string targetTable = cmap.targetColumnTable;
                        clauseValues += "" + targetTable + "." + targetCol + ",";
                        if (isFKColumn)
                        {
                            // Go and search for the appropriate value from the foreign key table.
                            string newValue = ForeignKeyUtils.FindFKValueInDictionary(columnValue, cmap, secondaryConnection, true);
                            if (newValue == null) { clauseParameters += "NULL,"; }
                            else { clauseParameters += "\'" + newValue + "\',"; }
                        }
                        else
                        {
                            if (cmap.importDataType.Equals(ImportDataMap.NUMERICDATATYPE))
                            {
                                if (columnValue.Equals("-") || columnValue.Trim().Length == 0)
                                {
                                    if (cmap.defaultValue != null && cmap.defaultValue.Length > 0) { columnValue = cmap.defaultValue; }
                                    else { columnValue = "NULL"; }
                                }
                                clauseParameters += columnValue + ",";
                            }
                            else if (cmap.importDataType.Equals(ImportDataMap.TIMESTAMPDATATYPE))
                            {
                                var parsed = DateUtils.CleanDate(columnValue);
                                if (parsed.HasValue) { columnValue = "\'" + parsed.Value.ToString("yyyy-MM-dd hh:mm:ss tt") + "\'"; }
                                else { columnValue = "NULL"; }
                                clauseParameters += columnValue + ",";
                            }
                            else
                            {
                                if (columnValue.Equals("-") || columnValue.Trim().Length == 0)
                                {
                                    if (cmap.defaultValue != null && cmap.defaultValue.Length > 0) { columnValue = cmap.defaultValue; }
                                }
                                clauseParameters += "\'" + columnValue + "\',";
                            }
                        }
                    }
                    #endregion

                    // Trim the final comma from each clause.
                    clauseParameters = clauseParameters.Substring(0, clauseParameters.Length - 1);
                    clauseValues = clauseValues.Substring(0, clauseValues.Length - 1);
                    string commandText = statementPart1 + "(" + clauseValues + ") VALUES (" + clauseParameters + ")";
                    SqlCommand sqc = new SqlCommand(commandText, connection, trans);
                    numberOfHolesAdded++;
                    if (commitToDB) { sqc.ExecuteNonQuery(); }
                    tb++;
                    if (tb == transactionBatchLimit)
                    {
                        // Commit the batch, then renew the transaction.
                        if (commitToDB)
                        {
                            trans.Commit();
                            numCommits++;
                            trans = connection.BeginTransaction();
                        }
                        // Reset the counter.
                        tb = 0;
                    }
                }
                ct++;
            }
        }
        if (tb > 0)
        {
            if (commitToDB) { trans.Commit(); }
            numCommits++;
        }
        mos.recordsAdded = numberOfHolesAdded;
        UpdateStatus("Finished writing collars to database ", 100.0);
    }
    catch (Exception ex)
    {
        UpdateStatus("Error writing collars to database ", 0);
        mos.AddErrorMessage("Error writing collar data at line " + linesRead + ":\n" + ex.ToString());
        mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
    }
    finally
    {
        try
        {
            connection.Close();
            secondaryConnection.Close();
            fileStream.Close();
        }
        catch (Exception ex)
        {
            mos.AddErrorMessage("Error closing connection to database:\n" + ex.ToString());
            mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
        }
    }
    if (hasDuplicateHoles && overwrite)
    {
        OverwriteCollarRecord(mos, rejectedLines, importMap, connectionString, NKDProjectID, UpdateStatus, holeWarningMessages);
    }
    foreach (KeyValuePair<string, string> kvp in holeWarningMessages) { mos.AddWarningMessage(kvp.Value); }
    mos.linesReadFromSource = linesRead;
}
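/// <summary>
/// Imports downhole survey records via generated INSERT statements. When
/// checkForDuplicates is set, rows whose hole and depth already exist are rejected and,
/// when overwrite is also set, passed to OverwriteSurveyRecord.
/// </summary>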
internal void AddSurveyData(ModelImportStatus mos, Stream fileStream, FormatSpecification.ImportDataMap importMap,
    int batchSize, Action<string, double> UpdateStatus, int approxNumLines, string connectionString,
    Guid NKDProjectID, bool overwrite, bool checkForDuplicates)
{
    bool duplicateFound = false;
    // Iterate through the data lines.
    int ct = 1;
    int linesRead = 0;
    SqlConnection connection = null;
    SqlConnection secondaryConnection = null;
    Dictionary<Guid, List<string>> rejectedLines = new Dictionary<Guid, List<string>>();
    Dictionary<string, string> holeWarningMessages = new Dictionary<string, string>();
    using (var entityObj = new NKDC(connectionString, null))
    {
        SurveyQueries sq = new SurveyQueries();
        // Get a connection to the database.
        try
        {
            connection = new SqlConnection(connectionString);
            connection.Open();
            secondaryConnection = new SqlConnection(connectionString);
            secondaryConnection.Open();
            int numCommits = 0;
            // Note: transaction batching is currently disabled for survey imports;
            // each INSERT is executed individually.
            int tb = 0;

            // Open the file stream and read the first line.
            StreamReader sr = null;
            try { sr = new StreamReader(fileStream); }
            catch (Exception ex)
            {
                mos.AddErrorMessage("Error getting data stream for input data:\n" + ex.ToString());
                mos.finalErrorCode = ModelImportStatus.ERROR_LOADING_FILE;
            }
            string line = null;
            float bct = 1;
            int repCount = 0;
            float fNumLines = (float)approxNumLines;

            // Get the column containing the hole name.
            Dictionary<string, Guid> holeIDLookups = new Dictionary<string, Guid>();
            int numberOfHolesAdded = 0;
            ColumnMap headerCmap = importMap.FindItemsByTargetName("HeaderID");
            ColumnMap depthCmap = importMap.FindItemsByTargetName("Depth");
            float percentComplete = 0;
            int headerIDX = headerCmap.sourceColumnNumber;
            if (sr != null)
            {
                while ((line = sr.ReadLine()) != null)
                {
                    repCount++;
                    percentComplete = ((float)ct / approxNumLines) * 100.0f;
                    bct++;
                    linesRead++;
                    if (ct >= importMap.dataStartLine)
                    {
                        string statementPart1 = "INSERT INTO " + importMap.mapTargetPrimaryTable + " ";
                        string clauseValues = "";
                        string clauseParameters = "";
                        List<string> items = parseTestLine(line, importMap.inputDelimiter);

                        // Using the column map, pick out the hole name field and see if it is in the database already.
                        string headerNameItem = items[headerIDX];
                        bool foundHole = false;
                        Guid holeID = new Guid();
                        bool lv = holeIDLookups.ContainsKey(headerNameItem);
                        if (!lv)
                        {
                            string headerGUID = ForeignKeyUtils.FindFKValueInOther(headerNameItem, headerCmap, secondaryConnection, false, "HoleName", NKDProjectID);
                            if (headerGUID == null)
                            {
                                // The specified record was not found in the header table:
                                // report on the issue and skip the line.
                            }
                            else
                            {
                                foundHole = true;
                                holeID = new Guid(headerGUID);
                                holeIDLookups.Add(headerNameItem, holeID);
                            }
                        }
                        else
                        {
                            holeIDLookups.TryGetValue(headerNameItem, out holeID);
                            foundHole = true;
                        }
                        if (!foundHole)
                        {
                            mos.AddWarningMessage("Failed to find hole " + headerNameItem + ". Skipping record at line " + linesRead + ".");
                            mos.finalErrorCode = ModelImportStatus.DATA_CONSISTENCY_ERROR;
                            mos.recordsFailed++;
                            continue;
                        }
                        if (checkForDuplicates && depthCmap != null)
                        {
                            // Check for duplicate depths.
                            string d = items[depthCmap.sourceColumnNumber];
                            decimal dt = 0;
                            bool isParsed = decimal.TryParse(d, out dt);
                            if (isParsed)
                            {
                                List<Guid> rr = sq.CheckForDuplicate(holeID, dt, secondaryConnection);
                                if (rr.Count > 0)
                                {
                                    duplicateFound = true;
                                    if (!rejectedLines.ContainsKey(rr.First()))
                                    {
                                        rejectedLines.Add(rr.First(), items);
                                        mos.AddWarningMessage("Duplicate depth found in survey data for hole " + headerNameItem + " at depth " + d + " on line " + linesRead);
                                        UpdateStatus("Duplicate depth found in survey data for hole " + headerNameItem + " at depth " + d, percentComplete);
                                    }
                                    else
                                    {
                                        mos.AddWarningMessage("Duplicate depth found in survey data file for hole " + headerNameItem + " at depth " + d + " on line " + linesRead);
                                        UpdateStatus("Duplicate depth found in survey data file for hole " + headerNameItem + " at depth " + d, percentComplete);
                                        rejectedLines[rr.First()] = items;
                                    }
                                    if (!overwrite) { mos.recordsFailed++; }
                                    continue;
                                }
                            }
                        }

                        #region map search
                        // Now pick out all the mapped values.
                        foreach (ColumnMap cmap in importMap.columnMap)
                        {
                            if (cmap.targetColumnName.Trim().Equals("HeaderID"))
                            {
                                string targetCol = cmap.targetColumnName;
                                string targetTable = cmap.targetColumnTable;
                                clauseValues += "" + targetTable + "." + targetCol + ",";
                                clauseParameters += "\'" + holeID.ToString() + "\',";
                            }
                            else
                            {
                                bool isFKColumn = cmap.hasFKRelation;
                                int colID = cmap.sourceColumnNumber;
                                string columnValue = cmap.defaultValue;
                                if (colID >= 0) { columnValue = items[colID]; }
                                string targetCol = cmap.targetColumnName;
                                string targetTable = cmap.targetColumnTable;
                                clauseValues += "" + targetTable + "." + targetCol + ",";
                                if (isFKColumn)
                                {
                                    // Go and search for the appropriate value from the foreign key table.
                                    string newValue = ForeignKeyUtils.FindFKValueInDictionary(columnValue, cmap, secondaryConnection, true);
                                    columnValue = newValue;
                                    if (newValue != null && newValue.Trim().Length > 0) { clauseParameters += "\'" + columnValue + "\',"; }
                                    else { clauseParameters += "NULL,"; }
                                }
                                else
                                {
                                    if (cmap.importDataType.Equals(ImportDataMap.NUMERICDATATYPE))
                                    {
                                        if (columnValue.Equals("-") || columnValue.Equals(""))
                                        {
                                            if (cmap.defaultValue != null && cmap.defaultValue.Length > 0) { columnValue = cmap.defaultValue; }
                                            else { columnValue = "NULL"; }
                                        }
                                        clauseParameters += columnValue + ",";
                                    }
                                    else { clauseParameters += "\'" + columnValue + "\',"; }
                                }
                            }
                        }
                        #endregion

                        // Trim the final comma from each clause.
                        clauseParameters = clauseParameters.Substring(0, clauseParameters.Length - 1);
                        clauseValues = clauseValues.Substring(0, clauseValues.Length - 1);
                        string commandText = statementPart1 + "(" + clauseValues + ") VALUES (" + clauseParameters + ")";
                        SqlCommand sqc = new SqlCommand(commandText, connection);
                        numberOfHolesAdded++;
                        if (commitToDB)
                        {
                            try { sqc.ExecuteNonQuery(); }
                            catch (Exception ex)
                            {
                                mos.AddErrorMessage("Failed to insert items on line " + linesRead + ":\n" + ex.Message);
                                UpdateStatus("Failed to insert items on line " + linesRead + ".", percentComplete);
                                mos.recordsFailed++;
                                mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
                            }
                        }
                        UpdateStatus("Updating from line " + linesRead, percentComplete);
                        tb++;
                    }
                    ct++;
                }
            }
            if (tb > 0) { numCommits++; }
            mos.recordsAdded = numberOfHolesAdded;
            UpdateStatus("Finished writing records to database ", 100.0);
        }
        catch (Exception ex)
        {
            UpdateStatus("Error writing records to database ", 0);
            mos.AddErrorMessage("Error writing records data at line " + linesRead + ":\n" + ex.ToString());
            mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
        }
        finally
        {
            try
            {
                connection.Close();
                secondaryConnection.Close();
                fileStream.Close();
            }
            catch (Exception ex)
            {
                mos.AddErrorMessage("Error closing connection to database:\n" + ex.ToString());
                mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
            }
        }
        if (duplicateFound && overwrite)
        {
            OverwriteSurveyRecord(mos, rejectedLines, importMap, connectionString, NKDProjectID, UpdateStatus, holeWarningMessages);
        }
        foreach (KeyValuePair<string, string> kvp in holeWarningMessages) { mos.AddWarningMessage(kvp.Value); }
        mos.linesReadFromSource = linesRead;
    }
}
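// A minimal usage sketch for these importers, kept in comments since the surrounding
// wiring is application-specific. The names "importer", "map", "existingHoleNames",
// "connectionString" and "projectID" are hypothetical placeholders:
//
//   var mos = new ModelImportStatus();
//   using (var fs = File.OpenRead(@"C:\imports\collars.csv"))
//   {
//       importer.AddCollarData(mos, fs, map, 100,
//           (msg, pct) => Console.WriteLine(msg + " (" + pct + "%)"),
//           5000, connectionString, existingHoleNames, projectID, true);
//   }
//
// The callback receives progress text and a percentage; batchSize (here 100) controls
// how many rows are written per commit, and approxNumLines (here 5000) only affects
// progress reporting.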