/// <summary>
/// Records a shortcut from a single target-column name to its source-column index,
/// when the import map actually contains a column mapped to that target name.
/// </summary>
/// <param name="lookupString">Target column name to look up (also used as the dictionary key).</param>
/// <param name="importMap">Import map searched for the target column.</param>
/// <param name="columnIDX">Receives the target-name → source-column-index entry.</param>
private static void PopulateCMapShortcut(string lookupString, FormatSpecification.ImportDataMap importMap, Dictionary<string, int> columnIDX)
{
    ColumnMap cmap = importMap.FindItemsByTargetName(lookupString);
    if (cmap == null)
    {
        // No mapping for this target name; leave the shortcut dictionary untouched.
        return;
    }
    columnIDX.Add(lookupString, cmap.sourceColumnNumber);
}
/// <summary>
/// Builds shortcut entries for every ordinary (non-bracketed) column in the import map,
/// keyed by trimmed target-column name and holding the source-column index.
/// Bracketed pseudo-columns such as "[ASSAY ...]" are skipped.
/// </summary>
/// <param name="importMap">Import map whose column mappings are scanned.</param>
/// <param name="columnIDX">Receives one target-name → source-column-index entry per mapped column.</param>
public static void PopulateCMapShortcut(FormatSpecification.ImportDataMap importMap, Dictionary<string, int> columnIDX)
{
    foreach (ColumnMap cm in importMap.columnMap)
    {
        // BUGFIX: the original indexed the untrimmed name ([0]), which throws on an
        // empty name and lets names like " [X]" slip past the bracket check even
        // though the trimmed form was used as the dictionary key.
        string targetName = cm.targetColumnName == null ? "" : cm.targetColumnName.Trim();
        if (targetName.Length == 0 || targetName[0] == '[')
        {
            continue;
        }
        // BUGFIX: use the indexer instead of Add so a duplicate target name
        // overwrites (last occurrence wins) rather than throwing ArgumentException.
        columnIDX[targetName] = cm.sourceColumnNumber;
    }
}
/// <summary>
/// Imports assay results from a delimited text stream. Creates one AssayGroup for the run,
/// one AssayGroupTest per "[ASSAY...]" column in the map, then one Sample plus one
/// AssayGroupTestResult per assay column for each data row. Rows whose hole name cannot
/// be resolved against the header table are skipped and reported. Optionally detects
/// duplicate depth intervals and either skips or overwrites them.
/// </summary>
/// <param name="mos">Accumulates counts, warnings and the final error code.</param>
/// <param name="fileStream">Input data stream; closed in the finally block.</param>
/// <param name="importMap">Column mapping, delimiter and data-start line.</param>
/// <param name="batchSize">Number of rows between SaveChanges calls.</param>
/// <param name="UpdateStatus">Progress callback (message, percent).</param>
/// <param name="approxNumLines">Estimated row count, used only for percentage reporting.</param>
/// <param name="connectionString">Database connection string (EF context plus two raw connections).</param>
/// <param name="XODBProjectID">Project the assay group and hole lookups belong to.</param>
/// <param name="checkForDuplicates">When true, query for existing samples on the same interval.</param>
/// <param name="doImportOverwrite">When true, duplicate intervals reuse the existing sample instead of being skipped.</param>
internal void AddAssayData(ModelImportStatus mos, Stream fileStream, FormatSpecification.ImportDataMap importMap, int batchSize, Action<string, double> UpdateStatus, int approxNumLines, string connectionString, Guid XODBProjectID, bool checkForDuplicates, bool doImportOverwrite)
{
    bool commitToDB = true;
    DateTime currentUpdateTimestamp = DateTime.UtcNow;
    // first set up an assay group object - we can do this through the edm
    using (var entityObj = new XODBC(connectionString, null))
    {
        Guid agGuid = Guid.NewGuid();
        AssayGroup ag = new AssayGroup();
        ag.AssayGroupID = agGuid;
        ag.ProjectID = XODBProjectID;
        ag.AssayGroupName = "Manual import";
        ag.Comment = "From file " + importMap.mapOriginalDataFile;
        ag.Entered = currentUpdateTimestamp;
        ag.VersionUpdated = currentUpdateTimestamp;
        entityObj.AssayGroups.AddObject(ag);
        if (commitToDB)
        {
            entityObj.SaveChanges();
        }

        // set up the assay test columns - one of these for each test type
        Dictionary<ColumnMap, Guid> resultsColumns = new Dictionary<ColumnMap, Guid>();
        Dictionary<Guid, AssayGroupTest> assayGroups = new Dictionary<Guid, AssayGroupTest>();
        foreach (ColumnMap cim in importMap.columnMap)
        {
            if (cim.targetColumnName.Trim().StartsWith("[ASSAY"))
            {
                // this is a test category
                resultsColumns.Add(cim, Guid.NewGuid());
            }
        }

        UpdateStatus("Setting up assay tests ", 2);
        foreach (KeyValuePair<ColumnMap, Guid> kvp in resultsColumns)
        {
            ColumnMap cm = kvp.Key;
            Guid g = kvp.Value;
            AssayGroupTest xt = new AssayGroupTest();
            // AssayTestName appears limited to 16 characters, hence the truncation.
            string ss1 = "";
            if (cm.sourceColumnName != null && cm.sourceColumnName.Length > 15)
            {
                ss1 = cm.sourceColumnName.Substring(0, 16);
            }
            else
            {
                ss1 = cm.sourceColumnName;
            }
            Guid pid = FindParameterForAssayTypeName(cm.sourceColumnName);
            xt.ParameterID = pid;
            xt.AssayTestName = ss1;
            xt.AssayGroupID = agGuid;
            xt.AssayGroupTestID = g;
            xt.VersionUpdated = currentUpdateTimestamp;
            entityObj.AssayGroupTests.AddObject(xt);
            assayGroups.Add(g, xt);
            if (commitToDB)
            {
                entityObj.SaveChanges();
            }
        }

        // iterate through the data lines
        int ct = 1;
        int linesRead = 0;
        SqlConnection connection = null;
        SqlConnection secondaryConnection = null;
        try
        {
            // secondaryConnection serves the foreign-key (hole name) lookups while the
            // EF context handles all inserts.
            connection = new SqlConnection(connectionString);
            connection.Open();
            secondaryConnection = new SqlConnection(connectionString);
            secondaryConnection.Open();
            bool hasDuplicateIntervals = false;
            int tb = 0;
            int transactionBatchLimit = batchSize;

            // open the filestream and read the first line
            StreamReader sr = null;
            try
            {
                sr = new StreamReader(fileStream);
            }
            catch (Exception ex)
            {
                mos.AddErrorMessage("Error getting data stream for input data:\n" + ex.ToString());
                mos.finalErrorCode = ModelImportStatus.ERROR_LOADING_FILE;
            }

            string line = null;
            float pct = 0;
            // cache of hole name -> HeaderID so each hole is resolved against the DB only once
            Dictionary<string, Guid> holeIDLookups = new Dictionary<string, Guid>();
            Dictionary<string, int> columnIDX = new Dictionary<string, int>();
            int fkLookupCount = 0;
            PopulateCMapShortcut("HeaderID", importMap, columnIDX);
            PopulateCMapShortcut("FromDepth", importMap, columnIDX);
            PopulateCMapShortcut("ToDepth", importMap, columnIDX);
            PopulateCMapShortcut("SampleNumber", importMap, columnIDX);
            PopulateCMapShortcut("LabSampleNumber", importMap, columnIDX);
            PopulateCMapShortcut("LabBatchNumber", importMap, columnIDX);
            ColumnMap headerCmap = importMap.FindItemsByTargetName("HeaderID");
            AssayQueries assayQueries = new AssayQueries();

            if (sr != null)
            {
                while ((line = sr.ReadLine()) != null)
                {
                    pct = ((float)linesRead / (float)approxNumLines) * 100.0f;
                    linesRead++;
                    if (ct >= importMap.dataStartLine)
                    {
                        // digest a row of input data
                        List<string> items = parseTestLine(line, importMap.inputDelimiter);
                        Guid holeID = new Guid();
                        Decimal fromDepth = new Decimal(-9999999999);
                        Decimal toDepth = new Decimal(-9999999999);
                        string sampleNumber = null;
                        string labBatchNumber = null;
                        string labsampleNumber = null;

                        // resolve the hole name to a HeaderID, via the cache where possible
                        int idxVal = 0;
                        bool foundEntry = columnIDX.TryGetValue("HeaderID", out idxVal);
                        bool foundHole = false;
                        string holeName = "";
                        if (foundEntry)
                        {
                            string lookupByName = "HoleName";
                            string lookupValue = items[idxVal];
                            holeName = lookupValue;
                            bool lv = holeIDLookups.ContainsKey(lookupValue);
                            if (!lv)
                            {
                                string headerGUID = ForeignKeyUtils.FindFKValueInOther(lookupValue, headerCmap, secondaryConnection, false, lookupByName, XODBProjectID);
                                if (headerGUID == null)
                                {
                                    // this means we have not found the specified records in the header table
                                    // Report on issue and skip line
                                }
                                else
                                {
                                    foundHole = true;
                                    holeID = new Guid(headerGUID);
                                    holeIDLookups.Add(lookupValue, holeID);
                                    fkLookupCount++;
                                }
                            }
                            else
                            {
                                holeIDLookups.TryGetValue(lookupValue, out holeID);
                                foundHole = true;
                            }
                        }

                        if (!foundHole)
                        {
                            mos.AddErrorMessage("Failed to find hole " + holeName + ". Skipping record at line " + linesRead + ".");
                            mos.finalErrorCode = ModelImportStatus.DATA_CONSISTENCY_ERROR;
                            mos.recordsFailed++;
                            continue;
                        }
                        else
                        {
                            bool hasFrom = false;
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("FromDepth", out idxVal);
                            if (foundEntry)
                            {
                                string ii = items[idxVal];
                                Decimal val = 0;
                                bool isOk = Decimal.TryParse(ii, out val);
                                if (isOk)
                                {
                                    fromDepth = val;
                                    hasFrom = true;
                                }
                            }
                            bool hasTo = false;
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("ToDepth", out idxVal);
                            if (foundEntry)
                            {
                                string ii = items[idxVal];
                                Decimal val = 0;
                                bool isOk = Decimal.TryParse(ii, out val);
                                if (isOk)
                                {
                                    toDepth = val;
                                    hasTo = true;
                                }
                            }

                            List<Sample> duplicateList = null;
                            bool isDuplicateInterval = false;
                            if (checkForDuplicates)
                            {
                                if (hasFrom && hasTo)
                                {
                                    // here we need to check that the hole is not duplicated
                                    duplicateList = assayQueries.CheckForDuplicate(holeID, fromDepth, toDepth);
                                    if (duplicateList.Count > 0)
                                    {
                                        isDuplicateInterval = true;
                                    }
                                }
                                if (isDuplicateInterval)
                                {
                                    hasDuplicateIntervals = true;
                                    mos.AddWarningMessage("Duplicate interval for hole " + holeName + " at depth " + fromDepth + " to " + toDepth);
                                    UpdateStatus("Duplicate interval at " + holeName + " " + fromDepth + ", " + toDepth, pct);
                                    if (!doImportOverwrite)
                                    {
                                        mos.recordsFailed++;
                                        continue;
                                    }
                                }
                            }

                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("SampleNumber", out idxVal);
                            if (foundEntry)
                            {
                                sampleNumber = items[idxVal];
                            }
                            // BUGFIX: both of the following lookups used the key "SampleNumber",
                            // so LabBatchNumber was silently filled with the sample number.
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("LabSampleNumber", out idxVal);
                            if (foundEntry)
                            {
                                labsampleNumber = items[idxVal];
                            }
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("LabBatchNumber", out idxVal);
                            if (foundEntry)
                            {
                                labBatchNumber = items[idxVal];
                            }

                            // reuse the existing sample for a duplicate interval (overwrite mode),
                            // otherwise create a fresh one
                            Sample xs = new Sample();
                            if (isDuplicateInterval == true)
                            {
                                xs = duplicateList.First();
                            }
                            else
                            {
                                xs.SampleID = Guid.NewGuid();
                                xs.FromDepth = fromDepth;
                                xs.ToDepth = toDepth;
                                xs.HeaderID = holeID;
                                xs.VersionUpdated = currentUpdateTimestamp;
                                entityObj.Samples.AddObject(xs);
                            }

                            // now pick out all the mapped values
                            // iterate over all [ASSAY RESULT] columns
                            bool assayAdded = false;
                            foreach (KeyValuePair<ColumnMap, Guid> kvp in resultsColumns)
                            {
                                ColumnMap cm = kvp.Key;
                                Guid g = kvp.Value; // this is the AssayGroupTestID
                                AssayGroupTestResult testResult = new AssayGroupTestResult();
                                testResult.AssayGroupTestResultID = Guid.NewGuid();
                                testResult.AssayGroupTestID = g;
                                testResult.SampleID = xs.SampleID;
                                testResult.VersionUpdated = currentUpdateTimestamp;
                                testResult.LabBatchNumber = labBatchNumber;
                                Decimal result = new Decimal();
                                // BUGFIX: was "items.Count >= cm.sourceColumnNumber", which permits
                                // an out-of-range index when the counts are equal (0-based index).
                                if (items.Count > cm.sourceColumnNumber)
                                {
                                    bool parsedOK = Decimal.TryParse(items[cm.sourceColumnNumber], out result);
                                    if (parsedOK)
                                    {
                                        testResult.LabResult = result;
                                    }
                                    else
                                    {
                                        // non-numeric results are preserved as text
                                        testResult.LabResultText = items[cm.sourceColumnNumber];
                                    }
                                }
                                else
                                {
                                    mos.AddWarningMessage("Line " + linesRead + " contains too few columns to read " + cm.sourceColumnName);
                                }
                                entityObj.AssayGroupTestResults.AddObject(testResult);
                                assayAdded = true;
                            }
                            if (assayAdded == true)
                            {
                                mos.recordsAdded++;
                            }
                            tb++;
                        }
                    }

                    // flush a batch of pending inserts
                    if (commitToDB)
                    {
                        if (tb == transactionBatchLimit)
                        {
                            entityObj.SaveChanges();
                            UpdateStatus("Writing assays to DB (" + ct + " entries)", pct);
                            tb = 0;
                        }
                    }
                    ct++;
                }
                entityObj.SaveChanges();
            }

            if (hasDuplicateIntervals)
            {
                mos.finalErrorCode = ModelImportStatus.DATA_CONSISTENCY_ERROR;
            }
            mos.linesReadFromSource = ct - 1;
            UpdateStatus("Finished writing assays to database.", 0);
        }
        catch (Exception ex)
        {
            UpdateStatus("Error writing assays to database ", 0);
            mos.AddErrorMessage("Error writing assay data at line " + linesRead + ":\n" + ex.ToString());
            mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
        }
        finally
        {
            try
            {
                connection.Close();
                secondaryConnection.Close();
                fileStream.Close();
            }
            catch (Exception ex)
            {
                mos.AddErrorMessage("Error closing conenction to database:\n" + ex.ToString());
                mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
            }
        }
        mos.linesReadFromSource = linesRead;
    }
}
/// <summary>
/// Imports drill-hole collar records from a delimited text stream via dynamically-built
/// INSERT statements executed in batched transactions. Rows whose hole name is duplicated
/// within the file are skipped; rows whose hole already exists in the project are rejected
/// (and optionally passed to OverwriteCollarRecord when <paramref name="overwrite"/> is set).
/// </summary>
/// <param name="mos">Accumulates counts, warnings and the final error code.</param>
/// <param name="fileStream">Input data stream; closed in the finally block.</param>
/// <param name="importMap">Column mapping, target table, delimiter and data-start line.</param>
/// <param name="batchSize">Rows per transaction commit.</param>
/// <param name="UpdateStatus">Progress callback (message, percent).</param>
/// <param name="approxNumLines">Estimated row count (reporting only).</param>
/// <param name="connectionString">Database connection string.</param>
/// <param name="existingHoleNames">Hole names already present in the project.</param>
/// <param name="XODBProjectID">Project the collars belong to.</param>
/// <param name="overwrite">When true, rejected duplicate holes are re-applied via OverwriteCollarRecord.</param>
internal void AddCollarData(ModelImportStatus mos, Stream fileStream, FormatSpecification.ImportDataMap importMap, int batchSize, Action<string, double> UpdateStatus, int approxNumLines, string connectionString, List<string> existingHoleNames, Guid XODBProjectID, bool overwrite)
{
    bool hasDuplicateHoles = false;
    // iterate through the data lines
    int ct = 1;
    int linesRead = 0;
    SqlConnection connection = null;
    SqlConnection secondaryConnection = null;
    // hole name -> number of times seen in this file
    Dictionary<string, int> uniqueHoleNames = new Dictionary<string, int>();
    // parsed rows rejected because the hole already exists (candidates for overwrite)
    List<List<string>> rejectedLines = new List<List<string>>();
    Dictionary<string, string> holeWarningMessages = new Dictionary<string, string>();

    // this loop makes sure that any guids are properly typed so that a text string
    // for that guid can be passed into the query
    foreach (ColumnMap cmap in importMap.columnMap)
    {
        if (cmap.hasFKRelation)
        {
            cmap.importDataType = ImportDataMap.TEXTDATATYPE;
        }
    }

    try
    {
        connection = new SqlConnection(connectionString);
        connection.Open();
        // secondaryConnection serves the foreign-key dictionary lookups
        secondaryConnection = new SqlConnection(connectionString);
        secondaryConnection.Open();
        int numCommits = 0;
        SqlTransaction trans;
        trans = connection.BeginTransaction();
        int tb = 0;
        int transactionBatchLimit = batchSize;

        // open the filestream and read the first line
        StreamReader sr = null;
        try
        {
            sr = new StreamReader(fileStream);
        }
        catch (Exception ex)
        {
            mos.AddErrorMessage("Error getting data stream for input data:\n" + ex.ToString());
            mos.finalErrorCode = ModelImportStatus.ERROR_LOADING_FILE;
        }

        string line = null;
        // get the column containing the hole name
        ColumnMap cmapHeader = importMap.FindItemsByTargetName("HoleName");
        cmapHeader.importDataType = ImportDataMap.TEXTDATATYPE;
        int headerIDX = cmapHeader.sourceColumnNumber;
        int numberOfHolesAdded = 0;

        if (sr != null)
        {
            while ((line = sr.ReadLine()) != null)
            {
                linesRead++;
                if (ct >= importMap.dataStartLine)
                {
                    string statementPart1 = "INSERT INTO " + importMap.mapTargetPrimaryTable + " ";
                    string clauseValues = "";
                    string clauseParameters = "";
                    List<string> items = parseTestLine(line, importMap.inputDelimiter);

                    // using the column map, pick out the hole name field and see if it is in the database already
                    string headerNameItem = items[headerIDX];

                    // check if this holename is a duplicate in the file
                    bool hasHolenameEntryInFile = uniqueHoleNames.ContainsKey(headerNameItem.Trim());
                    if (hasHolenameEntryInFile)
                    {
                        // BUGFIX: was "uniqueHoleNames[...] = val++", a post-increment that
                        // stored the OLD count back, so the per-hole counter never advanced.
                        int val = uniqueHoleNames[headerNameItem.Trim()] + 1;
                        uniqueHoleNames[headerNameItem.Trim()] = val;
                        holeWarningMessages.Add(headerNameItem + " (" + val + ")", "Hole: " + headerNameItem + " at line " + linesRead + " already exists in the input file - skipping.");
                        mos.recordsFailed++;
                        continue;
                    }
                    else
                    {
                        uniqueHoleNames.Add(headerNameItem.Trim(), 1);
                    }

                    // check if this hole exists in the database
                    if (existingHoleNames.Contains(headerNameItem.Trim()))
                    {
                        if (!holeWarningMessages.ContainsKey(headerNameItem))
                        {
                            holeWarningMessages.Add(headerNameItem, "Hole: " + headerNameItem + " already exists in this project. Skipping record at line " + linesRead + ".");
                            if (!overwrite)
                            {
                                mos.recordsFailed++;
                            }
                        }
                        mos.finalErrorCode = ModelImportStatus.DATA_CONSISTENCY_ERROR;
                        hasDuplicateHoles = true;
                        rejectedLines.Add(items);
                        continue;
                    }

                    #region mappsearch
                    // now pick out all the mapped values and build the column/value clauses
                    foreach (ColumnMap cmap in importMap.columnMap)
                    {
                        bool isFKColumn = cmap.hasFKRelation;
                        int colID = cmap.sourceColumnNumber;
                        string columnValue = cmap.defaultValue;
                        if (colID >= 0)
                        {
                            columnValue = items[colID];
                        }
                        string targetCol = cmap.targetColumnName;
                        string targetTable = cmap.targetColumnTable;
                        clauseValues += "" + targetTable + "." + targetCol + ",";
                        if (isFKColumn)
                        {
                            // go and search for the appropriate value from the foreign key table
                            string newValue = ForeignKeyUtils.FindFKValueInDictionary(columnValue, cmap, secondaryConnection, true);
                            if (newValue == null)
                            {
                                clauseParameters += "NULL,";
                            }
                            else
                            {
                                clauseParameters += "\'" + newValue + "\',";
                            }
                        }
                        else
                        {
                            if (cmap.importDataType.Equals(ImportDataMap.NUMERICDATATYPE))
                            {
                                // missing numerics fall back to the mapped default, else NULL
                                if (columnValue.Equals("-") || columnValue.Trim().Length == 0)
                                {
                                    if (cmap.defaultValue != null && cmap.defaultValue.Length > 0)
                                    {
                                        columnValue = cmap.defaultValue;
                                    }
                                    else
                                    {
                                        columnValue = "NULL";
                                    }
                                }
                                clauseParameters += columnValue + ",";
                            }
                            else if (cmap.importDataType.Equals(ImportDataMap.TIMESTAMPDATATYPE))
                            {
                                DateTime dtr = new DateTime();
                                bool parsed = DateTime.TryParse(columnValue, out dtr);
                                if (parsed)
                                {
                                    columnValue = "\'" + dtr.ToString("yyyy-MM-dd hh:mm:ss tt") + "\'";
                                }
                                else
                                {
                                    columnValue = "NULL";
                                }
                                clauseParameters += columnValue + ",";
                            }
                            else
                            {
                                if (columnValue.Equals("-") || columnValue.Trim().Length == 0)
                                {
                                    if (cmap.defaultValue != null && cmap.defaultValue.Length > 0)
                                    {
                                        columnValue = cmap.defaultValue;
                                    }
                                }
                                clauseParameters += "\'" + columnValue + "\',";
                            }
                        }
                    }
                    #endregion

                    // trim the trailing comma from each clause
                    clauseParameters = clauseParameters.Substring(0, clauseParameters.Length - 1);
                    clauseValues = clauseValues.Substring(0, clauseValues.Length - 1);
                    // NOTE(review): SQL built by string concatenation - values come from the
                    // input file; parameterized commands would be safer. Left as-is because
                    // the NULL/default emission relies on unquoted literals.
                    string commandText = statementPart1 + "(" + clauseValues + ") VALUES (" + clauseParameters + ")";
                    SqlCommand sqc = new SqlCommand(commandText, connection, trans);
                    numberOfHolesAdded++;
                    if (commitToDB)
                    {
                        // BUGFIX: was "throw ex;" inside a catch, which resets the stack trace;
                        // the exception now propagates unchanged to the outer handler.
                        sqc.ExecuteNonQuery();
                    }
                    tb++;
                    if (tb == transactionBatchLimit)
                    {
                        // commit batch, then renew the transaction
                        if (commitToDB)
                        {
                            trans.Commit();
                            numCommits++;
                            trans = connection.BeginTransaction();
                        }
                        // reset counter
                        tb = 0;
                    }
                }
                ct++;
            }
        }

        // commit any remaining rows
        if (tb > 0)
        {
            if (commitToDB)
            {
                trans.Commit();
            }
            numCommits++;
        }
        mos.recordsAdded = numberOfHolesAdded;
        UpdateStatus("Finished writing collars to database ", 100.0);
    }
    catch (Exception ex)
    {
        UpdateStatus("Error writing collars to database ", 0);
        mos.AddErrorMessage("Error writing collar data at line " + linesRead + ":\n" + ex.ToString());
        mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
    }
    finally
    {
        try
        {
            connection.Close();
            secondaryConnection.Close();
            fileStream.Close();
        }
        catch (Exception ex)
        {
            mos.AddErrorMessage("Error closing conenction to database:\n" + ex.ToString());
            mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
        }
    }

    if (hasDuplicateHoles == true && overwrite == true)
    {
        OverwriteCollarRecord(mos, rejectedLines, importMap, connectionString, XODBProjectID, UpdateStatus, holeWarningMessages);
    }
    foreach (KeyValuePair<string, string> kvp in holeWarningMessages)
    {
        mos.AddWarningMessage(kvp.Value);
    }
    mos.linesReadFromSource = linesRead;
}
/// <summary>
/// Imports down-hole survey records from a delimited text stream via dynamically-built
/// INSERT statements. Hole names are resolved to HeaderIDs (cached per run); rows whose
/// hole cannot be found are skipped. Optionally checks for duplicate depths per hole and
/// either skips them or hands them to OverwriteSurveyRecord when <paramref name="overwrite"/> is set.
/// </summary>
/// <param name="mos">Accumulates counts, warnings and the final error code.</param>
/// <param name="fileStream">Input data stream; closed in the finally block.</param>
/// <param name="importMap">Column mapping, target table, delimiter and data-start line.</param>
/// <param name="batchSize">Nominal batch size (transactional batching is currently disabled).</param>
/// <param name="UpdateStatus">Progress callback (message, percent).</param>
/// <param name="approxNumLines">Estimated row count, used for percentage reporting.</param>
/// <param name="connectionString">Database connection string.</param>
/// <param name="NKDProjectID">Project used when resolving hole names.</param>
/// <param name="overwrite">When true, duplicate-depth rows are re-applied via OverwriteSurveyRecord.</param>
/// <param name="checkForDuplicates">When true, query for an existing record at the same hole/depth.</param>
internal void AddSurveyData(ModelImportStatus mos, Stream fileStream, FormatSpecification.ImportDataMap importMap, int batchSize, Action<string, double> UpdateStatus, int approxNumLines, string connectionString, Guid NKDProjectID, bool overwrite, bool checkForDuplicates)
{
    bool duplicateFound = false;
    // iterate through the data lines
    int ct = 1;
    int linesRead = 0;
    SqlConnection connection = null;
    SqlConnection secondaryConnection = null;
    // existing-record ID -> the new parsed row that collided with it (candidates for overwrite)
    Dictionary<Guid, List<string>> rejectedLines = new Dictionary<Guid, List<string>>();
    Dictionary<string, string> holeWarningMessages = new Dictionary<string, string>();

    using (var entityObj = new NKDC(connectionString, null))
    {
        SurveyQueries sq = new SurveyQueries();
        try
        {
            connection = new SqlConnection(connectionString);
            connection.Open();
            // secondaryConnection serves the hole-name and duplicate-depth lookups
            secondaryConnection = new SqlConnection(connectionString);
            secondaryConnection.Open();
            int tb = 0;

            // open the filestream and read the first line
            StreamReader sr = null;
            try
            {
                sr = new StreamReader(fileStream);
            }
            catch (Exception ex)
            {
                mos.AddErrorMessage("Error getting data stream for input data:\n" + ex.ToString());
                mos.finalErrorCode = ModelImportStatus.ERROR_LOADING_FILE;
            }

            string line = null;
            // cache of hole name -> HeaderID so each hole is resolved against the DB only once
            Dictionary<string, Guid> holeIDLookups = new Dictionary<string, Guid>();
            int numberOfHolesAdded = 0;
            ColumnMap headerCmap = importMap.FindItemsByTargetName("HeaderID");
            ColumnMap depthCmap = importMap.FindItemsByTargetName("Depth");
            float percentComplete = 0;
            int headerIDX = headerCmap.sourceColumnNumber;

            if (sr != null)
            {
                while ((line = sr.ReadLine()) != null)
                {
                    percentComplete = ((float)ct / approxNumLines) * 100.0f;
                    linesRead++;
                    if (ct >= importMap.dataStartLine)
                    {
                        string statementPart1 = "INSERT INTO " + importMap.mapTargetPrimaryTable + " ";
                        string clauseValues = "";
                        string clauseParameters = "";
                        List<string> items = parseTestLine(line, importMap.inputDelimiter);

                        // using the column map, pick out the hole name field and resolve it
                        string headerNameItem = items[headerIDX];
                        bool foundHole = false;
                        Guid holeID = new Guid();
                        bool lv = holeIDLookups.ContainsKey(headerNameItem);
                        if (!lv)
                        {
                            string headerGUID = ForeignKeyUtils.FindFKValueInOther(headerNameItem, headerCmap, secondaryConnection, false, "HoleName", NKDProjectID);
                            if (headerGUID == null)
                            {
                                // this means we have not found the specified records in the header table
                                // Report on issue and skip line
                            }
                            else
                            {
                                foundHole = true;
                                holeID = new Guid(headerGUID);
                                holeIDLookups.Add(headerNameItem, holeID);
                            }
                        }
                        else
                        {
                            holeIDLookups.TryGetValue(headerNameItem, out holeID);
                            foundHole = true;
                        }

                        if (!foundHole)
                        {
                            mos.AddWarningMessage("Failed to find hole " + headerNameItem + ". Skipping record at line " + linesRead + ".");
                            mos.finalErrorCode = ModelImportStatus.DATA_CONSISTENCY_ERROR;
                            mos.recordsFailed++;
                            continue;
                        }

                        if (checkForDuplicates == true && depthCmap != null)
                        {
                            // check for duplicate depths
                            string d = items[depthCmap.sourceColumnNumber];
                            decimal dt = 0;
                            bool isParsed = decimal.TryParse(d, out dt);
                            if (isParsed)
                            {
                                List<Guid> rr = sq.CheckForDuplicate(holeID, dt, secondaryConnection);
                                if (rr.Count > 0)
                                {
                                    duplicateFound = true;
                                    if (!rejectedLines.ContainsKey(rr.First()))
                                    {
                                        rejectedLines.Add(rr.First(), items);
                                        mos.AddWarningMessage("Duplicate depth found in survey data for hole " + headerNameItem + " at depth " + d + " on line " + linesRead);
                                        UpdateStatus("Duplicate depth found in survey data for hole " + headerNameItem + " at depth " + d, percentComplete);
                                    }
                                    else
                                    {
                                        // a later duplicate of the same record replaces the earlier one
                                        mos.AddWarningMessage("Duplicate depth found in survey data file for hole " + headerNameItem + " at depth " + d + " on line " + linesRead);
                                        UpdateStatus("Duplicate depth found in survey data file for hole " + headerNameItem + " at depth " + d, percentComplete);
                                        rejectedLines[rr.First()] = items;
                                    }
                                    if (!overwrite)
                                    {
                                        mos.recordsFailed++;
                                    }
                                    continue;
                                }
                            }
                        }

                        #region mappsearch
                        // now pick out all the mapped values and build the column/value clauses
                        foreach (ColumnMap cmap in importMap.columnMap)
                        {
                            if (cmap.targetColumnName.Trim().Equals("HeaderID"))
                            {
                                // HeaderID comes from the resolved hole, not the input file
                                string targetCol = cmap.targetColumnName;
                                string targetTable = cmap.targetColumnTable;
                                clauseValues += "" + targetTable + "." + targetCol + ",";
                                clauseParameters += "\'" + holeID.ToString() + "\',";
                            }
                            else
                            {
                                bool isFKColumn = cmap.hasFKRelation;
                                int colID = cmap.sourceColumnNumber;
                                string columnValue = cmap.defaultValue;
                                if (colID >= 0)
                                {
                                    columnValue = items[colID];
                                }
                                string targetCol = cmap.targetColumnName;
                                string targetTable = cmap.targetColumnTable;
                                clauseValues += "" + targetTable + "." + targetCol + ",";
                                if (isFKColumn)
                                {
                                    // go and search for the appropriate value from the foreign key table
                                    string newValue = ForeignKeyUtils.FindFKValueInDictionary(columnValue, cmap, secondaryConnection, true);
                                    columnValue = newValue;
                                    if (newValue != null && newValue.Trim().Length > 0)
                                    {
                                        clauseParameters += "\'" + columnValue + "\',";
                                    }
                                    else
                                    {
                                        clauseParameters += "NULL,";
                                    }
                                }
                                else
                                {
                                    if (cmap.importDataType.Equals(ImportDataMap.NUMERICDATATYPE))
                                    {
                                        // missing numerics fall back to the mapped default, else NULL
                                        if (columnValue.Equals("-") || columnValue.Equals(""))
                                        {
                                            if (cmap.defaultValue != null && cmap.defaultValue.Length > 0)
                                            {
                                                columnValue = cmap.defaultValue;
                                            }
                                            else
                                            {
                                                columnValue = "NULL";
                                            }
                                        }
                                        clauseParameters += columnValue + ",";
                                    }
                                    else
                                    {
                                        clauseParameters += "\'" + columnValue + "\',";
                                    }
                                }
                            }
                        }
                        #endregion

                        // trim the trailing comma from each clause
                        clauseParameters = clauseParameters.Substring(0, clauseParameters.Length - 1);
                        clauseValues = clauseValues.Substring(0, clauseValues.Length - 1);
                        // NOTE(review): SQL built by string concatenation - values come from the
                        // input file; parameterized commands would be safer. Left as-is because
                        // the NULL emission relies on unquoted literals.
                        string commandText = statementPart1 + "(" + clauseValues + ") VALUES (" + clauseParameters + ")";
                        SqlCommand sqc = new SqlCommand(commandText, connection);
                        if (commitToDB)
                        {
                            try
                            {
                                sqc.ExecuteNonQuery();
                                // BUGFIX: this counter was incremented unconditionally before the
                                // INSERT, so failed rows were still reported in mos.recordsAdded.
                                numberOfHolesAdded++;
                            }
                            catch (Exception)
                            {
                                mos.AddErrorMessage("Failed to insert items on line " + linesRead + ".");
                                UpdateStatus("Failed to insert items on line " + linesRead + ".", percentComplete);
                                mos.recordsFailed++;
                                mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
                            }
                        }
                        else
                        {
                            // dry-run mode still counts the rows it would have written
                            numberOfHolesAdded++;
                        }
                        UpdateStatus("Updating from line " + linesRead, percentComplete);
                        tb++;
                    }
                    ct++;
                }
            }
            mos.recordsAdded = numberOfHolesAdded;
            UpdateStatus("Finished writing records to database ", 100.0);
        }
        catch (Exception ex)
        {
            UpdateStatus("Error writing records to database ", 0);
            mos.AddErrorMessage("Error writing records data at line " + linesRead + ":\n" + ex.ToString());
            mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
        }
        finally
        {
            try
            {
                connection.Close();
                secondaryConnection.Close();
                fileStream.Close();
            }
            catch (Exception ex)
            {
                mos.AddErrorMessage("Error closing conenction to database:\n" + ex.ToString());
                mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
            }
        }

        if (duplicateFound == true && overwrite == true)
        {
            OverwriteSurveyRecord(mos, rejectedLines, importMap, connectionString, NKDProjectID, UpdateStatus, holeWarningMessages);
        }
        foreach (KeyValuePair<string, string> kvp in holeWarningMessages)
        {
            mos.AddWarningMessage(kvp.Value);
        }
        mos.linesReadFromSource = linesRead;
    }
}
internal void AddCoalQualityData(ModelImportStatus mos, Stream fileStream, FormatSpecification.ImportDataMap importMap, int batchSize, Action<string, double> UpdateStatus, int approxNumLines, string connectionString, Guid XODBProjectID, bool checkForDuplicates, bool doImportOverwrite) { WorkflowProcedureSequenceNumber = 1; Guid? lastHoleID = new Guid(); string lastStage = ""; decimal lastFromDepth = -999999; decimal lastToDepth = -999999; bool commitToDB = true; DateTime currentUpdateTimestamp = DateTime.UtcNow; // first set up an assay group object - we can do this through the edm using (var entityObj = new XODBC(connectionString, null)) { Guid agGuid = Guid.NewGuid(); AssayGroup ag = new AssayGroup(); ag.AssayGroupID = agGuid; ag.ProjectID = XODBProjectID; ag.AssayGroupName = "Manual import"; ag.Comment = "From file " + importMap.mapOriginalDataFile; ag.Entered = currentUpdateTimestamp; ag.VersionUpdated = currentUpdateTimestamp; entityObj.AssayGroups.AddObject(ag); if (commitToDB) { entityObj.SaveChanges(); } // set up the assay test columns - one of these for each test type Dictionary<ColumnMap, Guid> resultsColumns = new Dictionary<ColumnMap, Guid>(); Dictionary<Guid, AssayGroupTest> assayGroups = new Dictionary<Guid, AssayGroupTest>(); ColumnMap cmProgram = null; ColumnMap cmStage = null; ColumnMap cmSizeFraction = null; ColumnMap cmWashFraction = null; foreach (ColumnMap cim in importMap.columnMap) { if (cim.targetColumnName.Trim().StartsWith("[RESULT")) { // this is a test category resultsColumns.Add(cim, Guid.NewGuid()); } else if (cim.targetColumnName.Trim().StartsWith("[PROGRAM")) { cmProgram = cim; } else if (cim.targetColumnName.Trim().StartsWith("[STAGE")) { cmStage = cim; } else if (cim.targetColumnName.Trim().StartsWith("[SIZE FRACTION")) { cmSizeFraction = cim; } else if (cim.targetColumnName.Trim().StartsWith("[WASH FRACTION")) { cmWashFraction = cim; } } UpdateStatus("Setting up assay tests ", 2); foreach (KeyValuePair<ColumnMap, Guid> kvp in 
resultsColumns) { ColumnMap cm = kvp.Key; Guid g = kvp.Value; AssayGroupTest xt = new AssayGroupTest(); string ss1 = ""; if (cm.sourceColumnName != null && cm.sourceColumnName.Length > 15) { ss1 = cm.sourceColumnName.Substring(0, 16); } else { ss1 = cm.sourceColumnName; } Guid pid = FindParameter("AssayTypeName", cm.sourceColumnName); xt.ParameterID = pid; xt.AssayTestName = ss1; xt.AssayGroupID = agGuid; xt.AssayGroupTestID = g; xt.VersionUpdated = currentUpdateTimestamp; entityObj.AssayGroupTests.AddObject(xt); assayGroups.Add(g, xt); if (commitToDB) { entityObj.SaveChanges(); } } // iterate through the data lines int ct = 1; int linesRead = 0; SqlConnection connection = null; SqlConnection secondaryConnection = null; //List<string> uniqueDomains = new List<string>(); // get a connection to the database try { connection = new SqlConnection(connectionString); connection.Open(); secondaryConnection = new SqlConnection(connectionString); secondaryConnection.Open(); bool hasDuplicateIntervals = false; SqlTransaction trans; trans = connection.BeginTransaction(); List<SqlCommand> commands = new List<SqlCommand>(); int tb = 0; int transactionBatchLimit = batchSize; // open the filestream and read the first line StreamReader sr = null; try { sr = new StreamReader(fileStream); } catch (Exception ex) { mos.AddErrorMessage("Error getting data stream for input data:\n" + ex.ToString()); mos.finalErrorCode = ModelImportStatus.ERROR_LOADING_FILE; } string line = null; float pct = 0; float bct = 1; // report every X records int repCount = 0; float fNumLines = (float)approxNumLines; Dictionary<string, Guid> holeIDLookups = new Dictionary<string, Guid>(); Dictionary<string, int> columnIDX = new Dictionary<string, int>(); int fkLookupCount = 0; PopulateCMapShortcut("HeaderID", importMap, columnIDX); PopulateCMapShortcut("FromDepth", importMap, columnIDX); PopulateCMapShortcut("ToDepth", importMap, columnIDX); PopulateCMapShortcut("SampleNumber", importMap, columnIDX); 
PopulateCMapShortcut("SampleName", importMap, columnIDX); PopulateCMapShortcut("LabSampleNumber", importMap, columnIDX); PopulateCMapShortcut("LabBatchNumber", importMap, columnIDX); ColumnMap headerCmap = importMap.FindItemsByTargetName("HeaderID"); AssayQueries assayQueries = new AssayQueries(); int seqNum = 1; if (sr != null) { while ((line = sr.ReadLine()) != null) { repCount++; pct = ((float)linesRead / (float)approxNumLines) * 100.0f; bct++; linesRead++; if (ct >= importMap.dataStartLine) { // digest a row of input data List<string> items = parseTestLine(line, importMap.inputDelimiter); Guid holeID = new Guid(); Decimal fromDepth = new Decimal(-9999999999); Decimal toDepth = new Decimal(-9999999999); string sampleNumber = null; string sampleName = null; string labBatchNumber = null; string labsampleNumber = null; // find mapped values by name int idxVal = 0; // -- Get the hole ID foreign key relation bool foundEntry = columnIDX.TryGetValue("HeaderID", out idxVal); bool foundHole = false; string holeName = ""; if (foundEntry) { string lookupByName = "HoleName"; string lookupValue = items[idxVal]; holeName = lookupValue; bool lv = holeIDLookups.ContainsKey(lookupValue); if (!lv) { string headerGUID = ForeignKeyUtils.FindFKValueInOther(lookupValue, headerCmap, secondaryConnection, false, lookupByName, XODBProjectID); if (headerGUID == null) { // this means we have not found the specified records in the header table // Report on issue and skip line } else { foundHole = true; holeID = new Guid(headerGUID); holeIDLookups.Add(lookupValue, holeID); fkLookupCount++; } } else { holeIDLookups.TryGetValue(lookupValue, out holeID); foundHole = true; } } if (!foundHole) { mos.AddErrorMessage("Failed to find hole " + holeName + ". 
Skipping record at line " + linesRead + "."); mos.finalErrorCode = ModelImportStatus.DATA_CONSISTENCY_ERROR; mos.recordsFailed++; continue; } else { bool hasFrom = false; idxVal = 0; foundEntry = columnIDX.TryGetValue("FromDepth", out idxVal); if (foundEntry) { string ii = items[idxVal]; Decimal val = 0; bool isOk = Decimal.TryParse(ii, out val); if (isOk) { fromDepth = val; hasFrom = true; } } bool hasTo = false; idxVal = 0; foundEntry = columnIDX.TryGetValue("ToDepth", out idxVal); if (foundEntry) { string ii = items[idxVal]; Decimal val = 0; bool isOk = Decimal.TryParse(ii, out val); if (isOk) { toDepth = val; hasTo = true; } } List<Sample> duplicateList = null; bool isDuplicateInterval = false; //if (checkForDuplicates) //{ // if (hasFrom && hasTo) // { // // here we need to check that the hole is not duplicated // duplicateList = assayQueries.CheckForDuplicate(holeID, fromDepth, toDepth); // if (duplicateList.Count > 0) // { // isDuplicateInterval = true; // } // } // if (isDuplicateInterval) // { // hasDuplicateIntervals = true; // mos.AddWarningMessage("Duplicate interval for hole " + holeName + " at depth " + fromDepth + " to " + toDepth); // UpdateStatus("Duplicate interval at " + holeName + " " + fromDepth + ", " + toDepth, pct); // if (!doImportOverwrite) // { // mos.recordsFailed++; // continue; // } // } //} idxVal = 0; foundEntry = columnIDX.TryGetValue("SampleNumber", out idxVal); if (foundEntry) { string ii = items[idxVal]; sampleNumber = ii; } idxVal = 0; foundEntry = columnIDX.TryGetValue("SampleName", out idxVal); if (foundEntry) { string ii = items[idxVal]; sampleName = ii; } idxVal = 0; foundEntry = columnIDX.TryGetValue("SampleNumber", out idxVal); if (foundEntry) { string ii = items[idxVal]; labsampleNumber = ii; } idxVal = 0; foundEntry = columnIDX.TryGetValue("SampleNumber", out idxVal); if (foundEntry) { string ii = items[idxVal]; labBatchNumber = ii; } Sample xs = new Sample(); if (isDuplicateInterval == true) { xs = 
duplicateList.First(); } else { xs.SampleID = Guid.NewGuid(); xs.SampleName = sampleName; xs.SampleNumber = sampleNumber; xs.FromDepth = fromDepth; xs.ToDepth = toDepth; xs.HeaderID = holeID; xs.VersionUpdated = currentUpdateTimestamp; entityObj.Samples.AddObject(xs); } // Now iddentify the program, Stage, Size fraction and wash fraction // get the program text string programType = null; if (cmProgram != null) { programType = items[cmProgram.sourceColumnNumber]; } string stage = null; if (cmStage != null) { stage = items[cmStage.sourceColumnNumber]; } string sizeFraction = null; if (cmSizeFraction != null) { sizeFraction = items[cmSizeFraction.sourceColumnNumber]; } string washFraction = null; if (cmWashFraction != null) { washFraction = items[cmWashFraction.sourceColumnNumber]; } // see if the interfal has changed, wherby we will need to reset the sequence ID if (holeID != lastHoleID) { if (fromDepth != lastFromDepth && toDepth != lastToDepth) { // new interval WorkflowProcedureSequenceNumber = 1; } } if (!stage.Trim().Equals(lastStage)) { WorkflowProcedureSequenceNumber = 1; } lastHoleID = holeID; lastFromDepth = fromDepth; lastToDepth = toDepth; lastStage = stage; AssayGroupWorkflow agWorkflowProgram = GetAssayGroupWorkflow(entityObj, programType, agGuid); AssayGroupWorkflowProcedure agWorkflowStage = GetAssayGroupWorkflowProcedure(entityObj, stage, agWorkflowProgram); AssayGroupSubsample agSS = new AssayGroupSubsample(); agSS.AssayGroupID = agGuid; agSS.Sequence = seqNum; agSS.AssayGroupSubsampleID = Guid.NewGuid(); agSS.SampleAntecedentID = xs.SampleID; agSS.OriginalSample = xs; agSS.AssayGroupWorkflowProcedureID = agWorkflowStage.AssayGroupWorkflowProcedureID; agSS.AssayGroupWorkflowProcedure = agWorkflowStage; entityObj.AssayGroupSubsamples.AddObject(agSS); entityObj.SaveChanges(); seqNum++; AssayGroupSubsamplePrecondition agSizeFraction = GetAssayGroupPrecondition(entityObj, sizeFraction, "Size fraction", agSS.AssayGroupSubsampleID); 
AssayGroupSubsamplePrecondition agWashFraction = GetAssayGroupPrecondition(entityObj, washFraction, "Wash fraction", agSS.AssayGroupSubsampleID); // now pick out all the mapped values // iterate over all [ASSAY RESULT] columns bool assayUpdated = false; bool assayAdded = false; foreach (KeyValuePair<ColumnMap, Guid> kvp in resultsColumns) { ColumnMap cm = kvp.Key; Guid g = kvp.Value; // this is the AssayGroupTestID AssayGroupTestResult testResult = new AssayGroupTestResult(); testResult.AssayGroupSubsampleID = agSS.AssayGroupSubsampleID; testResult.AssayGroupTestResultID = Guid.NewGuid(); testResult.AssayGroupTestID = g; testResult.SampleID = xs.SampleID; testResult.VersionUpdated = currentUpdateTimestamp; testResult.LabBatchNumber = labBatchNumber; //testResult.LabSampleNumber = labsampleNumber; Decimal result = new Decimal(); if (items.Count >= cm.sourceColumnNumber) { bool parsedOK = Decimal.TryParse(items[cm.sourceColumnNumber], out result); if (parsedOK) { testResult.LabResult = result; } else { testResult.LabResultText = items[cm.sourceColumnNumber]; } } else { mos.AddWarningMessage("Line " + linesRead + " contains too few columns to read " + cm.sourceColumnName); } entityObj.AssayGroupTestResults.AddObject(testResult); assayAdded = true; } if (assayAdded == true) { mos.recordsAdded++; } if (assayUpdated) { mos.recordsUpdated++; } tb++; } } if (commitToDB) { if (tb == transactionBatchLimit) { entityObj.SaveChanges(); UpdateStatus("Writing assays to DB (" + ct + " entries)", pct); tb = 0; } } ct++; } entityObj.SaveChanges(); } if (hasDuplicateIntervals) { mos.finalErrorCode = ModelImportStatus.DATA_CONSISTENCY_ERROR; } string numFKLookups = "FK lookups " + fkLookupCount; mos.linesReadFromSource = ct - 1; UpdateStatus("Finished writing coal quality data to database.", 0); } catch (Exception ex) { UpdateStatus("Error writing qualities to database ", 0); mos.AddErrorMessage("Error writing data at line " + linesRead + ":\n" + ex.ToString()); mos.finalErrorCode = 
ModelImportStatus.ERROR_WRITING_TO_DB; } finally { try { connection.Close(); secondaryConnection.Close(); fileStream.Close(); } catch (Exception ex) { mos.AddErrorMessage("Error closing conenction to database:\n" + ex.ToString()); mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB; } } mos.linesReadFromSource = linesRead; } }
/// <summary>
/// Expands a printf-style <paramref name="format"/> string against <paramref name="args"/>,
/// emitting the result as an ordered list of literal and formatted segments.
/// </summary>
/// <param name="format">The printf-style format string.</param>
/// <param name="args">Arguments consumed in order by the format specifiers; a '*' width or
/// precision consumes an extra int argument before the value itself.</param>
/// <param name="segments">Receives the output segments, or null when no specifier is found.</param>
/// <returns>true when at least one format specifier was matched; false otherwise.</returns>
/// <exception cref="Exception">Thrown when a '*' width/precision argument is missing or not an
/// int, or when no argument remains for a specifier's value.</exception>
private static bool FormatString(string format, object[] args, out List<string> segments)
{
    segments = null;
    MatchCollection matches = _formatRegex.Matches(format);
    if (matches.Count == 0)
        return false;

    int lastEnd = 0;
    int argPos = 0;
    segments = new List<string>();
    foreach (Match m in matches)
    {
        // Copy the literal text that precedes this specifier verbatim.
        if (lastEnd < m.Index)
        {
            segments.Add(format.Substring(lastEnd, m.Index - lastEnd));
        }

        FormatSpecification fs = new FormatSpecification(m);

        // A '*' width (sentinel int.MinValue) consumes the next argument as the width.
        if (fs.Width == int.MinValue)
        {
            if (!(argPos < args.Length) || !(args[argPos] is int))
                throw new Exception("printf width parameter was not provided");
            fs.Width = (int)args[argPos];
            argPos = argPos + 1;
        }

        // A '*' precision (sentinel int.MinValue) consumes the next argument as the precision.
        if (fs.Precision == int.MinValue)
        {
            if (!(argPos < args.Length) || !(args[argPos] is int))
                throw new Exception("printf precision parameter was not provided");
            fs.Precision = (int)args[argPos];
            argPos = argPos + 1;
        }

        // FIX: guard the value argument too. Previously an exhausted args array produced a
        // bare IndexOutOfRangeException here, unlike the diagnostic errors thrown above.
        if (argPos >= args.Length)
            throw new Exception("printf value parameter was not provided");

        segments.Add(fs.Format(args[argPos]));
        argPos++;
        lastEnd = m.Index + m.Value.Length;
    }

    // Trailing literal text after the final specifier.
    if (lastEnd < format.Length)
    {
        segments.Add(format.Substring(lastEnd));
    }
    return true;
}
/// <summary>
/// Builds a <see cref="FormatSpecification"/> from a regex match over a single printf-style
/// specifier. Group layout (from _formatRegex): 2 = flags, 3 = width, 4 = precision,
/// 5 = length modifier, 7 = raw format string, 8 = conversion specifier character.
/// </summary>
/// <param name="match">A match produced by the format-specifier regex.</param>
/// <returns>The populated specification.</returns>
/// <exception cref="Exception">Thrown when the width or precision field is not a valid int.</exception>
private static FormatSpecification ParseFormatSpec(Match match)
{
    FormatSpecification fs = new FormatSpecification();

    // Flags field: any combination of '-', '+', '#', '0', ',' and ' '.
    string str = match.Groups[2].Value;
    if (str.Contains("-")) fs.Flags |= Flags.LEFT_ALIGNED;
    if (str.Contains("+")) fs.Flags |= Flags.FORCE_SIGN;
    if (str.Contains("#")) fs.Flags |= Flags.ALTERNATE;
    if (str.Contains("0")) fs.Flags |= Flags.LEADING_ZERO_FILL;
    if (str.Contains(",")) fs.Flags |= Flags.GROUP_THOUSANDS;
    if (str.Contains(" "))
    {
        if ((fs.Flags & Flags.FORCE_SIGN) == 0)
        {
            // '+' overrides the invisible (space) sign, so set INVISIBLE_PLUS_SIGN
            // only when FORCE_SIGN is absent.
            // FIX: use |= rather than =; plain assignment discarded every flag
            // parsed above (e.g. "%- d" lost LEFT_ALIGNED).
            fs.Flags |= Flags.INVISIBLE_PLUS_SIGN;
        }
    }

    // Width field: "*" defers to a preceding int argument (sentinel int.MinValue).
    str = match.Groups[3].Value;
    if (!string.IsNullOrEmpty(str))
    {
        if (str == "*")
        {
            fs.Width = int.MinValue;
        }
        else
        {
            try
            {
                fs.Width = int.Parse(str);
            }
            catch (Exception ex)
            {
                throw new Exception("printf invalid width parameter", ex);
            }
        }
    }

    // Precision field: same "*" convention as width.
    str = match.Groups[4].Value;
    if (!string.IsNullOrEmpty(str))
    {
        if (str == "*")
        {
            fs.Precision = int.MinValue;
        }
        else
        {
            try
            {
                fs.Precision = int.Parse(str);
            }
            catch (Exception ex)
            {
                throw new Exception("printf invalid precision parameter", ex);
            }
        }
    }

    // Length modifier: 'h' = short, 'l' = long; anything else maps to NONE.
    str = match.Groups[5].Value;
    if (!string.IsNullOrEmpty(str))
    {
        switch (str)
        {
            case "h": fs.Length = LengthType.SHORT; break;
            case "l": fs.Length = LengthType.LONG; break;
            default: fs.Length = LengthType.NONE; break;
        }
    }

    // Raw format string. (The property name "FomratString" is a typo, but it is part of
    // FormatSpecification's declared API and must not be changed here.)
    fs.FomratString = match.Groups[7].Value;

    // Conversion specifier character. NOTE(review): assumes the regex guarantees a
    // non-empty group 8 — an empty group would throw on str[0]; confirm against _formatRegex.
    str = match.Groups[8].Value;
    switch (str[0])
    {
        case 'd':
        case 'i': fs.Specifier = SpecifierType.SIGNED_INT; break;          // d, i
        case 'u': fs.Specifier = SpecifierType.UNSIGNED_INT; break;        // u
        case 'o': fs.Specifier = SpecifierType.UNSIGNED_OCT; break;        // o
        case 'x': fs.Specifier = SpecifierType.UNSINGED_HEX; break;        // x (enum member typo is project-declared)
        case 'X': fs.Specifier = SpecifierType.UNSIGNED_HEX_UPPER; break;  // X
        case 'f': fs.Specifier = SpecifierType.FLOAT; break;               // f
        case 'F': fs.Specifier = SpecifierType.FLOAT_UPPER; break;         // F
        case 'e': fs.Specifier = SpecifierType.SCIENTIFIC; break;          // e
        case 'E': fs.Specifier = SpecifierType.SCIENTIFIC_UPPER; break;    // E
        case 'g': fs.Specifier = SpecifierType.GENERAL; break;             // g
        case 'G': fs.Specifier = SpecifierType.GENERAL_UPPER; break;       // G
        case 'c': fs.Specifier = SpecifierType.CHAR; break;                // c
        case 's': fs.Specifier = SpecifierType.STRING; break;              // s
        case 't': fs.Specifier = SpecifierType.DATE_TIME; break;           // t
        case '%': fs.Specifier = SpecifierType.PERCENT; break;             // %
    }
    return fs;
}