/// <summary>
/// Returns the first existing AssayGroupTestResult for <paramref name="sampleID"/> whose
/// owning AssayGroupTest measures the same parameter as the test identified by
/// <paramref name="assayGroupTestID"/> (looked up in <paramref name="assayGroups"/>).
/// The list is empty when no duplicate exists and contains at most one entry.
/// </summary>
/// <param name="assayGroups">Cache of known tests keyed by AssayGroupTestID; must contain <paramref name="assayGroupTestID"/>.</param>
/// <param name="sampleID">Sample whose existing results are scanned.</param>
/// <param name="assayGroupTestID">Test whose ParameterID defines what counts as a duplicate.</param>
internal List<AssayGroupTestResult> GetDuplicateResult(Dictionary<Guid, AssayGroupTest> assayGroups, Guid sampleID, Guid assayGroupTestID)
{
    using (var entityObj = new XODBC(BaseImportTools.XSTRING, null))
    {
        List<AssayGroupTestResult> resultData = new List<AssayGroupTestResult>();
        AssayGroupTest targetTest = assayGroups[assayGroupTestID];
        Guid testParamID = (Guid)targetTest.ParameterID;
        IQueryable<AssayGroupTestResult> res = entityObj.AssayGroupTestResults.Where(c => c.SampleID == sampleID);
        bool foundDupe = false;
        foreach (AssayGroupTestResult existingResult in res)
        {
            // FIX: the original filtered on c.AssayGroupID (the parent group's ID) against
            // existingResult.AssayGroupTestID (a test's primary key), so the join could never
            // match the intended row. Join on the test's own key, as the sibling
            // GetDuplicateResult(Guid, string) overload does.
            IQueryable<AssayGroupTest> res2 = entityObj.AssayGroupTests.Where(c => c.AssayGroupTestID == existingResult.AssayGroupTestID);
            foreach (AssayGroupTest agt in res2)
            {
                if (agt.ParameterID == testParamID)
                {
                    foundDupe = true;
                    break;
                }
            }
            if (foundDupe)
            {
                // Only the first duplicate is reported.
                resultData.Add(existingResult);
                break;
            }
        }
        return resultData;
    }
}
/// <summary>
/// Smoke-test for spatially-enabled mapping: opens a suppressed transaction scope and a
/// context, then fetches the first Location row (result is intentionally unused).
/// </summary>
public void TestSpatial()
{
    using (new TransactionScope(TransactionScopeOption.Suppress))
    {
        using (var context = new XODBC(_users.ApplicationConnectionString, null))
        {
            // Materialise a single Location (or null) just to exercise the query pipeline.
            var s = context.Locations.FirstOrDefault();
            // (An earlier DbGeography experiment lived here: it inserted a University with a
            //  POINT location and queried the nearest one by Distance. Left out as dead code.)
        }
    }
}
/// <summary>
/// Builds a CollarInfo (name plus any available UTM easting/northing and elevation)
/// for every drill-hole header belonging to the given project.
/// </summary>
/// <param name="currentSelectedProject">Project whose headers are read.</param>
/// <returns>One CollarInfo per header; coordinate fields are set only when non-null in the DB.</returns>
internal static List<CollarInfo> FindCollarsForProject(Guid currentSelectedProject)
{
    var collars = new List<CollarInfo>();
    using (var entityObj = new XODBC(BaseImportTools.XSTRING, null))
    {
        var headers = entityObj.Headers.Where(h => h.ProjectID == currentSelectedProject);
        foreach (Header header in headers)
        {
            var info = new CollarInfo();
            info.Name = header.HoleName;
            // Copy each coordinate only when present; CollarInfo keeps its defaults otherwise.
            if (header.EastingUtm != null)
            {
                info.Easting = (double)header.EastingUtm;
            }
            if (header.NorthingUtm != null)
            {
                info.Northing = (double)header.NorthingUtm;
            }
            if (header.Elevation != null)
            {
                info.RL = (double)header.Elevation;
            }
            collars.Add(info);
        }
        return collars;
    }
}
/// <summary>
/// Looks up the dictionary unit whose trimmed StandardUnitName equals <paramref name="theUnit"/>.
/// </summary>
/// <param name="theUnit">Unit name to match (compared against trimmed stored names).</param>
/// <returns>The matching DictionaryUnit, or null when none exists.</returns>
internal DictionaryUnit FindUnits(string theUnit)
{
    // FIX: the original leaked the context (no using/Dispose), unlike every other
    // query helper in this class.
    using (var entityObj = new XODBC(BaseImportTools.XSTRING, null))
    {
        return (from c in entityObj.DictionaryUnits
                where c.StandardUnitName.Trim().Equals(theUnit)
                select c).FirstOrDefault();
    }
}
/// <summary>
/// Returns the first existing AssayGroupTestResult for <paramref name="sampleID"/> whose
/// owning test's (trimmed) AssayTestName equals <paramref name="columnName"/>.
/// Results are cached per sample / per test in resultsCache1 and resultsCache2 to avoid
/// re-querying during bulk imports. The list holds at most one entry.
/// </summary>
internal List<AssayGroupTestResult> GetDuplicateResult(Guid sampleID, string columnName)
{
    using (var entityObj = new XODBC(BaseImportTools.XSTRING, null))
    {
        List<AssayGroupTestResult> resultData = new List<AssayGroupTestResult>();
        bool foundDupe = false;
        IQueryable<AssayGroupTestResult> res = null;
        if (resultsCache1.ContainsKey(sampleID))
        {
            res = resultsCache1[sampleID];
        }
        else
        {
            // FIX: materialise before caching. The original cached the deferred IQueryable,
            // which stays bound to this context; after the using block disposes it, a cache
            // hit on a later call would enumerate against a disposed ObjectContext.
            res = entityObj.AssayGroupTestResults.Where(c => c.SampleID == sampleID).ToList().AsQueryable();
            resultsCache1.Add(sampleID, res);
        }
        foreach (AssayGroupTestResult xx2 in res)
        {
            Guid assayGroupTestOfSample = xx2.AssayGroupTestID;
            // now query the assay group tests for this sample's result
            IQueryable<AssayGroupTest> res2 = null;
            if (resultsCache2.ContainsKey(assayGroupTestOfSample))
            {
                res2 = resultsCache2[assayGroupTestOfSample];
            }
            else
            {
                // Same disposed-context fix as above: detach from the context before caching.
                res2 = entityObj.AssayGroupTests.Where(c => c.AssayGroupTestID == assayGroupTestOfSample).ToList().AsQueryable();
                resultsCache2.Add(assayGroupTestOfSample, res2);
            }
            foreach (AssayGroupTest agt in res2)
            {
                // these are the assay test groups
                if (agt.AssayTestName.Trim().Equals(columnName))
                {
                    foundDupe = true;
                    break;
                }
            }
            if (foundDupe)
            {
                resultData.Add(xx2);
                break;
            }
        }
        return resultData;
    }
}
/// <summary>
/// Resolves a hole name within a project to its HeaderID.
/// Returns Guid.Empty when no header matches; when several match, the last row wins.
/// </summary>
/// <param name="headerNameItem">Hole name to look up (exact Equals match).</param>
/// <param name="currentSelectedProject">Project the header must belong to.</param>
internal static Guid FindHeaderGuid(string headerNameItem, Guid currentSelectedProject)
{
    Guid headerGuid = new Guid();
    using (var entityObj = new XODBC(BaseImportTools.XSTRING, null))
    {
        var matches = entityObj.Headers.Where(
            h => (h.ProjectID == currentSelectedProject) && (h.HoleName.Equals(headerNameItem)));
        foreach (Header header in matches)
        {
            headerGuid = header.HeaderID; // last match wins
        }
        return headerGuid;
    }
}
/// <summary>
/// Returns the SurveyIDs of every survey row recorded for the given hole at exactly
/// the given depth (empty list when there is no duplicate).
/// </summary>
/// <param name="holeID">HeaderID of the drill hole.</param>
/// <param name="depth">Exact depth to match.</param>
/// <param name="eo">Open context to query; caller owns its lifetime.</param>
internal List<Guid> CheckForDuplicate(Guid holeID, decimal depth, XODBC eo)
{
    return eo.Surveys
        .Where(s => s.HeaderID == holeID && s.Depth == depth)
        .Select(s => s.SurveyID)
        .ToList();
}
/// <summary>
/// Checks whether a sample already exists for the given hole over the exact
/// [fromDepth, toDepth] interval.
/// </summary>
/// <returns>A list containing the first matching Sample, or an empty list.</returns>
internal List<Sample> CheckForDuplicate(Guid holeID, decimal fromDepth, decimal toDepth)
{
    using (var entityObj = new XODBC(BaseImportTools.XSTRING, null))
    {
        List<Sample> resultList = new List<Sample>();
        // FIX: the original ran Count() (a whole extra DB query) and kept a dead 'found'
        // flag that was never read; a single FirstOrDefault does the same work in one query.
        Sample first = entityObj.Samples
            .Where(c => c.HeaderID == holeID && c.FromDepth == fromDepth && c.ToDepth == toDepth)
            .FirstOrDefault();
        if (first != null)
        {
            resultList.Add(first);
        }
        return resultList;
    }
}
/// <summary>
/// Builds a hole-name → HeaderID lookup for every header in the given project.
/// When the same hole name appears more than once, the first row encountered wins.
/// </summary>
/// <param name="XODBProjectID">Project whose headers are indexed.</param>
internal static Dictionary<string, Guid> FindHeaderGuidsForProject(Guid XODBProjectID)
{
    var lookup = new Dictionary<string, Guid>();
    using (var entityObj = new XODBC(BaseImportTools.XSTRING, null))
    {
        foreach (Header header in entityObj.Headers.Where(h => (h.ProjectID == XODBProjectID)))
        {
            string holeName = header.HoleName;
            // first occurrence of a hole name wins; later duplicates are ignored
            if (!lookup.ContainsKey(holeName))
            {
                lookup.Add(holeName, header.HeaderID);
            }
        }
        return lookup;
    }
}
/// <summary>
/// Finds an assay group test whose trimmed AssayTestName equals the trimmed input.
/// Returns null when none matches; when several match, the last row wins.
/// </summary>
/// <param name="p">Test name to search for.</param>
private AssayGroupTest FindExistingAssayGroupTest(string p)
{
    using (var entityObj = new XODBC(BaseImportTools.XSTRING, null))
    {
        AssayGroupTest match = null;
        var candidates = entityObj.AssayGroupTests.Where(c => c.AssayTestName.Trim().Equals(p.Trim()));
        foreach (AssayGroupTest candidate in candidates)
        {
            match = candidate; // last match wins
        }
        return match;
    }
}
/// <summary>
/// Returns the first Parameter whose trimmed type and name both match, or null.
/// </summary>
/// <param name="entityObj">Open context to query; caller owns its lifetime.</param>
/// <param name="paramType">Parameter type to match (against trimmed stored values).</param>
/// <param name="paramName">Parameter name to match (against trimmed stored values).</param>
private Parameter GetParameterIDFor(XODBC entityObj, string paramType, string paramName)
{
    var candidates = entityObj.Parameters.Where(
        c => c.ParameterType.Trim().Equals(paramType) && c.ParameterName.Trim().Equals(paramName));
    foreach (Parameter candidate in candidates)
    {
        return candidate; // first match wins
    }
    return null;
}
/// <summary>
/// Imports down-hole survey records from a delimited text stream into the table named by
/// <paramref name="importMap"/>, building raw INSERT statements per line. Hole names are
/// resolved to HeaderIDs (with an in-memory cache); rows whose hole cannot be resolved are
/// skipped and reported on <paramref name="mos"/>. Optionally detects duplicate depths and
/// either skips or (when <paramref name="overwrite"/> is set) hands the rejected rows to
/// OverwriteSurveyRecord at the end.
/// </summary>
/// <param name="mos">Accumulates counts, warnings, errors and the final error code.</param>
/// <param name="fileStream">Input data stream; closed by this method in the finally block.</param>
/// <param name="importMap">Column mapping, delimiter, target table and data start line.</param>
/// <param name="batchSize">Intended transaction batch size (batching is currently disabled).</param>
/// <param name="UpdateStatus">Progress callback (message, percent complete).</param>
/// <param name="approxNumLines">Estimated line count, used only for percentage reporting.</param>
/// <param name="connectionString">Database connection string (used for three connections/contexts).</param>
/// <param name="XODBProjectID">Project whose headers are used for hole-name resolution.</param>
/// <param name="overwrite">When true, duplicate-depth rows overwrite existing records at the end.</param>
/// <param name="checkForDuplicates">When true and a Depth column is mapped, check each row for an existing depth.</param>
internal void AddSurveyData(ModelImportStatus mos, Stream fileStream, FormatSpecification.ImportDataMap importMap, int batchSize, Action<string, double> UpdateStatus, int approxNumLines, string connectionString, Guid XODBProjectID, bool overwrite, bool checkForDuplicates)
{
    bool duplicateFound = false;
    // iterate through the data lines
    int ct = 1;          // 1-based input line counter, compared against importMap.dataStartLine
    int linesRead = 0;   // total lines consumed, used in messages and mos.linesReadFromSource
    SqlConnection connection = null;          // used for the INSERTs
    SqlConnection secondaryConnection = null; // used for FK lookups while the first is busy
    // survey rows rejected as duplicates, keyed by the existing SurveyID they clash with
    Dictionary<Guid, List<string>> rejectedLines = new Dictionary<Guid, List<string>>();
    Dictionary<string, string> holeWarningMessages = new Dictionary<string, string>();
    using (var entityObj = new XODBC(connectionString, null))
    {
        //entityObj.Configuration.AutoDetectChangesEnabled = false;
        SurveyQueries sq = new SurveyQueries();
        // get a connection to the database
        try
        {
            connection = new SqlConnection(connectionString);
            connection.Open();
            secondaryConnection = new SqlConnection(connectionString);
            secondaryConnection.Open();
            // NOTE(review): numCommits, trans and commands are leftovers of the disabled
            // transaction-batching code (see commented blocks below); inserts currently
            // auto-commit one by one.
            int numCommits = 0;
            SqlTransaction trans;
            //trans = connection.BeginTransaction(System.Data.IsolationLevel.Snapshot);
            List<SqlCommand> commands = new List<SqlCommand>();
            int tb = 0;
            int transactionBatchLimit = batchSize;
            // open the filestream and read the first line
            StreamReader sr = null;
            try
            {
                sr = new StreamReader(fileStream);
            }
            catch (Exception ex)
            {
                mos.AddErrorMessage("Error getting data stream for input data:\n" + ex.ToString());
                mos.finalErrorCode = ModelImportStatus.ERROR_LOADING_FILE;
            }
            string line = null;
            float bct = 1;
            // report every X blocks
            int repCount = 0;
            //int reportOnBlock = 1000;
            float fNumLines = (float)approxNumLines;
            // get the column containing the hole name
            Dictionary<string, Guid> holeIDLookups = new Dictionary<string, Guid>(); // hole name -> HeaderID cache
            int numberOfHolesAdded = 0;
            ColumnMap headerCmap = importMap.FindItemsByTargetName("HeaderID");
            ColumnMap depthCmap = importMap.FindItemsByTargetName("Depth");
            float percentComplete = 0;
            int headerIDX = headerCmap.sourceColumnNumber;
            if (sr != null)
            {
                while ((line = sr.ReadLine()) != null)
                {
                    repCount++;
                    percentComplete = ((float)ct / approxNumLines) * 100.0f;
                    bct++;
                    linesRead++;
                    // skip header/preamble lines before the configured data start line
                    if (ct >= importMap.dataStartLine)
                    {
                        string statementPart1 = "INSERT INTO " + importMap.mapTargetPrimaryTable + " ";
                        string clauseValues = "";     // comma-separated target column list
                        string clauseParameters = ""; // comma-separated literal values
                        List<string> items = parseTestLine(line, importMap.inputDelimiter);
                        // using the column map, pick out the hole name field and see if it is in the database already
                        string headerNameItem = items[headerIDX];
                        // resolve hole name -> HeaderID, consulting the cache first
                        bool foundHole = false;
                        Guid holeID = new Guid();
                        bool lv = holeIDLookups.ContainsKey(headerNameItem);
                        if (!lv)
                        {
                            string headerGUID = ForeignKeyUtils.FindFKValueInOther(headerNameItem, headerCmap, secondaryConnection, false, "HoleName", XODBProjectID);
                            if (headerGUID == null)
                            {
                                // this means we have not found the specified records in the header table
                                // Report on issue and skip line
                            }
                            else
                            {
                                foundHole = true;
                                holeID = new Guid(headerGUID);
                                holeIDLookups.Add(headerNameItem, holeID);
                            }
                        }
                        else
                        {
                            holeIDLookups.TryGetValue(headerNameItem, out holeID);
                            foundHole = true;
                        }
                        if (!foundHole)
                        {
                            mos.AddWarningMessage("Failed to find hole " + headerNameItem + ". Skipping record at line " + linesRead + ".");
                            mos.finalErrorCode = ModelImportStatus.DATA_CONSISTENCY_ERROR;
                            mos.recordsFailed++;
                            continue;
                        }
                        if (checkForDuplicates == true && depthCmap != null)
                        {
                            // check for duplicate depths
                            string d = items[depthCmap.sourceColumnNumber];
                            decimal dt = 0;
                            bool isParsed = decimal.TryParse(d, out dt);
                            if (isParsed)
                            {
                                List<Guid> rr = sq.CheckForDuplicate(holeID, dt, secondaryConnection);
                                //List<Guid> rr = sq.CheckForDuplicate(holeID, dt, entityObj);
                                if (rr.Count > 0)
                                {
                                    duplicateFound = true;
                                    // remember the clashing row (last duplicate in the file wins)
                                    if (!rejectedLines.ContainsKey(rr.First()))
                                    {
                                        rejectedLines.Add(rr.First(), items);
                                        mos.AddWarningMessage("Duplicate depth found in survey data for hole " + headerNameItem + " at depth " + d + " on line " + linesRead);
                                        UpdateStatus("Duplicate depth found in survey data for hole " + headerNameItem + " at depth " + d, percentComplete);
                                    }
                                    else
                                    {
                                        mos.AddWarningMessage("Duplicate depth found in survey data file for hole " + headerNameItem + " at depth " + d + " on line " + linesRead);
                                        UpdateStatus("Duplicate depth found in survey data file for hole " + headerNameItem + " at depth " + d, percentComplete);
                                        rejectedLines[rr.First()] = items;
                                    }
                                    if (!overwrite)
                                    {
                                        mos.recordsFailed++;
                                    }
                                    continue;
                                }
                            }
                        }
                        #region mappsearch
                        // now pick out all the mapped values
                        foreach (ColumnMap cmap in importMap.columnMap)
                        {
                            if (cmap.targetColumnName.Trim().Equals("HeaderID"))
                            {
                                // HeaderID comes from the resolved hole GUID, not the raw file value
                                string targetCol = cmap.targetColumnName;
                                string targetTable = cmap.targetColumnTable;
                                clauseValues += "" + targetTable + "." + targetCol + ",";
                                clauseParameters += "\'" + holeID.ToString() + "\',";
                            }
                            else
                            {
                                bool isFKColumn = cmap.hasFKRelation;
                                int colID = cmap.sourceColumnNumber;
                                string columnValue = cmap.defaultValue; // default when no source column is mapped
                                if (colID >= 0)
                                {
                                    columnValue = items[colID];
                                }
                                string targetCol = cmap.targetColumnName;
                                string targetTable = cmap.targetColumnTable;
                                clauseValues += "" + targetTable + "." + targetCol + ",";
                                if (isFKColumn)
                                {
                                    // go and search for the appropriate value from the foreign key table
                                    string newValue = ForeignKeyUtils.FindFKValueInDictionary(columnValue, cmap, secondaryConnection, true);
                                    columnValue = newValue;
                                    if (newValue != null && newValue.Trim().Length > 0)
                                    {
                                        clauseParameters += "\'" + columnValue + "\',";
                                    }
                                    else
                                    {
                                        clauseParameters += "NULL,";
                                    }
                                }
                                else
                                {
                                    if (cmap.importDataType.Equals(ImportDataMap.NUMERICDATATYPE))
                                    {
                                        // numeric columns: "-" or empty means default (or NULL); value is unquoted
                                        if (columnValue.Equals("-") || columnValue.Equals(""))
                                        {
                                            if (cmap.defaultValue != null && cmap.defaultValue.Length > 0)
                                            {
                                                columnValue = cmap.defaultValue;
                                            }
                                            else
                                            {
                                                columnValue = "NULL";
                                            }
                                        }
                                        clauseParameters += columnValue + ",";
                                    }
                                    else
                                    {
                                        //if (columnValue.Equals("-"))
                                        //{
                                        //    if (cmap.defaultValue != null && cmap.defaultValue.Length > 0)
                                        //    {
                                        //        columnValue = cmap.defaultValue;
                                        //    }
                                        //}
                                        // NOTE(review): values are concatenated into the SQL text rather than
                                        // parameterized - safe only while inputs are trusted import files.
                                        clauseParameters += "\'" + columnValue + "\',";
                                    }
                                }
                            }
                        }
                        #endregion
                        // now just a hack to remove the final comma from the query
                        clauseParameters = clauseParameters.Substring(0, clauseParameters.Length - 1);
                        clauseValues = clauseValues.Substring(0, clauseValues.Length - 1);
                        string commandText = statementPart1 + "(" + clauseValues + ") VALUES (" + clauseParameters + ")";
                        //SqlCommand sqc = new SqlCommand(commandText, connection, trans);
                        SqlCommand sqc = new SqlCommand(commandText, connection);
                        numberOfHolesAdded++;
                        if (commitToDB)
                        {
                            try
                            {
                                sqc.ExecuteNonQuery();
                            }
                            catch (Exception ex)
                            {
                                // a single bad row is reported but does not abort the import
                                mos.AddErrorMessage("Failed to insert items on line " + linesRead + ".");
                                UpdateStatus("Failed to insert items on line " + linesRead + ".", percentComplete);
                                mos.recordsFailed++;
                                mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
                            }
                        }
                        UpdateStatus("Updating from line " + linesRead, percentComplete);
                        tb++;
                        //if (tb == transactionBatchLimit)
                        //{
                        //    // commit batch, then renew the transaction
                        //    if (commitToDB)
                        //    {
                        //        trans.Commit();
                        //        numCommits++;
                        //        // trans = null;
                        //        trans = connection.BeginTransaction(System.Data.IsolationLevel.Snapshot);
                        //    }
                        //    // reset counter
                        //    tb = 0;
                        //}
                    }
                    ct++;
                }
            }
            if (tb > 0)
            {
                //if (commitToDB)
                //{
                //    trans.Commit();
                //}
                numCommits++;
            }
            mos.recordsAdded = numberOfHolesAdded;
            UpdateStatus("Finished writing records to database ", 100.0);
        }
        catch (Exception ex)
        {
            UpdateStatus("Error writing records to database ", 0);
            mos.AddErrorMessage("Error writing records data at line " + linesRead + ":\n" + ex.ToString());
            mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
        }
        finally
        {
            try
            {
                connection.Close();
                secondaryConnection.Close();
                fileStream.Close();
            }
            catch (Exception ex)
            {
                mos.AddErrorMessage("Error closing conenction to database:\n" + ex.ToString());
                mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
            }
        }
        // second pass: overwrite the existing rows that clashed with rows from this file
        if (duplicateFound == true && overwrite == true)
        {
            OverwriteSurveyRecord(mos, rejectedLines, importMap, connectionString, XODBProjectID, UpdateStatus, holeWarningMessages);
        }
        foreach (KeyValuePair<string, string> kvp in holeWarningMessages)
        {
            string v = kvp.Value;
            mos.AddWarningMessage(v);
        }
        mos.linesReadFromSource = linesRead;
    }
}
/// <summary>
/// Get-or-create: returns the AssayGroupWorkflow whose trimmed name matches
/// <paramref name="programType"/> within the given assay group (last match wins),
/// creating and saving a new one when none exists.
/// </summary>
/// <param name="entityObj">Open context to query and save through; caller owns its lifetime.</param>
/// <param name="programType">Workflow name (compared trimmed; stored untrimmed on create).</param>
/// <param name="assayGroupID">Owning assay group, or null.</param>
private AssayGroupWorkflow GetAssayGroupWorkflow(XODBC entityObj, string programType, Guid? assayGroupID)
{
    AssayGroupWorkflow workflow = null;
    var matches = entityObj.AssayGroupWorkflows.Where(
        c => c.WorkflowName.Trim().Equals(programType.Trim()) && c.AssayGroupID == assayGroupID);
    foreach (AssayGroupWorkflow candidate in matches)
    {
        workflow = candidate; // last match wins
    }
    if (workflow != null)
    {
        return workflow;
    }
    // Not found: create and persist a new workflow record.
    workflow = new AssayGroupWorkflow();
    workflow.AssayGroupID = assayGroupID;
    workflow.AssayGroupWorkflowID = Guid.NewGuid();
    workflow.WorkflowName = programType;
    entityObj.AssayGroupWorkflows.AddObject(workflow);
    entityObj.SaveChanges();
    return workflow;
}
/// <summary>
/// Imports coal-quality assay data from a delimited text stream via the EF context.
/// Creates one AssayGroup for the whole import, one AssayGroupTest per mapped [RESULT…]
/// column, and then, per data row: resolves the hole, creates a Sample, resolves/creates
/// the workflow (program) and procedure (stage), creates a subsample with size/wash
/// fraction preconditions, and writes one AssayGroupTestResult per result column.
/// Changes are saved in batches of <paramref name="batchSize"/> rows.
/// </summary>
/// <param name="mos">Accumulates counts, warnings, errors and the final error code.</param>
/// <param name="fileStream">Input data stream; closed in the finally block.</param>
/// <param name="importMap">Column mapping, delimiter and data start line.</param>
/// <param name="batchSize">Rows per SaveChanges batch.</param>
/// <param name="UpdateStatus">Progress callback (message, percent complete).</param>
/// <param name="approxNumLines">Estimated line count, used only for percentage reporting.</param>
/// <param name="connectionString">Database connection string.</param>
/// <param name="XODBProjectID">Project owning the assay group and headers.</param>
/// <param name="checkForDuplicates">Currently unused - the duplicate-interval check is commented out.</param>
/// <param name="doImportOverwrite">Currently unused for the same reason.</param>
internal void AddCoalQualityData(ModelImportStatus mos, Stream fileStream, FormatSpecification.ImportDataMap importMap, int batchSize, Action<string, double> UpdateStatus, int approxNumLines, string connectionString, Guid XODBProjectID, bool checkForDuplicates, bool doImportOverwrite)
{
    WorkflowProcedureSequenceNumber = 1;
    // trackers used to reset the workflow sequence when the interval or stage changes
    Guid? lastHoleID = new Guid();
    string lastStage = "";
    decimal lastFromDepth = -999999;
    decimal lastToDepth = -999999;
    bool commitToDB = true;
    // one timestamp for every record created by this import run
    DateTime currentUpdateTimestamp = DateTime.UtcNow;
    // first set up an assay group object - we can do this through the edm
    using (var entityObj = new XODBC(connectionString, null))
    {
        Guid agGuid = Guid.NewGuid();
        AssayGroup ag = new AssayGroup();
        ag.AssayGroupID = agGuid;
        ag.ProjectID = XODBProjectID;
        ag.AssayGroupName = "Manual import";
        ag.Comment = "From file " + importMap.mapOriginalDataFile;
        ag.Entered = currentUpdateTimestamp;
        ag.VersionUpdated = currentUpdateTimestamp;
        entityObj.AssayGroups.AddObject(ag);
        if (commitToDB)
        {
            entityObj.SaveChanges();
        }
        // set up the assay test columns - one of these for each test type
        Dictionary<ColumnMap, Guid> resultsColumns = new Dictionary<ColumnMap, Guid>(); // [RESULT…] column -> new AssayGroupTestID
        Dictionary<Guid, AssayGroupTest> assayGroups = new Dictionary<Guid, AssayGroupTest>();
        ColumnMap cmProgram = null;
        ColumnMap cmStage = null;
        ColumnMap cmSizeFraction = null;
        ColumnMap cmWashFraction = null;
        // classify the mapped columns by their special target-name prefixes
        foreach (ColumnMap cim in importMap.columnMap)
        {
            if (cim.targetColumnName.Trim().StartsWith("[RESULT"))
            {
                // this is a test category
                resultsColumns.Add(cim, Guid.NewGuid());
            }
            else if (cim.targetColumnName.Trim().StartsWith("[PROGRAM"))
            {
                cmProgram = cim;
            }
            else if (cim.targetColumnName.Trim().StartsWith("[STAGE"))
            {
                cmStage = cim;
            }
            else if (cim.targetColumnName.Trim().StartsWith("[SIZE FRACTION"))
            {
                cmSizeFraction = cim;
            }
            else if (cim.targetColumnName.Trim().StartsWith("[WASH FRACTION"))
            {
                cmWashFraction = cim;
            }
        }
        UpdateStatus("Setting up assay tests ", 2);
        // create an AssayGroupTest per result column up front
        foreach (KeyValuePair<ColumnMap, Guid> kvp in resultsColumns)
        {
            ColumnMap cm = kvp.Key;
            Guid g = kvp.Value;
            AssayGroupTest xt = new AssayGroupTest();
            // test name is truncated to 16 characters (presumably a column-length limit - TODO confirm)
            string ss1 = "";
            if (cm.sourceColumnName != null && cm.sourceColumnName.Length > 15)
            {
                ss1 = cm.sourceColumnName.Substring(0, 16);
            }
            else
            {
                ss1 = cm.sourceColumnName;
            }
            Guid pid = FindParameter("AssayTypeName", cm.sourceColumnName);
            xt.ParameterID = pid;
            xt.AssayTestName = ss1;
            xt.AssayGroupID = agGuid;
            xt.AssayGroupTestID = g;
            xt.VersionUpdated = currentUpdateTimestamp;
            entityObj.AssayGroupTests.AddObject(xt);
            assayGroups.Add(g, xt);
            if (commitToDB)
            {
                entityObj.SaveChanges();
            }
        }
        // iterate through the data lines
        int ct = 1;         // 1-based input line counter, compared against importMap.dataStartLine
        int linesRead = 0;  // total lines consumed
        SqlConnection connection = null;
        SqlConnection secondaryConnection = null; // used for FK (hole name) lookups
        //List<string> uniqueDomains = new List<string>();
        // get a connection to the database
        try
        {
            connection = new SqlConnection(connectionString);
            connection.Open();
            secondaryConnection = new SqlConnection(connectionString);
            secondaryConnection.Open();
            // NOTE(review): hasDuplicateIntervals can never become true while the
            // duplicate-interval check below stays commented out.
            bool hasDuplicateIntervals = false;
            SqlTransaction trans;
            trans = connection.BeginTransaction();
            List<SqlCommand> commands = new List<SqlCommand>();
            int tb = 0; // rows since the last SaveChanges batch
            int transactionBatchLimit = batchSize;
            // open the filestream and read the first line
            StreamReader sr = null;
            try
            {
                sr = new StreamReader(fileStream);
            }
            catch (Exception ex)
            {
                mos.AddErrorMessage("Error getting data stream for input data:\n" + ex.ToString());
                mos.finalErrorCode = ModelImportStatus.ERROR_LOADING_FILE;
            }
            string line = null;
            float pct = 0;
            float bct = 1;
            // report every X records
            int repCount = 0;
            float fNumLines = (float)approxNumLines;
            Dictionary<string, Guid> holeIDLookups = new Dictionary<string, Guid>(); // hole name -> HeaderID cache
            Dictionary<string, int> columnIDX = new Dictionary<string, int>();       // target name -> source column index
            int fkLookupCount = 0;
            PopulateCMapShortcut("HeaderID", importMap, columnIDX);
            PopulateCMapShortcut("FromDepth", importMap, columnIDX);
            PopulateCMapShortcut("ToDepth", importMap, columnIDX);
            PopulateCMapShortcut("SampleNumber", importMap, columnIDX);
            PopulateCMapShortcut("SampleName", importMap, columnIDX);
            PopulateCMapShortcut("LabSampleNumber", importMap, columnIDX);
            PopulateCMapShortcut("LabBatchNumber", importMap, columnIDX);
            ColumnMap headerCmap = importMap.FindItemsByTargetName("HeaderID");
            AssayQueries assayQueries = new AssayQueries();
            int seqNum = 1; // running AssayGroupSubsample.Sequence
            if (sr != null)
            {
                while ((line = sr.ReadLine()) != null)
                {
                    repCount++;
                    pct = ((float)linesRead / (float)approxNumLines) * 100.0f;
                    bct++;
                    linesRead++;
                    if (ct >= importMap.dataStartLine)
                    {
                        // digest a row of input data
                        List<string> items = parseTestLine(line, importMap.inputDelimiter);
                        Guid holeID = new Guid();
                        Decimal fromDepth = new Decimal(-9999999999); // sentinel for "not parsed"
                        Decimal toDepth = new Decimal(-9999999999);
                        string sampleNumber = null;
                        string sampleName = null;
                        string labBatchNumber = null;
                        string labsampleNumber = null;
                        // find mapped values by name
                        int idxVal = 0;
                        // -- Get the hole ID foreign key relation
                        bool foundEntry = columnIDX.TryGetValue("HeaderID", out idxVal);
                        bool foundHole = false;
                        string holeName = "";
                        if (foundEntry)
                        {
                            string lookupByName = "HoleName";
                            string lookupValue = items[idxVal];
                            holeName = lookupValue;
                            bool lv = holeIDLookups.ContainsKey(lookupValue);
                            if (!lv)
                            {
                                string headerGUID = ForeignKeyUtils.FindFKValueInOther(lookupValue, headerCmap, secondaryConnection, false, lookupByName, XODBProjectID);
                                if (headerGUID == null)
                                {
                                    // this means we have not found the specified records in the header table
                                    // Report on issue and skip line
                                }
                                else
                                {
                                    foundHole = true;
                                    holeID = new Guid(headerGUID);
                                    holeIDLookups.Add(lookupValue, holeID);
                                    fkLookupCount++;
                                }
                            }
                            else
                            {
                                holeIDLookups.TryGetValue(lookupValue, out holeID);
                                foundHole = true;
                            }
                        }
                        if (!foundHole)
                        {
                            mos.AddErrorMessage("Failed to find hole " + holeName + ". Skipping record at line " + linesRead + ".");
                            mos.finalErrorCode = ModelImportStatus.DATA_CONSISTENCY_ERROR;
                            mos.recordsFailed++;
                            continue;
                        }
                        else
                        {
                            // parse the interval bounds (sentinels remain when unparsable)
                            bool hasFrom = false;
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("FromDepth", out idxVal);
                            if (foundEntry)
                            {
                                string ii = items[idxVal];
                                Decimal val = 0;
                                bool isOk = Decimal.TryParse(ii, out val);
                                if (isOk)
                                {
                                    fromDepth = val;
                                    hasFrom = true;
                                }
                            }
                            bool hasTo = false;
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("ToDepth", out idxVal);
                            if (foundEntry)
                            {
                                string ii = items[idxVal];
                                Decimal val = 0;
                                bool isOk = Decimal.TryParse(ii, out val);
                                if (isOk)
                                {
                                    toDepth = val;
                                    hasTo = true;
                                }
                            }
                            List<Sample> duplicateList = null;
                            bool isDuplicateInterval = false;
                            //if (checkForDuplicates)
                            //{
                            //    if (hasFrom && hasTo)
                            //    {
                            //        // here we need to check that the hole is not duplicated
                            //        duplicateList = assayQueries.CheckForDuplicate(holeID, fromDepth, toDepth);
                            //        if (duplicateList.Count > 0)
                            //        {
                            //            isDuplicateInterval = true;
                            //        }
                            //    }
                            //    if (isDuplicateInterval)
                            //    {
                            //        hasDuplicateIntervals = true;
                            //        mos.AddWarningMessage("Duplicate interval for hole " + holeName + " at depth " + fromDepth + " to " + toDepth);
                            //        UpdateStatus("Duplicate interval at " + holeName + " " + fromDepth + ", " + toDepth, pct);
                            //        if (!doImportOverwrite)
                            //        {
                            //            mos.recordsFailed++;
                            //            continue;
                            //        }
                            //    }
                            //}
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("SampleNumber", out idxVal);
                            if (foundEntry)
                            {
                                string ii = items[idxVal];
                                sampleNumber = ii;
                            }
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("SampleName", out idxVal);
                            if (foundEntry)
                            {
                                string ii = items[idxVal];
                                sampleName = ii;
                            }
                            // NOTE(review): the next two lookups query "SampleNumber" although
                            // "LabSampleNumber" and "LabBatchNumber" shortcuts were populated
                            // above - looks like a copy/paste slip; confirm intended keys.
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("SampleNumber", out idxVal);
                            if (foundEntry)
                            {
                                string ii = items[idxVal];
                                labsampleNumber = ii;
                            }
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("SampleNumber", out idxVal);
                            if (foundEntry)
                            {
                                string ii = items[idxVal];
                                labBatchNumber = ii;
                            }
                            Sample xs = new Sample();
                            if (isDuplicateInterval == true)
                            {
                                // currently unreachable while the duplicate check stays disabled
                                xs = duplicateList.First();
                            }
                            else
                            {
                                xs.SampleID = Guid.NewGuid();
                                xs.SampleName = sampleName;
                                xs.SampleNumber = sampleNumber;
                                xs.FromDepth = fromDepth;
                                xs.ToDepth = toDepth;
                                xs.HeaderID = holeID;
                                xs.VersionUpdated = currentUpdateTimestamp;
                                entityObj.Samples.AddObject(xs);
                            }
                            // Now identify the program, Stage, Size fraction and Wash fraction
                            // get the program text
                            string programType = null;
                            if (cmProgram != null)
                            {
                                programType = items[cmProgram.sourceColumnNumber];
                            }
                            string stage = null;
                            if (cmStage != null)
                            {
                                stage = items[cmStage.sourceColumnNumber];
                            }
                            string sizeFraction = null;
                            if (cmSizeFraction != null)
                            {
                                sizeFraction = items[cmSizeFraction.sourceColumnNumber];
                            }
                            string washFraction = null;
                            if (cmWashFraction != null)
                            {
                                washFraction = items[cmWashFraction.sourceColumnNumber];
                            }
                            // see if the interval has changed, whereby we will need to reset the sequence ID
                            if (holeID != lastHoleID)
                            {
                                if (fromDepth != lastFromDepth && toDepth != lastToDepth)
                                {
                                    // new interval
                                    WorkflowProcedureSequenceNumber = 1;
                                }
                            }
                            // NOTE(review): stage is null when no [STAGE…] column is mapped,
                            // which would make this Trim() throw - confirm a stage column is required.
                            if (!stage.Trim().Equals(lastStage))
                            {
                                WorkflowProcedureSequenceNumber = 1;
                            }
                            lastHoleID = holeID;
                            lastFromDepth = fromDepth;
                            lastToDepth = toDepth;
                            lastStage = stage;
                            // resolve (or create) the workflow for this program and the procedure for this stage
                            AssayGroupWorkflow agWorkflowProgram = GetAssayGroupWorkflow(entityObj, programType, agGuid);
                            AssayGroupWorkflowProcedure agWorkflowStage = GetAssayGroupWorkflowProcedure(entityObj, stage, agWorkflowProgram);
                            AssayGroupSubsample agSS = new AssayGroupSubsample();
                            agSS.AssayGroupID = agGuid;
                            agSS.Sequence = seqNum;
                            agSS.AssayGroupSubsampleID = Guid.NewGuid();
                            agSS.SampleAntecedentID = xs.SampleID;
                            agSS.OriginalSample = xs;
                            agSS.AssayGroupWorkflowProcedureID = agWorkflowStage.AssayGroupWorkflowProcedureID;
                            agSS.AssayGroupWorkflowProcedure = agWorkflowStage;
                            entityObj.AssayGroupSubsamples.AddObject(agSS);
                            entityObj.SaveChanges();
                            seqNum++;
                            AssayGroupSubsamplePrecondition agSizeFraction = GetAssayGroupPrecondition(entityObj, sizeFraction, "Size fraction", agSS.AssayGroupSubsampleID);
                            AssayGroupSubsamplePrecondition agWashFraction = GetAssayGroupPrecondition(entityObj, washFraction, "Wash fraction", agSS.AssayGroupSubsampleID);
                            // now pick out all the mapped values
                            // iterate over all [ASSAY RESULT] columns
                            bool assayUpdated = false; // never set - updates are not implemented
                            bool assayAdded = false;
                            foreach (KeyValuePair<ColumnMap, Guid> kvp in resultsColumns)
                            {
                                ColumnMap cm = kvp.Key;
                                Guid g = kvp.Value; // this is the AssayGroupTestID
                                AssayGroupTestResult testResult = new AssayGroupTestResult();
                                testResult.AssayGroupSubsampleID = agSS.AssayGroupSubsampleID;
                                testResult.AssayGroupTestResultID = Guid.NewGuid();
                                testResult.AssayGroupTestID = g;
                                testResult.SampleID = xs.SampleID;
                                testResult.VersionUpdated = currentUpdateTimestamp;
                                testResult.LabBatchNumber = labBatchNumber;
                                //testResult.LabSampleNumber = labsampleNumber;
                                Decimal result = new Decimal();
                                if (items.Count >= cm.sourceColumnNumber)
                                {
                                    // numeric values go to LabResult; anything unparsable is kept as text
                                    bool parsedOK = Decimal.TryParse(items[cm.sourceColumnNumber], out result);
                                    if (parsedOK)
                                    {
                                        testResult.LabResult = result;
                                    }
                                    else
                                    {
                                        testResult.LabResultText = items[cm.sourceColumnNumber];
                                    }
                                }
                                else
                                {
                                    mos.AddWarningMessage("Line " + linesRead + " contains too few columns to read " + cm.sourceColumnName);
                                }
                                entityObj.AssayGroupTestResults.AddObject(testResult);
                                assayAdded = true;
                            }
                            if (assayAdded == true)
                            {
                                mos.recordsAdded++;
                            }
                            if (assayUpdated)
                            {
                                mos.recordsUpdated++;
                            }
                            tb++;
                        }
                    }
                    // flush a batch of pending entities
                    if (commitToDB)
                    {
                        if (tb == transactionBatchLimit)
                        {
                            entityObj.SaveChanges();
                            UpdateStatus("Writing assays to DB (" + ct + " entries)", pct);
                            tb = 0;
                        }
                    }
                    ct++;
                }
                // final flush for any partial batch
                entityObj.SaveChanges();
            }
            if (hasDuplicateIntervals)
            {
                mos.finalErrorCode = ModelImportStatus.DATA_CONSISTENCY_ERROR;
            }
            string numFKLookups = "FK lookups " + fkLookupCount;
            mos.linesReadFromSource = ct - 1;
            UpdateStatus("Finished writing coal quality data to database.", 0);
        }
        catch (Exception ex)
        {
            UpdateStatus("Error writing qualities to database ", 0);
            mos.AddErrorMessage("Error writing data at line " + linesRead + ":\n" + ex.ToString());
            mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
        }
        finally
        {
            try
            {
                connection.Close();
                secondaryConnection.Close();
                fileStream.Close();
            }
            catch (Exception ex)
            {
                mos.AddErrorMessage("Error closing conenction to database:\n" + ex.ToString());
                mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
            }
        }
        // overwrites the line count set in the try block above
        mos.linesReadFromSource = linesRead;
    }
}
/// <summary>
/// Checks whether the owner of <paramref name="secured"/> holds the requested
/// <paramref name="permission"/> by calling the X_SP_GetSecuredRight stored procedure.
/// Each Can* flag passed to the SP is the record's own flag OR'd with whether that
/// action was requested.
/// </summary>
/// <returns>True when the stored procedure verifies the right.</returns>
public bool CheckOwnership(ISecured secured, ActionPermission permission)
{
    using (new TransactionScope(TransactionScopeOption.Suppress))
    // FIX: the context was never disposed; wrap it in a using like the other helpers.
    using (var c = new XODBC(ApplicationConnectionString, null))
    {
        // NOTE(review): the output parameter is declared as byte but cast to bool below;
        // presumably the SP returns a bit - confirm the declared CLR type is intentional.
        var verified = new System.Data.Objects.ObjectParameter("verified", typeof(byte));
        c.X_SP_GetSecuredRight(secured.OwnerContactID, secured.OwnerApplicationID, secured.OwnerTableType, secured.OwnerReferenceID, secured.OwnerField,
            secured.CanRead || ((ActionPermission.Read & permission) == ActionPermission.Read),
            secured.CanCreate || ((ActionPermission.Create & permission) == ActionPermission.Create),
            secured.CanUpdate || ((ActionPermission.Update & permission) == ActionPermission.Update),
            secured.CanDelete || ((ActionPermission.Delete & permission) == ActionPermission.Delete),
            verified);
        return (bool)verified.Value;
    }
}
/// <summary>
/// Returns true when <paramref name="username"/> exists both as a membership user and as a
/// live contact (Version 0, not deleted) linked to that user; false for null/empty input
/// or when no such contact is found.
/// </summary>
public bool IsValidInXODB(string username)
{
    if (string.IsNullOrEmpty(username))
        return false;
    using (new TransactionScope(TransactionScopeOption.Suppress))
    // FIX: the context was never disposed; wrap it in a using like the other helpers.
    using (var c = new XODBC(ApplicationConnectionString, null))
    {
        // SingleOrDefault yields default(Guid) when no matching contact exists.
        var contactID = (from u in c.Users
                         join contacts in c.Contacts on u.UserId equals contacts.AspNetUserID
                         where u.UserName == username
                               && contacts.Username == username
                               && contacts.Version == 0
                               && contacts.VersionDeletedBy == null
                         select contacts.ContactID).SingleOrDefault();
        return contactID != default(Guid);
    }
}
// Admin service account id, different from contact who is admin
private Authority AdminAuthority = new Authority(new Guid("370846E4-36DC-4BAC-8FB5-C788C730BB45"), "admin");

/// <summary>
/// Assembles the full Authority for a contact: username and membership user id, role ids,
/// the company trees the contact belongs to (via X_SP_GetCompanies), licenses/assets/parts,
/// experiences, and the security black/white lists.
/// </summary>
/// <param name="contactID">Contact to build the authority for; must be non-null.</param>
/// <returns>A populated Authority.</returns>
/// <exception cref="AuthorityException">When not valid in XODB or contactID is null.</exception>
public Authority BuildAuthority(Guid? contactID)
{
    if (!IsValidInXODB() || !contactID.HasValue)
        throw new AuthorityException("No authority to connect to XODB.");
    using (new TransactionScope(TransactionScopeOption.Suppress))
    {
        // NOTE(review): the context is not disposed here; several deferred IQueryables
        // built on it (bl, wl, r, licenses, ...) are handed to the Authority constructor,
        // so disposing it before they are enumerated would break them - confirm the
        // intended context lifetime.
        var c = new XODBC(ApplicationConnectionString, null);
        // Single(): throws if the contact row (Version 0, not deleted) or the user is missing.
        var username = (from o in c.Contacts where o.ContactID == contactID && o.Version == 0 && o.VersionDeletedBy == null select o.Username).Single();
        var userID = (from o in c.Users where o.ApplicationId == ApplicationID && o.UserName == username select o.UserId).Single();
        // deferred: role ids for this user
        var r = (from o in c.Users from x in o.aspnet_Roles where o.UserId == userID select x.RoleId);
        // companies the contact has direct experience with
        var myCompanies = (from o in c.Experiences where o.ContactID == contactID && o.CompanyID != null select o.CompanyID).ToArray();
        var allCompanies = new List<Guid>();
        var rootCompanies = new List<Guid>();
        using (DataTable table = new DataTable())
        {
            using (var con = new SqlConnection(ApplicationConnectionString))
            using (var cmd = new SqlCommand("X_SP_GetCompanies", con))
            using (var da = new SqlDataAdapter(cmd))
            {
                cmd.CommandType = CommandType.StoredProcedure;
                da.Fill(table);
            }
            // Get the roots that the company is in and the children.
            // Each row appears to encode a company chain in every second column,
            // with the root at column 1 - TODO confirm the SP's result shape.
            foreach (DataRow row in table.Rows)
            {
                var found = false;
                var rowRoot = (Guid)row[1];
                for (int i = 1; i < table.Columns.Count; i += 2)
                {
                    var checking = (Guid)row[i];
                    // stop once the chain loops back to the last collected company
                    if (found && checking == allCompanies.Last())
                        break;
                    if (!found && myCompanies.Contains(checking))
                    {
                        found = true;
                        rootCompanies.Add(rowRoot);
                    }
                    // from the first membership hit onward, collect the rest of the chain
                    if (found)
                        allCompanies.Add(checking);
                }
            }
        }
        // Get my licenses & applications, assets, models, parts (all deferred queries)
        var licenses = (from o in c.Licenses where o.ContactID == contactID && o.LicenseID != null select o);
        var assets = (from o in c.LicenseAssets where !(from x in c.Licenses where x.ContactID == contactID select x.LicenseID).Contains(o.LicenseID.Value) select o);
        var parts = (from o in c.LicenseAssetModelParts where !(from x in assets select x.LicenseAssetID).Contains(o.LicenseAssetID.Value) select o);
        var users = (from o in c.Users where o.UserName == username select o.UserId);
        var applications = (from o in c.Applications join x in c.Users on o.ApplicationId equals x.ApplicationId where x.UserName == username select o.ApplicationId);
        var experiences = (from o in c.Experiences where o.ContactID == contactID select o);
        //TODO: Do this based on experience instead of all!!! HACK!
        var bl = (from o in c.SecurityBlacklists select o);
        var wl = (from o in c.SecurityWhitelists select o);
        return new Authority(contactID.Value, username, userID, ApplicationID, bl, wl, applications, r, experiences, licenses, assets, parts, users, allCompanies, rootCompanies);
    }
}
/// <summary>
/// Deletes the live (Version 0, not deleted) black- or white-list row identified by
/// <paramref name="secured"/>.SecurityID, after verifying Read|Delete ownership.
/// </summary>
/// <exception cref="AuthorityException">When the ownership check fails.</exception>
/// <exception cref="NotSupportedException">When SecurityID is null.</exception>
public void DeleteSecurity(ISecured secured)
{
    //TODO!!!: When writing security check for antecedentid = referenceid &/or version=0
    if (secured.SecurityID.HasValue)
    {
        if (CheckOwnership(secured, ActionPermission.Read | ActionPermission.Delete))
        {
            using (new TransactionScope(TransactionScopeOption.Suppress))
            // FIX: the context was never disposed; wrap it in a using like the other helpers.
            using (var c = new XODBC(ApplicationConnectionString, null))
            {
                // IsBlack chooses which list the ID refers to; Single() throws when the row is missing.
                if (secured.IsBlack)
                {
                    var s = (from o in c.SecurityBlacklists where o.SecurityBlacklistID == secured.SecurityID && o.Version == 0 && o.VersionDeletedBy == null select o).Single();
                    c.SecurityBlacklists.DeleteObject(s);
                }
                else
                {
                    var s = (from o in c.SecurityWhitelists where o.SecurityWhitelistID == secured.SecurityID && o.Version == 0 && o.VersionDeletedBy == null select o).Single();
                    c.SecurityWhitelists.DeleteObject(s);
                }
                c.SaveChanges();
            }
        }
        else
            throw new AuthorityException(string.Format("Incorrect permission for action: \"Delete\" Contact: {0} Record: {1}", secured.AccessorContactID, secured.OwnerReferenceID));
    }
    else
        throw new NotSupportedException("Can not delete a security record without an ID.");
}
/// <summary>
/// Persists a black/white-list security record: updates the existing live row when
/// SecurityID is set (requires Read|Update), otherwise creates a new row (requires
/// Read|Create). Throws AuthorityException when the permission check fails.
/// </summary>
/// <param name="secured">Security record descriptor to persist.</param>
/// <exception cref="AuthorityException">Ownership/permission check failed.</exception>
public void UpdateSecurity(ISecured secured)
{
    //TODO!!!: When writing security check for antecedentid = referenceid &/or version=0
    //First check user and owner rights, black list and white list against record
    //Then get ok
    if (secured.SecurityID.HasValue)
    {
        //Call Edit
        if (CheckOwnership(secured, ActionPermission.Read | ActionPermission.Update))
        {
            using (new TransactionScope(TransactionScopeOption.Suppress))
            using (var c = new XODBC(ApplicationConnectionString, null)) // FIX: context was previously leaked
            {
                if (secured.IsBlack)
                {
                    // Update the live (Version == 0, not soft-deleted) blacklist row.
                    var s = (from o in c.SecurityBlacklists where o.SecurityBlacklistID == secured.SecurityID && o.Version == 0 && o.VersionDeletedBy == null select o).Single();
                    s.AccessorContactID = secured.AccessorContactID;
                    s.AccessorApplicationID = secured.AccessorApplicationID;
                    s.AccessorCompanyID = secured.AccessorCompanyID;
                    s.AccessorProjectID = secured.AccessorProjectID;
                    s.AccessorRoleID = secured.AccessorRoleID;
                    s.CanCreate = secured.CanCreate;
                    s.CanRead = secured.CanRead;
                    s.CanDelete = secured.CanDelete;
                    s.CanUpdate = secured.CanUpdate;
                    s.VersionUpdated = DateTime.UtcNow;
                    s.VersionUpdatedBy = secured.OwnerContactID;
                }
                else
                {
                    var s = (from o in c.SecurityWhitelists where o.SecurityWhitelistID == secured.SecurityID && o.Version == 0 && o.VersionDeletedBy == null select o).Single();
                    s.AccessorContactID = secured.AccessorContactID;
                    s.AccessorApplicationID = secured.AccessorApplicationID;
                    s.AccessorCompanyID = secured.AccessorCompanyID;
                    s.AccessorProjectID = secured.AccessorProjectID;
                    s.AccessorRoleID = secured.AccessorRoleID;
                    s.CanCreate = secured.CanCreate;
                    s.CanRead = secured.CanRead;
                    s.CanDelete = secured.CanDelete;
                    s.CanUpdate = secured.CanUpdate;
                    s.VersionUpdated = DateTime.UtcNow;
                    s.VersionUpdatedBy = secured.OwnerContactID;
                }
                c.SaveChanges();
            }
        }
        else throw new AuthorityException(string.Format("Incorrect permission for action: \"Update\" Contact: {0} Record: {1}", secured.AccessorContactID, secured.OwnerReferenceID));
    }
    else
    {
        //Call New
        if (CheckOwnership(secured, ActionPermission.Read | ActionPermission.Create))
        {
            using (new TransactionScope(TransactionScopeOption.Suppress))
            using (var c = new XODBC(ApplicationConnectionString, null)) // FIX: context was previously leaked
            {
                if (secured.IsBlack)
                {
                    var s = new SecurityBlacklist
                    {
                        SecurityBlacklistID = Guid.NewGuid(),
                        AccessorContactID = secured.AccessorContactID,
                        AccessorApplicationID = secured.AccessorApplicationID,
                        AccessorCompanyID = secured.AccessorCompanyID,
                        AccessorProjectID = secured.AccessorProjectID,
                        AccessorRoleID = secured.AccessorRoleID,
                        OwnerApplicationID = secured.OwnerApplicationID,
                        OwnerCompanyID = secured.OwnerCompanyID,
                        OwnerContactID = secured.OwnerContactID,
                        OwnerEntitySystemType = secured.OwnerEntitySystemType,
                        OwnerField = secured.OwnerField,
                        OwnerProjectID = secured.OwnerProjectID,
                        OwnerReferenceID = secured.OwnerReferenceID,
                        OwnerTableType = secured.OwnerTableType,
                        CanCreate = secured.CanCreate,
                        CanRead = secured.CanRead,
                        CanDelete = secured.CanDelete,
                        CanUpdate = secured.CanUpdate,
                        VersionOwnerContactID = secured.OwnerContactID,
                        VersionOwnerCompanyID = secured.OwnerCompanyID,
                        VersionUpdated = DateTime.UtcNow,
                        VersionUpdatedBy = secured.OwnerContactID
                    };
                    c.SecurityBlacklists.AddObject(s);
                }
                else
                {
                    var s = new SecurityWhitelist
                    {
                        SecurityWhitelistID = Guid.NewGuid(),
                        AccessorContactID = secured.AccessorContactID,
                        AccessorApplicationID = secured.AccessorApplicationID,
                        AccessorCompanyID = secured.AccessorCompanyID,
                        AccessorProjectID = secured.AccessorProjectID,
                        AccessorRoleID = secured.AccessorRoleID,
                        OwnerApplicationID = secured.OwnerApplicationID,
                        OwnerCompanyID = secured.OwnerCompanyID,
                        OwnerContactID = secured.OwnerContactID,
                        OwnerEntitySystemType = secured.OwnerEntitySystemType,
                        OwnerField = secured.OwnerField,
                        OwnerProjectID = secured.OwnerProjectID,
                        OwnerReferenceID = secured.OwnerReferenceID,
                        OwnerTableType = secured.OwnerTableType,
                        CanCreate = secured.CanCreate,
                        CanRead = secured.CanRead,
                        CanDelete = secured.CanDelete,
                        CanUpdate = secured.CanUpdate,
                        VersionOwnerContactID = secured.OwnerContactID,
                        VersionOwnerCompanyID = secured.OwnerCompanyID,
                        VersionUpdated = DateTime.UtcNow,
                        VersionUpdatedBy = secured.OwnerContactID
                    };
                    c.SecurityWhitelists.AddObject(s);
                }
                c.SaveChanges();
            }
        }
        else throw new AuthorityException(string.Format("Incorrect permission for action: \"Create\" Contact: {0} Record: {1}", secured.AccessorContactID, secured.OwnerReferenceID));
    }
}
/// <summary>
/// Synchronizes users (and, for Forms auth, roles and role memberships) between the
/// Orchard CMS membership store and the XODB database. Forms auth: creates missing
/// XODB Users/Contacts/Roles, reconciles role membership, deletes XODB users that no
/// longer exist in Orchard. Windows auth: pulls users from Active Directory and
/// creates/updates matching XODB Contacts.
/// </summary>
public void SyncUsers()
{
    //Get Orchard Users & Roles
    var orchardUsers = _contentManager.Query<UserPart, UserPartRecord>().List();
    var orchardRoles = _roleService.GetRoles().ToArray();
    // Flatten (user, role-name) pairs for the membership reconciliation below.
    var orchardUserRoles = (from xur in _userRolesRepository.Table.ToArray()
                            join xu in orchardUsers on xur.UserId equals xu.Id
                            join xr in orchardRoles on xur.Role.Id equals xr.Id
                            select new { xu.UserName, RoleName = xr.Name }).ToArray();
    //Get Authmode & Then Update
    if (AuthenticationMode == System.Web.Configuration.AuthenticationMode.Forms)
    {
        using (new TransactionScope(TransactionScopeOption.Suppress))
        {
            // NOTE(review): this XODBC context is never disposed — TODO confirm intended.
            var c = new XODBC(ApplicationConnectionString, null);
            var r = from o in c.Roles.Include("aspnet_Users") where o.ApplicationId == ApplicationID select o;
            var u = from o in c.Users.Include("aspnet_Roles") where o.ApplicationId == ApplicationID select o;
            var updated = DateTime.UtcNow;
            //New User: Orchard users with no matching XODB Users row.
            var nu = (from o in orchardUsers where !(from ou in u select ou.UserName).Contains(o.UserName) select o);
            foreach (var n in nu)
            {
                var user = new Users();
                user.UserId = Guid.NewGuid();
                user.UserName = n.UserName;
                user.ApplicationId = ApplicationID;
                user.LoweredUserName = n.UserName.ToLower();
                user.LastActivityDate = updated;
                c.Users.AddObject(user);
                // Link any existing contacts with this username to the new user row.
                var contacts = (from o in c.Contacts where o.Username == user.UserName select o);
                foreach (var nc in contacts) { nc.AspNetUserID = user.UserId; }
                // No contact yet: create a minimal one from the Orchard profile.
                if (!contacts.Any())
                {
                    var contact = new Contact();
                    contact.ContactID = Guid.NewGuid();
                    contact.Username = user.UserName;
                    contact.AspNetUserID = user.UserId;
                    contact.DefaultEmail = n.Email;
                    contact.ContactName = string.Format("Orchard User: {0}", user.UserName);
                    contact.VersionUpdated = updated;
                    contact.Surname = "";
                    contact.Firstname = "";
                    c.Contacts.AddObject(contact);
                }
            }
            //New Role: Orchard roles with no matching XODB Roles row.
            var nr = (from o in orchardRoles where !(from or in r select or.RoleName).Contains(o.Name) select o);
            foreach (var n in nr)
            {
                var role = new Roles();
                role.RoleName = n.Name;
                role.ApplicationId = ApplicationID;
                role.RoleId = Guid.NewGuid();
                role.LoweredRoleName = n.Name.ToLower();
                c.Roles.AddObject(role);
            }
            c.SaveChanges();
            // Reconcile role membership both ways.
            foreach (var role in r)
            {
                foreach (var user in role.aspnet_Users)
                {
                    //Remove memberships Orchard no longer has.
                    // NOTE(review): Remove() here likely mutates role.aspnet_Users while it is
                    // being enumerated (EF fixes up both ends of the association) — confirm this
                    // does not throw "collection was modified" at runtime.
                    if (!orchardUserRoles.Any(f => f.RoleName == role.RoleName && f.UserName == user.UserName)) user.aspnet_Roles.Remove(role);
                }
                foreach (var user in u)
                {
                    //Add memberships Orchard has that XODB lacks.
                    if (orchardUserRoles.Any(f => f.RoleName == role.RoleName && f.UserName == user.UserName) && !role.aspnet_Users.Any(f => f.UserName == user.UserName)) user.aspnet_Roles.Add(role);
                }
            }
            c.SaveChanges();
            // Users deleted in Orchard: remove the XODB Users row.
            var ru = (from o in u.ToArray() where !(from ou in orchardUsers select ou.UserName).Contains(o.UserName) select o); //can just delete from users table
            foreach (var rem in ru) { c.Users.DeleteObject(rem); }
            c.SaveChanges();
        }
    }
    else if (AuthenticationMode == System.Web.Configuration.AuthenticationMode.Windows)
    {
        //Module syncs only users - only all admin for now
        //Get AD Users
        // throw new NotImplementedException();
        // get a DirectorySearcher object
        DirectorySearcher search = new DirectorySearcher();
        // specify the search filter: all person/user objects in the domain.
        search.Filter = "(&(objectCategory=person)(objectClass=user))";
        //search.Filter = "(&(objectClass=user)(anr=agrosser))"; //TEST
        //// specify which property values to return in the search
        search.PropertiesToLoad.Add("name"); // first name
        search.PropertiesToLoad.Add("givenName"); // first name
        search.PropertiesToLoad.Add("sn"); // last name
        search.PropertiesToLoad.Add("mail"); // smtp mail address
        search.PropertiesToLoad.Add("samaccountname"); // account name
        search.PropertiesToLoad.Add("memberof"); // groups
        search.PropertiesToLoad.Add("objectsid");
        search.PropertiesToLoad.Add("objectguid");
        search.PropertiesToLoad.Add("title");
        // perform the search
        SearchResultCollection results = search.FindAll(); //.FindOne();
        var sessionRoleCache = new Dictionary<string, string>();
        // Project each AD result into a flat record; every property access is guarded
        // because AD properties are absent when unset.
        var adusers = from SearchResult o in results
                      select new
                      {
                          name = o.Properties["name"] != null && o.Properties["name"].Count > 0 ? string.Format("{0}", o.Properties["name"][0]) : null,
                          givenName = o.Properties["givenName"] != null && o.Properties["givenName"].Count > 0 ? string.Format("{0}", o.Properties["givenName"][0]) : null,
                          sn = o.Properties["sn"] != null && o.Properties["sn"].Count > 0 ? string.Format("{0}", o.Properties["sn"][0]) : null,
                          email = o.Properties["mail"] != null && o.Properties["mail"].Count > 0 ? string.Format("{0}", o.Properties["mail"][0]) : null,
                          samaccountname = o.Properties["samaccountname"] != null && o.Properties["samaccountname"].Count > 0 ? string.Format("{0}", o.Properties["samaccountname"][0]) : null,
                          // Translate the binary SID into a DOMAIN\account style name.
                          username = o.Properties["objectsid"] != null && o.Properties["objectsid"].Count > 0 ? ((NTAccount)(new SecurityIdentifier((byte[])o.Properties["objectsid"][0], 0)).Translate(typeof(NTAccount))).ToString() : null,
                          guid = o.Properties["objectguid"] != null && o.Properties["objectguid"].Count > 0 ? new Guid((byte[])o.Properties["objectguid"][0]) : (Guid?)null,
                          title = o.Properties["title"] != null && o.Properties["title"].Count > 0 ? string.Format("{0}", o.Properties["title"][0]) : null,
                          roles = o.Properties["memberof"] != null ? (from string m in o.Properties["memberof"] select getNameFromFQDN(m, sessionRoleCache)).ToArray() : new string[] { }
                      };
        //Get XODB Users
        Contact[] xodbusers;
        using (new TransactionScope(TransactionScopeOption.Suppress))
        {
            // NOTE(review): this XODBC context is never disposed — TODO confirm intended.
            var d = new XODBC(ApplicationConnectionString, null);
            xodbusers = (from o in d.Contacts select o).ToArray();
            //Sync AD, Orchard, XODB
            //New into XODB
            //We need firstname, surname
            // NOTE(review): && binds tighter than ||, so the givenName/sn null checks only
            // guard the first (guid-based) branch of this filter, not the username-based
            // branch — confirm whether parentheses were intended around the || expression.
            var ad_new = (from o in adusers where o.givenName != null && o.sn != null && (o.guid.HasValue && !(from x in xodbusers select x.ContactID).Contains((Guid)o.guid)) || (!o.guid.HasValue && !(from x in xodbusers select x.Username.ToLowerInvariant()).Contains(o.username.ToLowerInvariant())) select o);
            foreach (var o in ad_new)
            {
                Contact c = new Contact();
                // Prefer the AD object GUID as the contact id so re-syncs are stable.
                c.ContactID = o.guid.HasValue ? o.guid.Value : Guid.NewGuid();
                c.Username = o.username;
                c.Firstname = o.givenName;
                // ContactName is capped at 120 characters.
                c.ContactName = string.Join(string.Empty, string.Format("{0} [{1}]", o.name, o.username).Take(120));
                c.Surname = o.sn;
                c.DefaultEmail = o.email;
                d.Contacts.AddObject(c);
            }
            //Updates into XODB: AD records matching an existing contact (by guid or
            //case-insensitive username) whose tracked fields differ.
            var ad_diff = from o in adusers
                          from x in xodbusers
                          where ((o.guid.HasValue && o.guid.Value == x.ContactID) || (o.username != null && x.Username != null && o.username.ToLowerInvariant() == x.Username.ToLowerInvariant()))
                          //Things to update
                          && (o.givenName != x.Firstname || o.sn != x.Surname || o.email != x.DefaultEmail || o.name != x.ContactName)
                          select new { x.ContactID, o.givenName, o.sn, o.email, o.name, o.username };
            foreach (var o in ad_diff)
            {
                var c = xodbusers.First(x => x.ContactID == o.ContactID);
                c.Firstname = o.givenName;
                c.ContactName = string.Join(string.Empty, string.Format("{0} [{1}]", o.name, o.username).Take(120));
                c.Surname = o.sn;
                c.DefaultEmail = o.email;
            }
            d.SaveChanges();
        }
    }
}
/// <summary>
/// Creates and persists a new AssayGroupSubsamplePrecondition for the given subsample.
/// NOTE: the lookup of an existing precondition is currently disabled, so a new row is
/// created and saved on every call (matching prior behavior, where the commented-out
/// query left the "reuse existing" branch unreachable).
/// </summary>
/// <param name="entityObj">Open XODBC context used for the insert.</param>
/// <param name="preconditionName">Display name of the precondition, e.g. a size fraction
/// ("+32", "-16+8"), a density float/sink ("F1.45", "S1.50-F1.55") or a froth duration ("P2", "T1").</param>
/// <param name="preconditionType">Sub-type used to resolve the "AssayPrecondition" parameter.</param>
/// <param name="ssGuid">AssayGroupSubsampleID the precondition belongs to.</param>
/// <returns>The newly created, already-saved precondition.</returns>
private AssayGroupSubsamplePrecondition GetAssayGroupPrecondition(XODBC entityObj, string preconditionName, string preconditionType, Guid ssGuid)
{
    var agw = new AssayGroupSubsamplePrecondition();
    agw.PreconditionName = preconditionName;
    agw.AssayGroupSubsampleID = ssGuid;
    agw.AssayGroupSubsamplePreconditionID = Guid.NewGuid();
    // TODO: make this more efficient by caching Parameters in a dictionary so lookup is
    // fast rather than hitting the DB for every record.
    Guid gParam = this.FindParameter("AssayPrecondition", preconditionType);
    agw.PreconditionParameterID = gParam;
    entityObj.AssayGroupSubsamplePreconditions.AddObject(agw);
    entityObj.SaveChanges();
    return agw;
}
/// <summary>
/// Returns all contacts, ordered by contact name.
/// </summary>
/// <returns>A materialized array of contacts (safe to enumerate after the context is disposed).</returns>
public IEnumerable<Contact> GetContacts()
{
    using (new TransactionScope(TransactionScopeOption.Suppress))
    using (var dataContext = new XODBC(ApplicationConnectionString, null)) // FIX: context was previously leaked
    {
        // ToArray() materializes the result before the context goes away.
        return dataContext.Contacts.OrderBy(x => x.ContactName).ToArray();
    }
}
/// <summary>
/// Creates and persists a new AssayGroupWorkflowProcedure for the given workflow stage.
/// NOTE: the lookup/reuse of an existing procedure by stage name is currently disabled,
/// so a new row is created and saved on every call (matching prior behavior, where the
/// lookup and the null check were both commented out).
/// </summary>
/// <param name="entityObj">Open XODBC context used for the insert.</param>
/// <param name="stage">Workflow state name to record.</param>
/// <param name="assayGroupWorkflow">Parent workflow the procedure belongs to.</param>
/// <returns>The newly created, already-saved procedure.</returns>
private AssayGroupWorkflowProcedure GetAssayGroupWorkflowProcedure(XODBC entityObj, string stage, AssayGroupWorkflow assayGroupWorkflow)
{
    var agw = new AssayGroupWorkflowProcedure();
    agw.AssayGroupWorkflowID = assayGroupWorkflow.AssayGroupWorkflowID;
    agw.AssayGroupWorkflow = assayGroupWorkflow;
    agw.AssayGroupWorkflowProcedureID = Guid.NewGuid();
    agw.WorkflowStateName = stage;
    // Sequence comes from a running counter shared across the whole import.
    agw.Sequence = WorkflowProcedureSequenceNumber;
    WorkflowProcedureSequenceNumber++;
    entityObj.AssayGroupWorkflowProcedures.AddObject(agw);
    entityObj.SaveChanges();
    return agw;
}
/// <summary>
/// Returns every role across all applications, keyed by role id, with the owning
/// application's name appended to the role name, e.g. "Editor (MyApp)".
/// </summary>
/// <returns>RoleId -> "RoleName (ApplicationName)".</returns>
public Dictionary<Guid, string> GetRoles()
{
    using (new TransactionScope(TransactionScopeOption.Suppress))
    using (var c = new XODBC(ApplicationConnectionString, null)) // FIX: context was previously leaked
    {
        var r = (from o in c.Applications
                 join a in c.Roles on o.ApplicationId equals a.ApplicationId
                 select new { RoleName = a.RoleName + " (" + o.ApplicationName + ")", a.RoleId });
        // ToDictionary materializes before the context is disposed.
        return r.ToDictionary(f => f.RoleId, f => f.RoleName);
    }
}
/// <summary>
/// Parses a LAS (well-log) file into XODB business objects WITHOUT saving them:
/// returns a list containing, in order, List&lt;FileData&gt;, List&lt;Geophysics&gt;,
/// List&lt;DictionaryUnit&gt;, List&lt;Parameter&gt;, List&lt;GeophysicsMetadata&gt; and
/// List&lt;GeophysicsData&gt; for the caller to persist. Errors are reported via
/// <paramref name="mos"/> rather than thrown.
/// </summary>
/// <param name="lasFile">Parsed LAS file wrapper (headers, column headers, data rows).</param>
/// <param name="origFilename">Original path; also the fallback source of the hole name.</param>
/// <param name="mos">Import status collector for error messages and record counts.</param>
/// <param name="currentProject">Project used to resolve the drill-hole header.</param>
/// <param name="UpdateStatus">Progress callback (unused in this method's body).</param>
/// <returns>The six typed lists described above; partially filled on error.</returns>
internal List<object> ImportLASFile(Xstract.Import.LAS.LASFile lasFile, string origFilename, ModelImportStatus mos, Guid currentProject, Action<string, double> UpdateStatus)
{
    // Default hole name: file name without directory and extension.
    int li = origFilename.LastIndexOf("\\");
    string tempHoleIDa = origFilename.Substring(li);
    li = tempHoleIDa.LastIndexOf(".");
    string tempHoleID = tempHoleIDa.Substring(1, li - 1);
    int rowCounter = 0;
    // now try and get the hole name from a header item. Typically the header name might be in
    // WELL in the Well information header section
    string res = lasFile.LookupWellHeaderSection("WELL");
    if (res != null && res.Trim().Length > 0)
    {
        tempHoleID = res;
    }
    List<object> dataList = new List<object>();
    try
    {
        // here we need to create the Geophysics data row item
        var entityObj = new XODBC(BaseImportTools.XSTRING, null, false);
        //entityObj.AutoDetectChangesEnabled = false; //TODO: Exhaust this, should be faster now
        var physDataList = new List<Geophysics>();
        //var fDataList = new List<XODB.Module.BusinessObjects.File>();
        var fdDataList = new List<FileData>();
        Geophysics xG = new Geophysics();
        xG.FileName = origFilename;
        Guid gg = Guid.NewGuid();
        xG.GeophysicsID = gg;
        // Only link the hole when the lookup returned a non-empty GUID.
        Guid holeGuid = CollarQueries.FindHeaderGuid(tempHoleID, currentProject);
        Guid resHole = new Guid();
        if (!holeGuid.ToString().Equals(resHole.ToString()))
        {
            xG.HeaderID = holeGuid;
        }
        Guid unitGuid = new Guid("2395DE56-8F6F-4B0C-806C-DD2606B9902B"); //FIXME: Magic Number - fallback unit id when "m" cannot be resolved
        UnitQueries uq = new UnitQueries();
        DictionaryUnit xu = uq.FindUnits("m");
        if (xu != null)
        {
            unitGuid = xu.UnitID;
        }
        xG.DimensionUnitID = unitGuid;
        xG.LasVersion = string.Format("{0:N1}", lasFile.versionValue);
        xG.LasWrap = lasFile.versionWrap;
        xG.LasNullValue = string.Format("{0:N2}", lasFile.nullValue);
        // Load the raw file bytes so the original LAS can be stored alongside the data.
        FileStream sr = null;
        try
        {
            sr = new FileStream(lasFile.filePath, FileMode.Open);
        }
        catch (FileNotFoundException fex)
        {
            Console.WriteLine("FileNotFoundException:" + fex.ToString());
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.ToString());
        }
        // NOTE(review): if the open above failed, sr is null here — presumably
        // LoadFromStream throws and the outer catch reports it; confirm. Also the
        // stream is nulled without Dispose/Close — TODO confirm LoadFromStream
        // takes ownership, otherwise this leaks a file handle.
        XODB.Module.BusinessObjects.File F = new XODB.Module.BusinessObjects.File();
        F.LoadFromStream(lasFile.FileName(), sr);
        sr = null;
        Guid fdGUID = Guid.NewGuid();
        var fD = new FileData { Author = default(string), FileDataID = fdGUID, ReferenceID = xG.GeophysicsID, TableType = "X_Geophysics", FileName = F.FileName, FileBytes = F.FileBytes, FileChecksum = Hash.ComputeHash(F.FileBytes), MimeType = MimeTypes.MimeTypeHelper.GetMimeTypeByFileName(F.FileName) };
        xG.OriginalFileDataID = fD.FileDataID;
        physDataList.Add(xG);
        fdDataList.Add(fD);
        F = null;
        // here we need to add a GeophysicsMetadata item for each column
        Dictionary<string, Guid> metaDataIDLookup = new Dictionary<string, Guid>();
        var unitDataList = new List<DictionaryUnit>();
        var paramDataList = new List<Parameter>();
        var metaDataList = new List<GeophysicsMetadata>();
        foreach (string s in lasFile.columnHeaders)
        {
            Parameter xp = null;
            xp = GetParameterIDFor(entityObj, "LAS data column", s);
            //test to see if we already have the unit
            // LAS column headers look like "MNEMONIC.UNIT"; take the last dot-separated token.
            string splitter = s.Split(new char[] { '.' }, StringSplitOptions.RemoveEmptyEntries).LastOrDefault().Trim();
            uq = new UnitQueries();
            xu = uq.FindUnits(splitter);
            if (xp != null && xu != null)
            {
                xp.UnitID = xu.UnitID;
                //xp.Unit = xu;
            }
            else
            {
                // Parameter and/or unit missing: create whichever is absent.
                bool xpnull = false;
                if (xp == null)
                {
                    xpnull = true;
                    xp = new Parameter();
                    Guid pg = Guid.NewGuid();
                    xp.ParameterID = pg;
                    xp.ParameterType = "LAS data column";
                    xp.ParameterName = s;
                }
                //test to see if we already have added a unit into the list
                if (unitDataList.Count > 0 && xu == null)
                {
                    xu = unitDataList.Where(c => c.StandardUnitName == splitter).FirstOrDefault();
                }
                if (xu == null)
                {
                    //create new unit here store it and pass to parameters
                    Guid ug = Guid.NewGuid();
                    xu = new DictionaryUnit { UnitID = ug, StandardUnitName = splitter, CoalUnitName = splitter, StrictlySI = false };
                    unitDataList.Add(xu);
                }
                xp.UnitID = xu.UnitID;
                if (xpnull)
                {
                    paramDataList.Add(xp);
                }
            }
            Guid gmid = Guid.NewGuid();
            GeophysicsMetadata xgm = new GeophysicsMetadata { GeophysicsID = gg, GeophysicsMetadataID = gmid, Unit = xu.StandardUnitName, Mnemonic = s, ParameterID = xp.ParameterID };
            metaDataList.Add(xgm);
            // Remember the metadata id per column header for the data rows below.
            metaDataIDLookup.Add(s, gmid);
        }
        // Convert every (depth, value) cell into a GeophysicsData row.
        int insertCounter = 0;
        var geoDataList = new List<GeophysicsData>();
        foreach (LASDataRow ldr in lasFile.dataRows)
        {
            double depth = ldr.depth;
            for (int i = 0; i < ldr.rowData.Count(); i++)
            {
                GeophysicsData xd1 = new GeophysicsData();
                string s = lasFile.columnHeaders[i];
                xd1.GeophysicsDataID = Guid.NewGuid();
                Guid g = new Guid();
                bool found = metaDataIDLookup.TryGetValue(s, out g);
                if (found)
                {
                    xd1.GeophysicsMetadataID = g;
                }
                xd1.Dimension = (decimal)depth;
                xd1.MeasurementValue = (decimal)ldr.rowData[i];
                geoDataList.Add(xd1);
            }
            insertCounter++;
            rowCounter++;
        }
        lasFile = null;
        // Output order is part of the contract with the caller: FileData, Geophysics,
        // units, parameters, metadata, data.
        dataList.Add(fdDataList);
        dataList.Add(physDataList);
        dataList.Add(unitDataList);
        dataList.Add(paramDataList);
        dataList.Add(metaDataList);
        dataList.Add(geoDataList);
        // Drop local references to help the GC with large imports.
        fdDataList = null;
        physDataList = null;
        unitDataList = null;
        paramDataList = null;
        metaDataIDLookup = null;
        metaDataList = null;
        geoDataList = null;
    }
    catch (Exception ex)
    {
        mos.errorMessages.Add("Failed to complete import of LAS file: " + origFilename);
        mos.errorMessages.Add("Details: " + ex.Message.ToString());
        if (ex.InnerException != null) mos.errorMessages.Add("Inner Exception: " + ex.InnerException.Message.ToString());
        mos.errorMessages.Add("Row: " + rowCounter);
    }
    // rowCounter == number of LAS data rows processed (not individual cells).
    mos.recordsAdded = rowCounter;
    return dataList;
}
/// <summary>
/// Returns every company reported by the X_SP_GetCompanies stored procedure, keyed by
/// company id, with hierarchical display names ("Root - Child - ..."), sorted by name.
/// </summary>
/// <returns>CompanyID -> hierarchical company name.</returns>
public Dictionary<Guid, string> GetCompanies()
{
    var allCompanies = new Dictionary<Guid, string>();
    // Suppress any ambient transaction; the stored procedure is read-only.
    // FIX: removed an XODBC context that was instantiated here but never used (and never disposed).
    using (new TransactionScope(TransactionScopeOption.Suppress))
    {
        using (DataTable table = new DataTable())
        {
            using (var con = new SqlConnection(ApplicationConnectionString))
            using (var cmd = new SqlCommand("X_SP_GetCompanies", con))
            using (var da = new SqlDataAdapter(cmd))
            {
                cmd.CommandType = CommandType.StoredProcedure;
                da.Fill(table);
            }
            // Each row encodes a root-to-leaf path as (name, id) pairs starting at column 2.
            foreach (DataRow row in table.Rows)
            {
                var companyName = "";
                Guid? lastKey = null;
                for (int i = 2; i < table.Columns.Count; i += 2)
                {
                    companyName += (string)row[i];
                    var checking = (Guid)row[i + 1];
                    // A repeated id marks the end of the path (padding in the result set).
                    if (lastKey.HasValue && lastKey.Value == checking) break;
                    lastKey = checking;
                    if (!allCompanies.ContainsKey(checking)) allCompanies.Add(checking, companyName);
                    companyName += " - ";
                }
            }
        }
    }
    return (from o in allCompanies orderby o.Value select o).ToDictionary(f => f.Key, f => f.Value);
}
/// <summary>
/// Imports assay results from a delimited text stream into XODB: creates an AssayGroup,
/// one AssayGroupTest per "[ASSAY...]" mapped column, then one Sample plus one
/// AssayGroupTestResult per assay column per data line. Progress and problems are
/// reported through <paramref name="mos"/> and <paramref name="UpdateStatus"/>.
/// FIX: the lab-sample-number and lab-batch-number lookups previously queried the
/// "SampleNumber" column index (copy-paste error); they now use "LabSampleNumber"
/// and "LabBatchNumber" as populated by PopulateCMapShortcut below.
/// </summary>
/// <param name="mos">Import status collector (errors, warnings, counters).</param>
/// <param name="fileStream">Input data stream; closed in the finally block.</param>
/// <param name="importMap">Column mapping, delimiter and data start line.</param>
/// <param name="batchSize">Number of records between SaveChanges batches.</param>
/// <param name="UpdateStatus">Progress callback (message, percent).</param>
/// <param name="approxNumLines">Approximate line count used for percentage reporting.</param>
/// <param name="connectionString">Database connection string.</param>
/// <param name="XODBProjectID">Project the assay group and hole lookups belong to.</param>
/// <param name="checkForDuplicates">When true, existing intervals are detected per hole/from/to.</param>
/// <param name="doImportOverwrite">When true, duplicate intervals are re-used instead of skipped.</param>
internal void AddAssayData(ModelImportStatus mos, Stream fileStream, FormatSpecification.ImportDataMap importMap, int batchSize, Action<string, double> UpdateStatus, int approxNumLines, string connectionString, Guid XODBProjectID, bool checkForDuplicates, bool doImportOverwrite)
{
    bool commitToDB = true;
    DateTime currentUpdateTimestamp = DateTime.UtcNow;
    // first set up an assay group object - we can do this through the edm
    using (var entityObj = new XODBC(connectionString, null))
    {
        Guid agGuid = Guid.NewGuid();
        AssayGroup ag = new AssayGroup();
        ag.AssayGroupID = agGuid;
        ag.ProjectID = XODBProjectID;
        ag.AssayGroupName = "Manual import";
        ag.Comment = "From file " + importMap.mapOriginalDataFile;
        ag.Entered = currentUpdateTimestamp;
        ag.VersionUpdated = currentUpdateTimestamp;
        entityObj.AssayGroups.AddObject(ag);
        if (commitToDB) { entityObj.SaveChanges(); }
        // set up the assay test columns - one of these for each test type
        Dictionary<ColumnMap, Guid> resultsColumns = new Dictionary<ColumnMap, Guid>();
        Dictionary<Guid, AssayGroupTest> assayGroups = new Dictionary<Guid, AssayGroupTest>();
        foreach (ColumnMap cim in importMap.columnMap)
        {
            if (cim.targetColumnName.Trim().StartsWith("[ASSAY"))
            {
                // this is a test category
                resultsColumns.Add(cim, Guid.NewGuid());
            }
        }
        UpdateStatus("Setting up assay tests ", 2);
        foreach (KeyValuePair<ColumnMap, Guid> kvp in resultsColumns)
        {
            ColumnMap cm = kvp.Key;
            Guid g = kvp.Value;
            AssayGroupTest xt = new AssayGroupTest();
            // AssayTestName is capped at 16 characters.
            string ss1 = "";
            if (cm.sourceColumnName != null && cm.sourceColumnName.Length > 15)
            {
                ss1 = cm.sourceColumnName.Substring(0, 16);
            }
            else
            {
                ss1 = cm.sourceColumnName;
            }
            Guid pid = FindParameterForAssayTypeName(cm.sourceColumnName);
            xt.ParameterID = pid;
            xt.AssayTestName = ss1;
            xt.AssayGroupID = agGuid;
            xt.AssayGroupTestID = g;
            xt.VersionUpdated = currentUpdateTimestamp;
            entityObj.AssayGroupTests.AddObject(xt);
            assayGroups.Add(g, xt);
            if (commitToDB) { entityObj.SaveChanges(); }
        }
        // iterate through the data lines
        int ct = 1;
        int linesRead = 0;
        SqlConnection connection = null;
        SqlConnection secondaryConnection = null;
        // get a connection to the database
        try
        {
            int domainColIDX = -1;
            connection = new SqlConnection(connectionString);
            connection.Open();
            secondaryConnection = new SqlConnection(connectionString);
            secondaryConnection.Open();
            bool hasDuplicateIntervals = false;
            int numCommits = 0;
            // NOTE: this transaction is begun but never used or committed; kept as-is
            // because removing it changes the connection's state. TODO: review.
            SqlTransaction trans;
            trans = connection.BeginTransaction();
            List<SqlCommand> commands = new List<SqlCommand>();
            int tb = 0;
            int transactionBatchLimit = batchSize;
            // open the filestream and read the first line
            StreamReader sr = null;
            FileStream fs = null;
            try
            {
                sr = new StreamReader(fileStream);
            }
            catch (Exception ex)
            {
                mos.AddErrorMessage("Error getting data stream for input data:\n" + ex.ToString());
                mos.finalErrorCode = ModelImportStatus.ERROR_LOADING_FILE;
            }
            string line = null;
            float pct = 0;
            float bct = 1;
            // report every X blocks
            int repCount = 0;
            float fNumLines = (float)approxNumLines;
            // Cache hole-name -> HeaderID so each hole is resolved against the DB only once.
            Dictionary<string, Guid> holeIDLookups = new Dictionary<string, Guid>();
            // Shortcut map: target column name -> index in the parsed input line.
            Dictionary<string, int> columnIDX = new Dictionary<string, int>();
            int fkLookupCount = 0;
            PopulateCMapShortcut("HeaderID", importMap, columnIDX);
            PopulateCMapShortcut("FromDepth", importMap, columnIDX);
            PopulateCMapShortcut("ToDepth", importMap, columnIDX);
            PopulateCMapShortcut("SampleNumber", importMap, columnIDX);
            PopulateCMapShortcut("LabSampleNumber", importMap, columnIDX);
            PopulateCMapShortcut("LabBatchNumber", importMap, columnIDX);
            ColumnMap headerCmap = importMap.FindItemsByTargetName("HeaderID");
            AssayQueries assayQueries = new AssayQueries();
            if (sr != null)
            {
                while ((line = sr.ReadLine()) != null)
                {
                    repCount++;
                    pct = ((float)linesRead / (float)approxNumLines) * 100.0f;
                    bct++;
                    linesRead++;
                    if (ct >= importMap.dataStartLine)
                    {
                        // digest a row of input data
                        List<string> items = parseTestLine(line, importMap.inputDelimiter);
                        Guid holeID = new Guid();
                        Decimal fromDepth = new Decimal(-9999999999);
                        Decimal toDepth = new Decimal(-9999999999);
                        string sampleNumber = null;
                        string labBatchNumber = null;
                        string labsampleNumber = null;
                        // find mapped values by name
                        int idxVal = 0;
                        bool foundEntry = columnIDX.TryGetValue("HeaderID", out idxVal);
                        bool foundHole = false;
                        string holeName = "";
                        if (foundEntry)
                        {
                            string lookupByName = "HoleName";
                            string lookupValue = items[idxVal];
                            holeName = lookupValue;
                            bool lv = holeIDLookups.ContainsKey(lookupValue);
                            if (!lv)
                            {
                                string headerGUID = ForeignKeyUtils.FindFKValueInOther(lookupValue, headerCmap, secondaryConnection, false, lookupByName, XODBProjectID);
                                if (headerGUID == null)
                                {
                                    // this means we have not found the specified records in the header table
                                    // Report on issue and skip line
                                }
                                else
                                {
                                    foundHole = true;
                                    holeID = new Guid(headerGUID);
                                    holeIDLookups.Add(lookupValue, holeID);
                                    fkLookupCount++;
                                }
                            }
                            else
                            {
                                holeIDLookups.TryGetValue(lookupValue, out holeID);
                                foundHole = true;
                            }
                        }
                        if (!foundHole)
                        {
                            mos.AddErrorMessage("Failed to find hole " + holeName + ". Skipping record at line " + linesRead + ".");
                            mos.finalErrorCode = ModelImportStatus.DATA_CONSISTENCY_ERROR;
                            mos.recordsFailed++;
                            continue;
                        }
                        else
                        {
                            bool hasFrom = false;
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("FromDepth", out idxVal);
                            if (foundEntry)
                            {
                                string ii = items[idxVal];
                                Decimal val = 0;
                                bool isOk = Decimal.TryParse(ii, out val);
                                if (isOk) { fromDepth = val; hasFrom = true; }
                            }
                            bool hasTo = false;
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("ToDepth", out idxVal);
                            if (foundEntry)
                            {
                                string ii = items[idxVal];
                                Decimal val = 0;
                                bool isOk = Decimal.TryParse(ii, out val);
                                if (isOk) { toDepth = val; hasTo = true; }
                            }
                            List<Sample> duplicateList = null;
                            bool isDuplicateInterval = false;
                            if (checkForDuplicates)
                            {
                                if (hasFrom && hasTo)
                                {
                                    // here we need to check that the hole is not duplicated
                                    duplicateList = assayQueries.CheckForDuplicate(holeID, fromDepth, toDepth);
                                    if (duplicateList.Count > 0) { isDuplicateInterval = true; }
                                }
                                if (isDuplicateInterval)
                                {
                                    hasDuplicateIntervals = true;
                                    mos.AddWarningMessage("Duplicate interval for hole " + holeName + " at depth " + fromDepth + " to " + toDepth);
                                    UpdateStatus("Duplicate interval at " + holeName + " " + fromDepth + ", " + toDepth, pct);
                                    if (!doImportOverwrite) { mos.recordsFailed++; continue; }
                                }
                            }
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("SampleNumber", out idxVal);
                            if (foundEntry)
                            {
                                string ii = items[idxVal];
                                sampleNumber = ii;
                            }
                            idxVal = 0;
                            // FIX: was "SampleNumber" (copy-paste error).
                            foundEntry = columnIDX.TryGetValue("LabSampleNumber", out idxVal);
                            if (foundEntry)
                            {
                                string ii = items[idxVal];
                                labsampleNumber = ii;
                            }
                            idxVal = 0;
                            // FIX: was "SampleNumber" (copy-paste error).
                            foundEntry = columnIDX.TryGetValue("LabBatchNumber", out idxVal);
                            if (foundEntry)
                            {
                                string ii = items[idxVal];
                                labBatchNumber = ii;
                            }
                            // Re-use the existing sample for duplicate intervals, otherwise create one.
                            Sample xs = new Sample();
                            if (isDuplicateInterval == true)
                            {
                                xs = duplicateList.First();
                            }
                            else
                            {
                                xs.SampleID = Guid.NewGuid();
                                xs.FromDepth = fromDepth;
                                xs.ToDepth = toDepth;
                                xs.HeaderID = holeID;
                                xs.VersionUpdated = currentUpdateTimestamp;
                                entityObj.Samples.AddObject(xs);
                            }
                            // now pick out all the mapped values
                            // iterate over all [ASSAY RESULT] columns
                            bool assayUpdated = false;
                            bool assayAdded = false;
                            foreach (KeyValuePair<ColumnMap, Guid> kvp in resultsColumns)
                            {
                                ColumnMap cm = kvp.Key;
                                Guid g = kvp.Value; // this is the AssayGroupTestID
                                AssayGroupTestResult testResult = new AssayGroupTestResult();
                                testResult.AssayGroupTestResultID = Guid.NewGuid();
                                testResult.AssayGroupTestID = g;
                                testResult.SampleID = xs.SampleID;
                                testResult.VersionUpdated = currentUpdateTimestamp;
                                testResult.LabBatchNumber = labBatchNumber;
                                // Numeric results go to LabResult; anything unparsable to LabResultText.
                                Decimal result = new Decimal();
                                if (items.Count >= cm.sourceColumnNumber)
                                {
                                    bool parsedOK = Decimal.TryParse(items[cm.sourceColumnNumber], out result);
                                    if (parsedOK)
                                    {
                                        testResult.LabResult = result;
                                    }
                                    else
                                    {
                                        testResult.LabResultText = items[cm.sourceColumnNumber];
                                    }
                                }
                                else
                                {
                                    mos.AddWarningMessage("Line " + linesRead + " contains too few columns to read " + cm.sourceColumnName);
                                }
                                entityObj.AssayGroupTestResults.AddObject(testResult);
                                assayAdded = true;
                            }
                            if (assayAdded == true) { mos.recordsAdded++; }
                            if (assayUpdated) { mos.recordsUpdated++; }
                            tb++;
                        }
                    }
                    // Flush to the DB every transactionBatchLimit processed records.
                    if (commitToDB)
                    {
                        if (tb == transactionBatchLimit)
                        {
                            entityObj.SaveChanges();
                            UpdateStatus("Writing assays to DB (" + ct + " entries)", pct);
                            tb = 0;
                        }
                    }
                    ct++;
                }
                entityObj.SaveChanges();
            }
            if (hasDuplicateIntervals)
            {
                mos.finalErrorCode = ModelImportStatus.DATA_CONSISTENCY_ERROR;
            }
            string numFKLookups = "FK lookups " + fkLookupCount;
            mos.linesReadFromSource = ct - 1;
            UpdateStatus("Finished writing assays to database.", 0);
        }
        catch (Exception ex)
        {
            UpdateStatus("Error writing assays to database ", 0);
            mos.AddErrorMessage("Error writing assay data at line " + linesRead + ":\n" + ex.ToString());
            mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
        }
        finally
        {
            try
            {
                connection.Close();
                secondaryConnection.Close();
                fileStream.Close();
            }
            catch (Exception ex)
            {
                mos.AddErrorMessage("Error closing conenction to database:\n" + ex.ToString());
                mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
            }
        }
        mos.linesReadFromSource = linesRead;
    }
}
/// <summary>
/// Resolves the default e-mail address of each supplied contact id.
/// </summary>
/// <param name="users">Contact ids to look up; null or empty yields an empty array.</param>
/// <returns>The non-null DefaultEmail values of the matching contacts.</returns>
public string[] GetUserEmails(Guid[] users)
{
    if (users == null || users.Length == 0)
        return new string[] { };
    using (new TransactionScope(TransactionScopeOption.Suppress))
    using (var d = new XODBC(ApplicationConnectionString, null)) // FIX: context was previously leaked
    {
        var o = from c in d.Contacts where users.Contains(c.ContactID) && c.DefaultEmail != null select c.DefaultEmail;
        return o.ToArray();
    }
}
/// <summary>
/// Returns the id of the "AssayTypeName" parameter with the given name, creating and
/// saving a new parameter record when none exists yet.
/// </summary>
/// <param name="pName">Assay type name to find or create.</param>
/// <returns>The existing or newly created ParameterID.</returns>
private Guid FindParameterForAssayTypeName(string pName)
{
    using (var entityObj = new XODBC(BaseImportTools.XSTRING, null))
    {
        // Look for an existing parameter of this type and name.
        // (FIX: removed an unused "Parameter xp = new Parameter()" local.)
        Parameter existing = entityObj.Parameters.Where(c => c.ParameterType.Equals("AssayTypeName") && c.ParameterName.Equals(pName)).FirstOrDefault();
        if (existing != null)
            return existing.ParameterID;
        // Not found: create it now.
        Parameter pp = new Parameter();
        Guid pid = Guid.NewGuid();
        pp.ParameterID = pid;
        pp.ParameterType = "AssayTypeName";
        pp.ParameterName = pName;
        pp.Description = pName;
        pp.VersionUpdated = DateTime.UtcNow;
        entityObj.Parameters.AddObject(pp);
        entityObj.SaveChanges();
        return pid;
    }
}
/// <summary>
/// Resolves the ContactID linked to the given membership username within this application.
/// </summary>
/// <param name="username">Membership user name; null returns null.</param>
/// <returns>The contact's id, or null when no username is supplied.</returns>
/// <exception cref="InvalidOperationException">No matching contact, or more than one, exists.</exception>
public Guid? GetContactID(string username)
{
    if (username == null) return null;
    using (new TransactionScope(TransactionScopeOption.Suppress))
    using (var d = new XODBC(ApplicationConnectionString, null)) // FIX: context was previously leaked
    {
        // BUG FIX: the original query never filtered on the supplied username, so it
        // matched every contact of the application and Single() would throw whenever
        // more than one existed. The username predicate restores the intended lookup
        // (see the commented-out Contacts.Where(x => x.Username == username) in history).
        return (from c in d.Contacts
                join u in d.Users on c.AspNetUserID equals u.UserId
                where u.ApplicationId == ApplicationID && u.UserName == username
                select c.ContactID).Single();
    }
}