/// <summary>
/// Imports a block model from an input data stream into NKD: creates the BlockModel
/// record, writes its per-column metadata, then bulk-adds the individual block rows.
/// Errors are accumulated on <paramref name="mos"/> rather than thrown.
/// </summary>
/// <param name="mos">Import status accumulator; receives error messages.</param>
/// <param name="blockModelGUID">Pre-allocated ID for the new block model.</param>
/// <param name="bmFileStream">Stream over the block data file.</param>
/// <param name="ffFileStream">Stream over the format file (currently unused here).</param>
/// <param name="importMap">Column mapping between the input file and DB fields.</param>
/// <param name="xOrigin">Model origin X coordinate.</param>
/// <param name="yOrigin">Model origin Y coordinate.</param>
/// <param name="zOrigin">Model origin Z coordinate.</param>
/// <param name="worker">Background worker when run from the Windows client; null on the server.</param>
/// <param name="approxNumLines">Approximate input line count, used for progress reporting.</param>
/// <param name="NKDProjectID">Project the model belongs to (GUID string).</param>
/// <param name="alias">Display alias for the model.</param>
/// <param name="authorGuid">Author/responsible contact, applied only in server mode.</param>
/// <param name="connString">Database connection string.</param>
/// <returns>The list of domain names discovered while adding block data (empty on failure).</returns>
public List<string> PerformBMImport(ModelImportStatus mos, Guid blockModelGUID, System.IO.Stream bmFileStream, System.IO.Stream ffFileStream, ImportDataMap importMap, double xOrigin, double yOrigin, double zOrigin, System.ComponentModel.BackgroundWorker worker, int approxNumLines, string NKDProjectID, string alias, Guid authorGuid, string connString)
{
    this.currentWorker = worker;
    using (var entityObj = new NKDC(connString, null))
    {
        int batchSize = 1000;
        ImportUtils.BlockImport dbIm = null;
        try
        {
            dbIm = new ImportUtils.BlockImport();
            // Create the block model record itself.
            BlockModel xAdd = new BlockModel();
            xAdd.OriginX = (Decimal)xOrigin; // TO-DO
            xAdd.OriginY = (Decimal)yOrigin; // TO-DO
            xAdd.OriginZ = (Decimal)zOrigin; // TO-DO
            xAdd.Alias = alias;
            // When on the server (no worker thread), automatically pick up the
            // author GUID and apply it to the model.
            if (currentWorker == null)
            {
                xAdd.AuthorContactID = authorGuid;
                xAdd.ResponsibleContactID = authorGuid;
            }
            xAdd.VersionUpdated = DateTime.UtcNow;
            xAdd.BlockModelID = blockModelGUID;
            xAdd.ProjectID = new Guid(NKDProjectID);
            // TODO - allow user to pick size
            entityObj.BlockModels.AddObject(xAdd);
            entityObj.SaveChanges();
            UpdateStatus("Setting model meta data", 25.0);
        }
        catch (Exception ex)
        {
            // FIX: repaired broken string literal (raw embedded newline) and the
            // "defintion" typo; format now matches the other messages in this method.
            mos.AddErrorMessage("Error setting block model definition data:\n" + ex.ToString());
        }
        List<string> domains = new List<string>();
        // Only proceed to metadata/block import if the importer was constructed.
        if (dbIm != null)
        {
            try
            {
                // Record per-column metadata so the model's columns can be identified later.
                dbIm.SetBlockModelMetaData(blockModelGUID, importMap, connString);
            }
            catch (Exception ex)
            {
                mos.AddErrorMessage("Error setting block model meta data:\n" + ex.ToString());
            }
            try
            {
                // Add the new BM guid to the column map as a default so that it is always entered.
                importMap.columnMap.Add(new ColumnMap("", -1, "BlockModelBlock", "BlockModelID", ImportDataMap.TEXTDATATYPE, blockModelGUID.ToString(), blockModelGUID.ToString(), ImportDataMap.UNIT_NONE));
                // Add the individual blocks.
                domains = dbIm.AddBlockData(mos, bmFileStream, importMap, blockModelGUID, batchSize, UpdateStatus, approxNumLines, connString);
                // Run this only in the Windows client (determined by the presence of the worker thread).
                if (currentWorker != null)
                {
                    List<Tuple<string, string>> doms = new List<Tuple<string, string>>();
                    string domainColumnName = "Domain";
                    foreach (string ss in domains)
                    {
                        doms.Add(new Tuple<string, string>(domainColumnName, ss));
                    }
                    dbIm.UpdateDomains(doms, blockModelGUID);
                }
            }
            catch (Exception ex)
            {
                mos.AddErrorMessage("Error adding block data:\n" + ex.ToString());
            }
        }
        return domains;
    }
}
/// <summary>
/// Imports a block model from a local data file into NKD: creates the BlockModel
/// record (with a freshly generated ID), writes its per-column metadata, then adds
/// the individual block rows.
/// </summary>
/// <param name="bmDataFile">Path to the block model data file.</param>
/// <param name="selectedFormatBMFile">Path to the selected format file (currently unused).</param>
/// <param name="importMap">Column mapping between the input file and DB fields.</param>
/// <param name="xOrigin">Model origin X coordinate.</param>
/// <param name="yOrigin">Model origin Y coordinate.</param>
/// <param name="zOrigin">Model origin Z coordinate.</param>
/// <param name="worker">Background worker used for progress reporting; may be null.</param>
/// <param name="approxNumLines">Approximate input line count, used for progress reporting.</param>
/// <param name="NKDProjectID">Project the model belongs to (GUID string).</param>
/// <param name="units">Unit name recorded on the BlockModelID column map entry.</param>
/// <param name="connString">Database connection string.</param>
/// <returns>Always the empty string (kept for interface compatibility).</returns>
public string PerformBMImport(string bmDataFile, string selectedFormatBMFile, ImportDataMap importMap, double xOrigin, double yOrigin, double zOrigin, System.ComponentModel.BackgroundWorker worker, int approxNumLines, string NKDProjectID, string units, string connString)
{
    this.currentWorker = worker;
    UpdateStatus("Connecting to NKD", 10.0);
    using (var entityObj = new NKDC(connString, null))
    {
        // FIX: removed dead code - an unused anonymous-type query over BlockModels and
        // an unused reflection loop over BlockModel properties (which also called
        // pi.GetType() where pi.PropertyType was presumably intended), plus unused
        // timing locals; none of it affected behavior.
        int batchSize = 100;
        UpdateStatus("Creating new NKD block model", 20.0);
        ImportUtils.BlockImport dbIm = new ImportUtils.BlockImport();
        Guid blockModelGUID = Guid.NewGuid();
        // Create the block model record itself.
        BlockModel xAdd = new BlockModel();
        xAdd.OriginX = (Decimal)xOrigin; // TO-DO
        xAdd.OriginY = (Decimal)yOrigin; // TO-DO
        xAdd.OriginZ = (Decimal)zOrigin; // TO-DO
        xAdd.BlockModelID = blockModelGUID;
        xAdd.ProjectID = new Guid(NKDProjectID);
        // TODO - allow user to pick size
        entityObj.BlockModels.AddObject(xAdd);
        entityObj.SaveChanges();
        UpdateStatus("Setting model meta data", 25.0);
        // Add the meta data to identify all of the columns.
        dbIm.SetBlockModelMetaData(blockModelGUID, importMap, connString);
        // Add the new BM guid to the column map as a default so that it is always entered.
        importMap.columnMap.Add(new ColumnMap("", -1, "BlockModelBlock", "BlockModelID", ImportDataMap.TEXTDATATYPE, blockModelGUID.ToString(), null, units));
        // Add the individual blocks.
        dbIm.AddBlockData(bmDataFile, importMap, blockModelGUID, batchSize, UpdateStatus, approxNumLines, connString);
        return "";
    }
}
/// <summary>
/// Imports assay results from a delimited text stream: creates an AssayGroup for this
/// import, one AssayGroupTest per "[ASSAY..." column in the import map, then for each
/// data line a Sample plus its AssayGroupTestResult rows. Records with a hole name
/// that cannot be resolved, or duplicate depth intervals (unless overwriting), are
/// skipped and reported on <paramref name="mos"/>.
/// </summary>
/// <param name="mos">Import status accumulator; receives errors, warnings and record counters.</param>
/// <param name="fileStream">Stream over the delimited input file; closed in the finally block.</param>
/// <param name="importMap">Column map describing the input file layout.</param>
/// <param name="batchSize">Number of imported rows between batched SaveChanges calls.</param>
/// <param name="UpdateStatus">Progress callback (message, percent complete).</param>
/// <param name="approxNumLines">Approximate input line count; used only for progress percentages.</param>
/// <param name="connectionString">Database connection string.</param>
/// <param name="NKDProjectID">Project the assay data belongs to.</param>
/// <param name="checkForDuplicates">When true, check each hole/depth interval against existing samples.</param>
/// <param name="doImportOverwrite">When true, duplicate intervals update the existing sample instead of being skipped.</param>
internal void AddAssayData(ModelImportStatus mos, Stream fileStream, FormatSpecification.ImportDataMap importMap, int batchSize, Action<string, double> UpdateStatus, int approxNumLines, string connectionString, Guid NKDProjectID, bool checkForDuplicates, bool doImportOverwrite)
{
    bool commitToDB = true; // master switch: when false nothing is persisted (debug aid)
    DateTime currentUpdateTimestamp = DateTime.UtcNow; // single timestamp stamped on every row of this import
    // first set up an assay group object - we can do this through the edm
    using (var entityObj = new NKDC(connectionString, null))
    {
        //entityObj.Configuration.AutoDetectChangesEnabled = false;
        Guid agGuid = Guid.NewGuid();
        AssayGroup ag = new AssayGroup();
        ag.AssayGroupID = agGuid;
        ag.ProjectID = NKDProjectID;
        ag.AssayGroupName = "Manual import";
        ag.Comment = "From file " + importMap.mapOriginalDataFile;
        ag.Entered = currentUpdateTimestamp;
        ag.VersionUpdated = currentUpdateTimestamp;
        entityObj.AssayGroups.AddObject(ag);
        if (commitToDB)
        {
            entityObj.SaveChanges();
        }
        // set up the assay test columns - one of these for each test type
        Dictionary<ColumnMap, Guid> resultsColumns = new Dictionary<ColumnMap, Guid>();
        Dictionary<Guid, AssayGroupTest> assayGroups = new Dictionary<Guid, AssayGroupTest>();
        foreach (ColumnMap cim in importMap.columnMap)
        {
            if (cim.targetColumnName.Trim().StartsWith("[ASSAY"))
            {
                // this is a test category
                resultsColumns.Add(cim, Guid.NewGuid());
            }
        }
        UpdateStatus("Setting up assay tests ", 2);
        foreach (KeyValuePair<ColumnMap, Guid> kvp in resultsColumns)
        {
            ColumnMap cm = kvp.Key;
            Guid g = kvp.Value;
            AssayGroupTest xt = new AssayGroupTest();
            // test name is capped at 16 characters of the source column name
            string ss1 = "";
            if (cm.sourceColumnName != null && cm.sourceColumnName.Length > 15)
            {
                ss1 = cm.sourceColumnName.Substring(0, 16);
            }
            else
            {
                ss1 = cm.sourceColumnName;
            }
            // find (or lazily create) the Parameter record for this assay type name
            Guid pid = FindParameterForAssayTypeName(cm.sourceColumnName);
            xt.ParameterID = pid;
            xt.AssayTestName = ss1;
            xt.AssayGroupID = agGuid;
            xt.AssayGroupTestID = g;
            xt.VersionUpdated = currentUpdateTimestamp;
            entityObj.AssayGroupTests.AddObject(xt);
            assayGroups.Add(g, xt);
            if (commitToDB)
            {
                entityObj.SaveChanges();
            }
        }
        // iterate through the data lines
        int ct = 1;        // 1-based line counter, compared against importMap.dataStartLine
        int linesRead = 0; // total physical lines read from the stream
        SqlConnection connection = null;
        SqlConnection secondaryConnection = null; // used for FK (hole name) lookups while the main reader loop runs
        //List<string> uniqueDomains = new List<string>();
        // get a connection to the database
        string line = null;
        try
        {
            int domainColIDX = -1;
            connection = new SqlConnection(connectionString);
            connection.Open();
            secondaryConnection = new SqlConnection(connectionString);
            secondaryConnection.Open();
            bool hasDuplicateIntervals = false;
            int numCommits = 0;
            // NOTE(review): this transaction is begun but never committed or rolled back in
            // this method; all saves below go through the entity context — confirm intended.
            SqlTransaction trans;
            trans = connection.BeginTransaction();
            List<SqlCommand> commands = new List<SqlCommand>();
            int tb = 0; // rows accumulated since the last batched SaveChanges
            int transactionBatchLimit = batchSize;
            // open the filestream and read the first line
            StreamReader sr = null;
            FileStream fs = null;
            try
            {
                //fs = new FileStream(textInputDataFile, FileMode.Open, FileAccess.Read, FileShare.Read);
                sr = new StreamReader(fileStream);
            }
            catch (Exception ex)
            {
                mos.AddErrorMessage("Error getting data stream for input data:\n" + ex.ToString());
                mos.finalErrorCode = ModelImportStatus.ERROR_LOADING_FILE;
            }
            line = null;
            float pct = 0;
            float bct = 1;
            // report every X blocks
            int repCount = 0;
            //int reportOnBlock = 1000;
            float fNumLines = (float)approxNumLines;
            // cache of hole name -> header GUID so each hole hits the DB at most once
            Dictionary<string, Guid> holeIDLookups = new Dictionary<string, Guid>();
            // shortcut map of target column name -> input column index
            Dictionary<string, int> columnIDX = new Dictionary<string, int>();
            int fkLookupCount = 0;
            BaseImportTools.PopulateCMapShortcut(importMap, columnIDX);
            ColumnMap headerCmap = importMap.FindItemsByTargetName("HeaderID");
            AssayQueries assayQueries = new AssayQueries();
            List<string> items = new List<string>();
            if (sr != null)
            {
                while ((line = sr.ReadLine()) != null)
                {
                    repCount++;
                    pct = ((float)linesRead / (float)approxNumLines) * 100.0f;
                    bct++;
                    linesRead++;
                    if (ct >= importMap.dataStartLine)
                    {
                        // Parse the line. If the previous line was short (a logical record
                        // split across physical lines) attempt to join this line onto it.
                        var append = BaseImportTools.ParseTestLine(line, importMap.inputDelimiter);
                        if (items.Count == 0 || append.Count == importMap.MaxColumns)
                            items = append;
                        else if (items.Count < importMap.MaxColumns)
                        {
                            items[items.Count - 1] = items[items.Count - 1] + append[0];
                            items.AddRange(append.Skip(1));
                        }
                        if (items.Count < importMap.MaxColumns)
                        {
                            mos.AddWarningMessage(string.Format("Bad CSV file, attempted to join....{0}", linesRead));
                            continue; // keep the partial row and try to join the next physical line
                        }
                        else if (items.Count > importMap.MaxColumns)
                        {
                            mos.AddWarningMessage(string.Format("FAILED! Line {0}. Bad CSV file, attempted to join.", linesRead));
                            items.Clear();
                            continue;
                        }
                        // digest a row of input data
                        Guid holeID = new Guid();
                        Decimal? fromDepth = null;
                        Decimal? toDepth = null;
                        string sampleNumber = null;
                        string labBatchNumber = null;
                        string labsampleNumber = null;
                        Decimal? sampleMassKg = null;
                        Decimal? dryMassKg = null;
                        string standardSampleTypeName = null;
                        // find mapped values by name
                        //ColumnMap cmap = importMap.FindItemsByTargetName("HeaderID");
                        int idxVal = 0;
                        bool foundEntry = columnIDX.TryGetValue("HeaderID", out idxVal);
                        bool foundHole = false;
                        string holeName = "";
                        if (foundEntry)
                        {
                            string lookupByName = "HoleName";
                            string lookupValue = items[idxVal];
                            holeName = lookupValue;
                            bool lv = holeIDLookups.ContainsKey(lookupValue);
                            if (!lv)
                            {
                                // not cached yet: resolve the hole name to a header GUID in the DB
                                string headerGUID = ForeignKeyUtils.FindFKValueInOther(lookupValue, headerCmap, secondaryConnection, false, lookupByName, NKDProjectID);
                                if (headerGUID == null)
                                {
                                    // this means we have not found the specified records in the header table
                                    // Report on issue and skip line
                                }
                                else
                                {
                                    foundHole = true;
                                    holeID = new Guid(headerGUID);
                                    holeIDLookups.Add(lookupValue, holeID);
                                    fkLookupCount++;
                                }
                            }
                            else
                            {
                                holeIDLookups.TryGetValue(lookupValue, out holeID);
                                foundHole = true;
                            }
                        }
                        if (!foundHole)
                        {
                            mos.AddErrorMessage("Failed to find hole " + holeName + ". 
Skipping record at line " + linesRead + ".");
                            mos.finalErrorCode = ModelImportStatus.DATA_CONSISTENCY_ERROR;
                            mos.recordsFailed++;
                            continue;
                        }
                        else
                        {
                            // parse the optional depth interval for this sample
                            bool hasFrom = false;
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("FromDepth", out idxVal);
                            if (foundEntry)
                            //cmap = importMap.FindItemsByTargetName();
                            //if (cmap != null)
                            {
                                string ii = items[idxVal];
                                Decimal val = 0;
                                bool isOk = Decimal.TryParse(ii, out val);
                                if (isOk)
                                {
                                    fromDepth = val;
                                    hasFrom = true;
                                }
                            }
                            bool hasTo = false;
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("ToDepth", out idxVal);
                            if (foundEntry)
                            //cmap = importMap.FindItemsByTargetName("ToDepth");
                            //if (cmap != null)
                            {
                                string ii = items[idxVal];
                                Decimal val = 0;
                                bool isOk = Decimal.TryParse(ii, out val);
                                if (isOk)
                                {
                                    toDepth = val;
                                    hasTo = true;
                                }
                            }
                            List<Sample> duplicateList = null;
                            bool isDuplicateInterval = false;
                            if (checkForDuplicates)
                            {
                                if (hasFrom && hasTo)
                                {
                                    // here we need to check that the hole is not duplicated
                                    duplicateList = assayQueries.CheckForDuplicate(holeID, fromDepth, toDepth);
                                    if (duplicateList.Count > 0)
                                    {
                                        isDuplicateInterval = true;
                                    }
                                }
                                if (isDuplicateInterval)
                                {
                                    hasDuplicateIntervals = true;
                                    mos.AddWarningMessage("Duplicate interval for hole " + holeName + " at depth " + fromDepth + " to " + toDepth);
                                    UpdateStatus("Duplicate interval at " + holeName + " " + fromDepth + ", " + toDepth, pct);
                                    if (!doImportOverwrite)
                                    {
                                        mos.recordsFailed++;
                                        continue; // skip duplicates unless overwrite was requested
                                    }
                                }
                            }
                            //cmap = importMap.FindItemsByTargetName("SampleNumber");
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("SampleNumber", out idxVal);
                            if (foundEntry)
                            // if (cmap != null)
                            {
                                string ii = items[idxVal];
                                sampleNumber = ii;
                            }
                            //cmap = importMap.FindItemsByTargetName("LabSampleNumber");
                            //if (cmap != null)
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("LabSampleName", out idxVal);
                            if (foundEntry)
                            {
                                string ii = items[idxVal];
                                labsampleNumber = ii;
                            }
                            //cmap = importMap.FindItemsByTargetName("LabBatchNumber");
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("LabBatchNumber", out idxVal);
                            if (foundEntry)
                            {
                                string ii = items[idxVal];
                                labBatchNumber = ii;
                            }
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("SampleMassKg", out idxVal);
                            if (foundEntry)
                            {
                                string ii = items[idxVal];
                                Decimal val = 0;
                                bool isOk = Decimal.TryParse(ii, out val);
                                if (isOk)
                                {
                                    sampleMassKg = val;
                                }
                            }
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("DryMassKg", out idxVal);
                            if (foundEntry)
                            {
                                string ii = items[idxVal];
                                Decimal val = 0;
                                bool isOk = Decimal.TryParse(ii, out val);
                                if (isOk)
                                {
                                    dryMassKg = val;
                                }
                            }
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("StandardSampleTypeName", out idxVal);
                            if (foundEntry)
                            {
                                string ii = items[idxVal];
                                standardSampleTypeName = ii;
                            }
                            // Reuse the existing sample for a duplicate interval (overwrite mode);
                            // otherwise populate a brand-new Sample row.
                            Sample xs = new Sample();
                            if (isDuplicateInterval == true)
                            {
                                xs = duplicateList.First();
                            }
                            else
                            {
                                xs.SampleID = Guid.NewGuid();
                                xs.FromDepth = fromDepth;
                                xs.ToDepth = toDepth;
                                xs.HeaderID = holeID;
                                xs.VersionUpdated = currentUpdateTimestamp;
                                xs.SampleNumber = sampleNumber;
                                xs.SampleMassKg = sampleMassKg;
                                if (!string.IsNullOrWhiteSpace(standardSampleTypeName))
                                {
                                    // resolve the sample type name to its ID; first match wins
                                    var t = entityObj.DictionarySampleTypes.Select(f=>new {f.SampleTypeID, f.StandardSampleTypeName}).FirstOrDefault(f => f.StandardSampleTypeName == standardSampleTypeName);
                                    if (t != null)
                                        xs.SampleTypeID = t.SampleTypeID;
                                }
                                xs.DryMassKg = dryMassKg;
                            }
                            // now pick out all the mapped values
                            // iterate over all [ASSAY RESULT] columns
                            bool assayUpdated = false;
                            bool assayAdded = false;
                            var results = new List<AssayGroupTestResult>();
                            foreach (KeyValuePair<ColumnMap, Guid> kvp in resultsColumns)
                            {
                                ColumnMap cm = kvp.Key;
                                Guid g = kvp.Value; // this is the AssayGroupTestID
                                AssayGroupTestResult testResult = new AssayGroupTestResult();
                                /*bool assayResFound = false; if (isDuplicateInterval) { List<AssayGroupTestResult> testResults = assayQueries.GetDuplicateResult(xs.SampleID, cm.sourceColumnName); if (testResults.Count > 0) { testResult = testResults.First(); assayResFound = true; } }*/
                                //if(!assayResFound)
                                // {
                                testResult.AssayGroupTestResultID = Guid.NewGuid();
                                testResult.AssayGroupTestID = g;
                                testResult.SampleID = xs.SampleID;
                                testResult.VersionUpdated = currentUpdateTimestamp;
                                //}
                                testResult.LabBatchNumber = labBatchNumber;
                                testResult.LabSampleName = labsampleNumber;
                                Decimal result = new Decimal();
                                // NOTE(review): the guard allows items.Count == cm.sourceColumnNumber, in
                                // which case the index below would overrun — confirm sourceColumnNumber
                                // is 0-based and whether this should be a strict '>'.
                                if (items.Count >= cm.sourceColumnNumber)
                                {
                                    bool parsedOK = Decimal.TryParse(items[cm.sourceColumnNumber], out result);
                                    if (parsedOK)
                                    {
                                        testResult.LabResult = result;
                                    }
                                    testResult.LabResultText = items[cm.sourceColumnNumber];
                                }
                                else
                                {
                                    mos.AddWarningMessage("Line " + linesRead + " contains too few columns to read " + cm.sourceColumnName);
                                }
                                results.Add(testResult);
                                //if (isDuplicateInterval == false)
                                //{
                                assayAdded = true;
                                //}else{
                                //    if (!assayResFound)
                                //    {
                                //        entityObj.AssayGroupTestResult.Add(testResult);
                                //        assayAdded = true;
                                //    }
                                //    else {
                                //        assayUpdated = true;
                                //    }
                                //}
                            }
                            // Only persist rows that actually carry a result value; lines with no
                            // results at all are dropped entirely (sample included).
                            var resultsToSave = (from o in results where !string.IsNullOrWhiteSpace(o.LabResultText) select o);
                            if (!resultsToSave.Any())
                                continue;
                            if (!isDuplicateInterval)
                                entityObj.Samples.AddObject(xs);
                            foreach (var save in resultsToSave)
                                entityObj.AssayGroupTestResults.AddObject(save);
                            if (assayAdded == true)
                            {
                                mos.recordsAdded++;
                            }
                            if (assayUpdated)
                            {
                                mos.recordsUpdated++;
                            }
                            tb++;
                        }
                    }
                    if (commitToDB)
                    {
                        // flush a batch of pending rows every transactionBatchLimit imported records
                        if (tb == transactionBatchLimit)
                        {
                            entityObj.SaveChanges();
                            UpdateStatus("Writing assays to DB (" + ct + " entries)", pct);
                            tb = 0;
                        }
                    }
                    ct++;
                    //Console.WriteLine("Processing line "+ct);
                    items.Clear();
                }
                entityObj.SaveChanges(); // flush the final partial batch
            }
            if (hasDuplicateIntervals)
            {
                mos.finalErrorCode = ModelImportStatus.DATA_CONSISTENCY_ERROR;
            }
            string numFKLookups = "FK lookups " + fkLookupCount;
            mos.linesReadFromSource = ct - 1;
            UpdateStatus("Finished writing assays to database.", 0);
        }
        catch (Exception ex)
        {
            UpdateStatus("Error writing assays to database ", 0);
            mos.AddErrorMessage("Error writing assay data at line " + linesRead + ":\n" + line + "\n\n" + ex.ToString());
            mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
        }
        finally
        {
            // best-effort cleanup of both SQL connections and the input stream
            try
            {
                connection.Close();
                secondaryConnection.Close();
                fileStream.Close();
            }
            catch (Exception ex)
            {
                mos.AddErrorMessage("Error closing connection to database:\n" + ex.ToString());
                mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
            }
        }
        mos.linesReadFromSource = linesRead; // note: overwrites the ct-based count set above
    }
}
/// <summary>
/// Looks up the Parameter record of type "AssayTypeName" matching the given name,
/// creating (and saving) a new one when none exists.
/// </summary>
/// <param name="pName">The assay type name to find or create.</param>
/// <returns>The ParameterID of the existing or newly created record.</returns>
private Guid FindParameterForAssayTypeName(string pName)
{
    using (var entityObj = new NKDC(BaseImportTools.XSTRING, null))
    {
        // FIX: removed unused local 'Parameter xp' and replaced the manual
        // found-flag/foreach/break pattern with FirstOrDefault.
        Parameter existing = entityObj.Parameters
            .Where(c => c.ParameterType.Equals("AssayTypeName") && c.ParameterName.Equals(pName))
            .FirstOrDefault();
        if (existing != null)
        {
            return existing.ParameterID;
        }
        // No match: create a new parameter entry for this assay type name.
        Parameter pp = new Parameter();
        Guid pid = Guid.NewGuid();
        pp.ParameterID = pid;
        pp.ParameterType = "AssayTypeName";
        pp.ParameterName = pName;
        pp.Description = pName;
        pp.VersionUpdated = DateTime.UtcNow;
        entityObj.Parameters.AddObject(pp);
        entityObj.SaveChanges();
        return pid;
    }
}
/// <summary>
/// Orchard user-created hook: stores the new user's content item, synchronises users
/// into NKD, and ensures the new contact has an Experience record linking it to the
/// default company.
/// </summary>
/// <param name="context">Orchard user context for the newly created user.</param>
public void Created(UserContext context)
{
    _contentManagerSession.Store(context.User.ContentItem);
    SyncUsers();
    var contact = GetContactID(context.User.UserName);
    if (contact == null)
        return; // no matching NKD contact; nothing more to do
    // Add user to default company (suppressing any ambient transaction).
    using (new TransactionScope(TransactionScopeOption.Suppress))
    using (var d = new NKDC(ApplicationConnectionString, null, false)) // FIX: dispose the data context (matches the using pattern elsewhere in this file)
    {
        // only create the link if the contact has no company experience yet
        if (!(from o in d.Experiences where o.ContactID == contact && o.CompanyID != null select o).Any())
        {
            var e = new Experience
            {
                ExperienceID = Guid.NewGuid(),
                ContactID = contact,
                CompanyID = COMPANY_DEFAULT,
                ExperienceName = string.Format("User - {0}", context.User.UserName),
                VersionUpdated = DateTime.UtcNow
            };
            d.Experiences.AddObject(e);
            d.SaveChanges();
        }
    }
}
// NOTE: a disabled SyncUsersManual implementation (coalescing concurrent sync requests
// via Monitor/ManualResetEvent) previously lived here as commented-out code; recover it
// from source history if that behavior is ever needed again.

/// <summary>
/// Synchronises Orchard users and roles with the NKD membership tables.
/// Forms authentication: mirrors Orchard users/roles into the NKD Users/Roles tables,
/// creates Contact records for new users, and reconciles role memberships via the
/// E_SP_DropUserRole / E_SP_AddUserRole stored procedures.
/// Windows authentication: queries Active Directory and inserts/updates matching
/// NKD Contact records (no role sync).
/// </summary>
public void SyncUsers()
{
    //Get Orchard Users & Roles
    var orchardUsers = _contentManager.Query<UserPart, UserPartRecord>().List();
    var orchardRoles = _roleService.GetRoles().ToArray();
    // flat (UserName, RoleName) pairs for the current Orchard role memberships
    var orchardUserRoles = (from xur in _userRolesRepository.Table.ToArray()
                            join xu in orchardUsers on xur.UserId equals xu.Id
                            join xr in orchardRoles on xur.Role.Id equals xr.Id
                            select new {xu.UserName, RoleName=xr.Name}).ToArray();
    //Get Authmode & Then Update
    if (AuthenticationMode == System.Web.Configuration.AuthenticationMode.Forms)
    {
        using (new TransactionScope(TransactionScopeOption.Suppress))
        {
            var c = new NKDC(ApplicationConnectionString,null);
            // purge membership users that no longer map to any contact
            c.Users.Where(f => !(from o in c.Contacts select o.AspNetUserID).Contains(f.UserId)).Delete();
            var r = from o in c.Roles.Include("Users") where o.ApplicationId == ApplicationID select o;
            var u = from o in c.Users.Include("Roles") where o.ApplicationId == ApplicationID select o;
            var updated = DateTime.UtcNow;
            //New User
            var nu = (from o in orchardUsers where !(from ou in u select ou.UserName).Contains(o.UserName) select o);
            foreach (var n in nu)
            {
                var user = new Users();
                user.UserId = Guid.NewGuid();
                user.UserName = n.UserName;
                user.ApplicationId = ApplicationID;
                user.LoweredUserName = n.UserName.ToLower();
                user.LastActivityDate = updated;
                c.Users.AddObject(user);
                // link any existing contacts with the same username; otherwise create a contact
                var contacts = (from o in c.Contacts where o.Username == user.UserName select o);
                foreach (var nc in contacts)
                {
                    nc.AspNetUserID = user.UserId;
                }
                if (!contacts.Any())
                {
                    var contact = new Contact();
                    contact.ContactID = Guid.NewGuid();
                    contact.Username = user.UserName;
                    contact.AspNetUserID = user.UserId;
                    contact.DefaultEmail = n.Email;
                    contact.ContactName = string.Format("Site User: {0}", user.UserName);
                    contact.VersionUpdated = updated;
                    contact.Surname = "";
                    contact.Firstname = "";
                    c.Contacts.AddObject(contact);
                }
            }
            //New Role
            var nr = (from o in orchardRoles where !(from or in r select or.RoleName).Contains(o.Name) select o);
            foreach (var n in nr)
            {
                var role = new Roles();
                role.RoleName = n.Name;
                role.ApplicationId = ApplicationID;
                role.RoleId = Guid.NewGuid();
                role.LoweredRoleName = n.Name.ToLower();
                c.Roles.AddObject(role);
            }
            c.SaveChanges();
            // reconcile role memberships: drop those missing in Orchard, add new ones
            var users = c.Users.Include("Roles").Where(f => f.ApplicationId == ApplicationID).ToArray();
            var roles = c.Roles.Where(f => f.ApplicationId == ApplicationID).ToArray();
            foreach (var user in users)
            {
                foreach (var role in user.Roles.AsEnumerable())
                {
                    //Remove
                    if (!orchardUserRoles.Any(f => f.RoleName == role.RoleName && f.UserName == user.UserName))
                    {
                        c.E_SP_DropUserRole(user.UserId, role.RoleId);
                    }
                }
                var newRoleIds = (from o in orchardUserRoles
                                  where !user.Roles.Select(f=>f.RoleName).Contains(o.RoleName) && o.UserName==user.UserName
                                  join m in roles on o.RoleName equals m.RoleName
                                  select m);
                foreach (var newRoleId in newRoleIds)
                {
                    c.E_SP_AddUserRole(user.UserId, newRoleId.RoleId);
                    //user.Roles.Add(newRoleId);
                }
            }
            c.SaveChanges();
            //TODO Update per application
            //var ru = (from o in u.ToArray() where !(from ou in orchardUsers select ou.UserName).Contains(o.UserName) select o); //can just delete from users table
            //foreach (var rem in ru)
            //{
            //    //c.Users.DeleteObject(rem); //Doesn't work for multitenancy
            //}
            //c.SaveChanges();
        }
    }
    else if (AuthenticationMode == System.Web.Configuration.AuthenticationMode.Windows)
    {
        //Module syncs only users - only all admin for now
        //Get AD Users
        // throw new NotImplementedException();
        // get a DirectorySearcher object
        DirectorySearcher search = new DirectorySearcher();
        // specify the search filter
        search.Filter = "(&(objectCategory=person)(objectClass=user))";
        //search.Filter = "(&(objectClass=user)(anr=agrosser))"; //TEST
        //// specify which property values to return in the search
        search.PropertiesToLoad.Add("name"); // first name
        search.PropertiesToLoad.Add("givenName"); // first name
        search.PropertiesToLoad.Add("sn"); // last name
        search.PropertiesToLoad.Add("mail"); // smtp mail address
        search.PropertiesToLoad.Add("samaccountname"); // account name
        search.PropertiesToLoad.Add("memberof"); // groups
        search.PropertiesToLoad.Add("objectsid");
        search.PropertiesToLoad.Add("objectguid");
        search.PropertiesToLoad.Add("title");
        // perform the search
        SearchResultCollection results = search.FindAll(); //.FindOne();
        var sessionRoleCache = new Dictionary<string, string>();
        // project the AD results into a flat anonymous shape; each field is null
        // when the corresponding directory property is absent
        var adusers = from SearchResult o in results
                      select new
                      {
                          name = o.Properties["name"] != null && o.Properties["name"].Count > 0 ? string.Format("{0}", o.Properties["name"][0]) : null,
                          givenName = o.Properties["givenName"] != null && o.Properties["givenName"].Count > 0 ? string.Format("{0}", o.Properties["givenName"][0]) : null,
                          sn = o.Properties["sn"] != null && o.Properties["sn"].Count > 0 ? string.Format("{0}", o.Properties["sn"][0]) : null,
                          email = o.Properties["mail"] != null && o.Properties["mail"].Count > 0 ? string.Format("{0}", o.Properties["mail"][0]) : null,
                          samaccountname = o.Properties["samaccountname"] != null && o.Properties["samaccountname"].Count > 0 ? string.Format("{0}", o.Properties["samaccountname"][0]) : null,
                          // DOMAIN\name form derived from the account SID
                          username = o.Properties["objectsid"] != null && o.Properties["objectsid"].Count > 0 ? ((NTAccount)(new SecurityIdentifier((byte[])o.Properties["objectsid"][0], 0)).Translate(typeof(NTAccount))).ToString() : null,
                          guid = o.Properties["objectguid"] != null && o.Properties["objectguid"].Count > 0 ? new Guid((byte[])o.Properties["objectguid"][0]) : (Guid?)null,
                          title = o.Properties["title"] != null && o.Properties["title"].Count > 0 ? string.Format("{0}", o.Properties["title"][0]) : null,
                          roles = o.Properties["memberof"] != null ? (from string m in o.Properties["memberof"] select getNameFromFQDN(m, sessionRoleCache)).ToArray() : new string[] { }
                      };
        //Get NKD Users
        Contact[] nkdusers;
        using (new TransactionScope(TransactionScopeOption.Suppress))
        {
            var d = new NKDC(ApplicationConnectionString,null);
            nkdusers = (from o in d.Contacts select o).ToArray();
            //Sync AD, Orchard, NKD
            //New into NKD
            //We need firstname, surname
            // AD entries not yet present in NKD, matched by GUID when available, else by username
            var ad_new = (from o in adusers
                          where o.givenName != null && o.sn != null && (o.guid.HasValue && !(from x in nkdusers select x.ContactID).Contains((Guid)o.guid)) || (!o.guid.HasValue && !(from x in nkdusers select x.Username.ToLowerInvariant()).Contains(o.username.ToLowerInvariant()))
                          select o);
            foreach (var o in ad_new)
            {
                Contact c = new Contact();
                // reuse the AD object GUID as the contact ID where possible
                c.ContactID = o.guid.HasValue ? o.guid.Value : Guid.NewGuid();
                c.Username = o.username;
                c.Firstname = o.givenName;
                c.ContactName = string.Join(string.Empty, string.Format("{0} [{1}]", o.name, o.username).Take(120));
                c.Surname = o.sn;
                c.DefaultEmail = o.email;
                d.Contacts.AddObject(c);
            }
            //Updates into NKD
            // AD entries matching an existing contact whose tracked fields have changed
            var ad_diff = from o in adusers
                          from x in nkdusers
                          where ((o.guid.HasValue && o.guid.Value == x.ContactID) || (o.username != null && x.Username != null && o.username.ToLowerInvariant() == x.Username.ToLowerInvariant()))
                          //Things to update
                          && (o.givenName != x.Firstname || o.sn != x.Surname || o.email != x.DefaultEmail || o.name != x.ContactName)
                          select new { x.ContactID, o.givenName, o.sn, o.email, o.name, o.username };
            foreach (var o in ad_diff)
            {
                var c = nkdusers.First(x => x.ContactID == o.ContactID);
                c.Firstname = o.givenName;
                c.ContactName = string.Join(string.Empty, string.Format("{0} [{1}]", o.name, o.username).Take(120));
                c.Surname = o.sn;
                c.DefaultEmail = o.email;
            }
            d.SaveChanges();
        }
    }
}
/// <summary>
/// Creates or updates a security black/white-list entry for the given secured record.
/// When SecurityID is set, the current-version entry (Version == 0, not deleted) is
/// updated; otherwise a new entry is created. Requires Read+Update (edit) or
/// Read+Create (new) ownership respectively, else an AuthorityException is thrown.
/// </summary>
/// <param name="secured">The secured record carrying accessor/owner IDs and permission flags.</param>
public void UpdateSecurity(ISecured secured)
{
    //TODO!!!: When writing security check for antecedentid = referenceid &/or version=0
    //First check user and owner rights, black list and white list against record
    //Then get ok
    if (secured.SecurityID.HasValue)
    {
        //Call Edit
        if (CheckOwnership(secured, ActionPermission.Read | ActionPermission.Update))
        {
            using (new TransactionScope(TransactionScopeOption.Suppress))
            {
                var c = new NKDC(ApplicationConnectionString,null);
                if (secured.IsBlack)
                {
                    // update the single current (Version == 0, not deleted) blacklist entry
                    var s = (from o in c.SecurityBlacklists where o.SecurityBlacklistID==secured.SecurityID && o.Version==0 && o.VersionDeletedBy==null select o).Single();
                    s.AccessorContactID = secured.AccessorContactID;
                    s.AccessorApplicationID = secured.AccessorApplicationID;
                    s.AccessorCompanyID = secured.AccessorCompanyID;
                    s.AccessorProjectID = secured.AccessorProjectID;
                    s.AccessorRoleID = secured.AccessorRoleID;
                    s.CanCreate = secured.CanCreate;
                    s.CanRead = secured.CanRead;
                    s.CanDelete = secured.CanDelete;
                    s.CanUpdate = secured.CanUpdate;
                    s.VersionUpdated = DateTime.UtcNow;
                    s.VersionUpdatedBy = secured.OwnerContactID;
                }
                else
                {
                    // update the single current (Version == 0, not deleted) whitelist entry
                    var s = (from o in c.SecurityWhitelists where o.SecurityWhitelistID == secured.SecurityID && o.Version== 0 && o.VersionDeletedBy == null select o).Single();
                    s.AccessorContactID = secured.AccessorContactID;
                    s.AccessorApplicationID = secured.AccessorApplicationID;
                    s.AccessorCompanyID = secured.AccessorCompanyID;
                    s.AccessorProjectID = secured.AccessorProjectID;
                    s.AccessorRoleID = secured.AccessorRoleID;
                    s.CanCreate = secured.CanCreate;
                    s.CanRead = secured.CanRead;
                    s.CanDelete = secured.CanDelete;
                    s.CanUpdate = secured.CanUpdate;
                    s.VersionUpdated = DateTime.UtcNow;
                    s.VersionUpdatedBy = secured.OwnerContactID;
                }
                c.SaveChanges();
            }
        }
        else
            throw new AuthorityException(string.Format("Incorrect permission for action: \"Update\" Contact: {0} Record: {1}", secured.AccessorContactID, secured.OwnerReferenceID));
    }
    else
    {
        //Call New
        if (CheckOwnership(secured, ActionPermission.Read | ActionPermission.Create))
        {
            using (new TransactionScope(TransactionScopeOption.Suppress))
            {
                var c = new NKDC(ApplicationConnectionString,null);
                if (secured.IsBlack)
                {
                    // insert a brand-new blacklist entry copied from the secured record
                    var s = new SecurityBlacklist
                    {
                        SecurityBlacklistID = Guid.NewGuid(),
                        AccessorContactID = secured.AccessorContactID,
                        AccessorApplicationID = secured.AccessorApplicationID,
                        AccessorCompanyID = secured.AccessorCompanyID,
                        AccessorProjectID = secured.AccessorProjectID,
                        AccessorRoleID = secured.AccessorRoleID,
                        OwnerApplicationID = secured.OwnerApplicationID,
                        OwnerCompanyID = secured.OwnerCompanyID,
                        OwnerContactID = secured.OwnerContactID,
                        OwnerEntitySystemType = secured.OwnerEntitySystemType,
                        OwnerField = secured.OwnerField,
                        OwnerProjectID = secured.OwnerProjectID,
                        OwnerReferenceID = secured.OwnerReferenceID,
                        OwnerTableType = secured.OwnerTableType,
                        CanCreate = secured.CanCreate,
                        CanRead = secured.CanRead,
                        CanDelete = secured.CanDelete,
                        CanUpdate = secured.CanUpdate,
                        VersionOwnerContactID = secured.OwnerContactID,
                        VersionOwnerCompanyID = secured.OwnerCompanyID,
                        VersionUpdated = DateTime.UtcNow,
                        VersionUpdatedBy = secured.OwnerContactID
                    };
                    c.SecurityBlacklists.AddObject(s);
                }
                else
                {
                    // insert a brand-new whitelist entry copied from the secured record
                    var s = new SecurityWhitelist
                    {
                        SecurityWhitelistID = Guid.NewGuid(),
                        AccessorContactID = secured.AccessorContactID,
                        AccessorApplicationID = secured.AccessorApplicationID,
                        AccessorCompanyID = secured.AccessorCompanyID,
                        AccessorProjectID = secured.AccessorProjectID,
                        AccessorRoleID = secured.AccessorRoleID,
                        OwnerApplicationID = secured.OwnerApplicationID,
                        OwnerCompanyID = secured.OwnerCompanyID,
                        OwnerContactID = secured.OwnerContactID,
                        OwnerEntitySystemType = secured.OwnerEntitySystemType,
                        OwnerField = secured.OwnerField,
                        OwnerProjectID = secured.OwnerProjectID,
                        OwnerReferenceID = secured.OwnerReferenceID,
                        OwnerTableType = secured.OwnerTableType,
                        CanCreate = secured.CanCreate,
                        CanRead = secured.CanRead,
                        CanDelete = secured.CanDelete,
                        CanUpdate = secured.CanUpdate,
                        VersionOwnerContactID = secured.OwnerContactID,
                        VersionOwnerCompanyID = secured.OwnerCompanyID,
                        VersionUpdated = DateTime.UtcNow,
                        VersionUpdatedBy = secured.OwnerContactID
                    };
                    c.SecurityWhitelists.AddObject(s);
                }
                c.SaveChanges();
            }
        }
        else
            throw new AuthorityException(string.Format("Incorrect permission for action: \"Create\" Contact: {0} Record: {1}", secured.AccessorContactID, secured.OwnerReferenceID));
    }
}
/// <summary>
/// Deletes the current (Version == 0, not deleted) black/white-list security record
/// identified by the secured record's SecurityID, provided the caller holds Read and
/// Delete permission on it.
/// </summary>
/// <param name="secured">The secured record whose security entry should be removed.</param>
public void DeleteSecurity(ISecured secured)
{
    //TODO!!!: When writing security check for antecedentid = referenceid &/or version=0
    if (!secured.SecurityID.HasValue)
        throw new NotSupportedException("Can not delete a security record without an ID.");
    if (!CheckOwnership(secured, ActionPermission.Read | ActionPermission.Delete))
        throw new AuthorityException(string.Format("Incorrect permission for action: \"Delete\" Contact: {0} Record: {1}", secured.AccessorContactID, secured.OwnerReferenceID));
    // suppress any ambient transaction while removing the record
    using (new TransactionScope(TransactionScopeOption.Suppress))
    {
        var context = new NKDC(ApplicationConnectionString, null);
        if (secured.IsBlack)
        {
            var record = context.SecurityBlacklists
                .Where(o => o.SecurityBlacklistID == secured.SecurityID && o.Version == 0 && o.VersionDeletedBy == null)
                .Single();
            context.SecurityBlacklists.DeleteObject(record);
        }
        else
        {
            var record = context.SecurityWhitelists
                .Where(o => o.SecurityWhitelistID == secured.SecurityID && o.Version == 0 && o.VersionDeletedBy == null)
                .Single();
            context.SecurityWhitelists.DeleteObject(record);
        }
        context.SaveChanges();
    }
}
/// <summary>
/// Set an item of meta data with the given types and values, and relate it to the
/// given block model. Best-effort: any failure is logged and swallowed so a metadata
/// problem never aborts an import.
/// </summary>
/// <param name="blockModelID">Block model the metadata is attached to.</param>
/// <param name="metaDataType">Type discriminator stored on the MetaData row.</param>
/// <param name="tableType">Table type recorded on the relation row.</param>
/// <param name="cont">Content to index.</param>
private static void SetMetaDataItem(Guid blockModelID, string metaDataType, string tableType, string cont)
{
    try
    {
        using (var entityObj = new NKDC(BaseImportTools.XSTRING, null))
        {
            MetaData dt = new MetaData();
            dt.MetaDataID = Guid.NewGuid();
            dt.MetaDataType = metaDataType;
            dt.ContentToIndex = cont;
            MetaDataRelation rel = new MetaDataRelation();
            rel.MetaDataRelationID = Guid.NewGuid();
            rel.MetaDataID = dt.MetaDataID;
            rel.TableType = tableType;
            rel.ReferenceID = blockModelID;
            // Save the MetaData row first so the relation's FK target exists before the
            // relation is persisted.
            entityObj.MetaDatas.AddObject(dt);
            entityObj.SaveChanges();
            entityObj.MetaDataRelations.AddObject(rel);
            entityObj.SaveChanges();
        }
    }
    catch (Exception ex)
    {
        // FIX: previously the message was built into a local string and discarded
        // (dead store, error invisible). Keep the best-effort swallow but record it.
        System.Diagnostics.Trace.TraceError("Error setting domains list " + ex.ToString());
    }
}
/// <summary>
/// Soft-disables a contact: marks the NKD contact row (VersionCertainty = -1) and
/// pushes any Orchard user accounts matching its username or default email back to
/// Pending registration status.
/// </summary>
/// <param name="contactID">ID of the contact to disable.</param>
/// <returns><c>false</c> when no contact with that ID exists; otherwise <c>true</c>.</returns>
public bool DisableContact(Guid contactID)
{
    string username = null;
    string email = null;
    // Suppress any ambient transaction for this update.
    using (new TransactionScope(TransactionScopeOption.Suppress))
    using (var c = new NKDC(ApplicationConnectionString, null)) // FIX: context was never disposed (leaked connection); sibling methods wrap NKDC in a using
    {
        var u = (from o in c.Contacts.Where(f => f.ContactID == contactID) select o).FirstOrDefault();
        if (u == null)
            return false;
        // -1 certainty flags the contact record as disabled.
        u.VersionCertainty = -1;
        username = u.Username;
        email = u.DefaultEmail;
        c.SaveChanges();
    }
    // Demote the matching CMS users; Orchard's content manager persists the change.
    var users = _contentManager.Query<UserPart, UserPartRecord>()
        .Where(user => user.NormalizedUserName == username || user.Email == email)
        .List();
    foreach (var user in users)
    {
        user.RegistrationStatus = UserStatus.Pending;
    }
    return true;
}
/// <summary>
/// Creates one BlockModelMetadata + Parameter pair per column of the import map,
/// recording which source columns feed the block model.
/// </summary>
/// <param name="blockModelGUID">Block model the metadata rows belong to.</param>
/// <param name="testMap">Import map whose columnMap entries are materialised.
/// NOTE: entries may be renamed in place (GUID suffix appended) to keep parameter
/// descriptions unique within the model — callers see the mutated map.</param>
/// <param name="connString">Database connection string.</param>
/// <returns>The metadata rows created and saved.</returns>
internal List<BlockModelMetadata> SetBlockModelMetaData(Guid blockModelGUID, ImportDataMap testMap, string connString)
{
    using (var entityObj = new NKDC(connString, null))
    {
        var metaDataItems = new List<BlockModelMetadata>();
        foreach (ColumnMap cmap in testMap.columnMap)
        {
            BlockModelMetadata metaData = new BlockModelMetadata();
            metaData.BlockModelID = blockModelGUID;
            metaData.BlockModelMetadataID = Guid.NewGuid();
            metaData.IsColumnData = true;

            Parameter param1 = new Parameter();
            param1.ParameterName = cmap.targetColumnName; // source column
            param1.ParameterType = "FieldName";
            // Keep descriptions unique per model: on a clash, rename the column both on
            // the parameter AND (deliberate side effect) on the import map itself.
            if (entityObj.BlockModelMetadatas.Where(f => f.BlockModelID == blockModelGUID && f.Parameter.Description == cmap.sourceColumnName).Any())
                param1.Description = cmap.sourceColumnName = string.Format("{0}_{1}", cmap.sourceColumnName, Guid.NewGuid());
            else
                param1.Description = cmap.sourceColumnName; // target column
            param1.ParameterID = Guid.NewGuid();
            if (cmap.sourceColumnName != null && cmap.sourceColumnName.ToLower().Contains("ppm"))
            {
                param1.UnitID = new Guid("E91773A4-2762-4EDE-8510-38F78FAF981D"); // TODO: HACK - get the proper guid for the current unit type by querying database
            }
            else if (cmap.sourceColumnName != null && cmap.sourceColumnName.ToLower().Contains("pct"))
            {
                param1.UnitID = new Guid("AEDBBE0A-6A94-419F-8B43-A98CE942669A"); // TODO: HACK - get the proper guid for the current unit type by querying database
            }
            metaData.BlockModelMetadataText = cmap.targetColumnName;
            metaData.ParameterID = param1.ParameterID;
            entityObj.Parameters.AddObject(param1);
            entityObj.BlockModelMetadatas.AddObject(metaData);
            metaDataItems.Add(metaData);
            // Save inside the loop so the duplicate-name query above can see rows added
            // by earlier iterations (unsaved adds are invisible to LINQ-to-Entities).
            entityObj.SaveChanges();
        }
        // FIX: removed dead locals colName/columnValue which were assigned and never read.
        return metaDataItems;
    }
}
/// <summary>
/// Creates and immediately saves a new workflow procedure for the given stage,
/// stamping it with the next value of the running sequence counter.
/// NOTE: the lookup of an existing procedure was deliberately disabled long ago —
/// a fresh row is created on every call.
/// </summary>
/// <param name="entityObj">Open data context to write through.</param>
/// <param name="stage">Workflow state name for the new procedure.</param>
/// <param name="assayGroupWorkflow">Parent workflow the procedure belongs to.</param>
/// <returns>The newly persisted procedure.</returns>
private AssayGroupWorkflowProcedure GetAssayGroupWorkflowProcedure(NKDC entityObj, string stage, AssayGroupWorkflow assayGroupWorkflow)
{
    var procedure = new AssayGroupWorkflowProcedure
    {
        AssayGroupWorkflowID = assayGroupWorkflow.AssayGroupWorkflowID,
        AssayGroupWorkflow = assayGroupWorkflow,
        AssayGroupWorkflowProcedureID = Guid.NewGuid(),
        WorkflowStateName = stage,
        Sequence = WorkflowProcedureSequenceNumber
    };
    WorkflowProcedureSequenceNumber++;
    entityObj.AssayGroupWorkflowProcedures.AddObject(procedure);
    entityObj.SaveChanges();
    return procedure;
}
/// <summary>
/// Finds the workflow whose (trimmed) name matches <paramref name="programType"/>
/// for the given assay group, creating and saving a new one when no match exists.
/// </summary>
/// <param name="entityObj">Open data context to query and write through.</param>
/// <param name="programType">Workflow name to match (trimmed comparison).</param>
/// <param name="assayGroupID">Assay group to scope the lookup to.</param>
/// <returns>The existing (last matching) or newly created workflow.</returns>
private AssayGroupWorkflow GetAssayGroupWorkflow(NKDC entityObj, string programType, Guid? assayGroupID)
{
    AssayGroupWorkflow found = null;
    // Enumerate all matches; the last row enumerated wins (mirrors original behaviour —
    // LINQ-to-Entities has no ordered Last() without an explicit OrderBy).
    var matches = entityObj.AssayGroupWorkflows.Where(c => c.WorkflowName.Trim().Equals(programType.Trim()) && c.AssayGroupID == assayGroupID);
    foreach (AssayGroupWorkflow candidate in matches)
    {
        found = candidate;
    }
    if (found != null)
    {
        return found;
    }
    found = new AssayGroupWorkflow
    {
        AssayGroupID = assayGroupID,
        AssayGroupWorkflowID = Guid.NewGuid(),
        WorkflowName = programType
    };
    entityObj.AssayGroupWorkflows.AddObject(found);
    entityObj.SaveChanges();
    return found;
}
// NOTE(review): a commented-out GetAssayGroupSubSample stub and a disabled
// existing-precondition lookup used to live here; both removed as dead comments.
/// <summary>
/// Creates and saves a precondition row (size fraction, wash fraction, froth timing, ...)
/// for the given subsample, parsing min/max/target values out of the precondition name.
/// Returns null when <paramref name="preconditionName"/> is blank.
/// Recognised name shapes: a single number ("1.45"), a range split on +/-/space
/// ("-16+8"), "&lt;n" / "&gt;n" bounds, sink/float wash codes ("S1.70-F1.80"), cumulative
/// float codes, and the P1..P5 / T1..T2 froth-duration codes mapped to seconds.
/// </summary>
/// <param name="entityObj">Open data context to write through.</param>
/// <param name="preconditionName">Raw precondition text from the import file.</param>
/// <param name="preconditionType">Parameter type, e.g. "Size fraction" or "Wash fraction".</param>
/// <param name="ssGuid">Subsample the precondition attaches to.</param>
/// <returns>The persisted precondition, or null for a blank name.</returns>
private AssayGroupSubsamplePrecondition GetAssayGroupPrecondition(NKDC entityObj, string preconditionName, string preconditionType, Guid ssGuid)
{
    AssayGroupSubsamplePrecondition agw = null;
    if (agw == null) // NOTE(review): always true — vestige of the disabled lookup above
    {
        agw = new AssayGroupSubsamplePrecondition();
        if (!string.IsNullOrWhiteSpace(preconditionName))
            agw.PreconditionName = string.Format("{0}", preconditionName).Trim();
        else return null;
        agw.AssayGroupSubsampleID = ssGuid;
        agw.AssayGroupSubsamplePreconditionID = Guid.NewGuid();
        // TODO - make this more efficient by storing the Parameters in a dictionary so
        // lookup is fast rather than hitting the DB for every record.
        Guid gParam = this.FindParameter("AssayPrecondition", preconditionType);
        agw.PreconditionParameterID = gParam;
        // Example inputs this parser handles (inherited from the source XLS):
        //   +32            bigger than 32 (size fractions)
        //   -32            smaller than 32
        //   -16+8          smaller than 16, bigger than 8
        //   F1.45          flotation (density floats)
        //   S1.70-F1.80    sink/float range
        //   S2.00          sinks
        //   P2, P3         froth duration codes
        if (agw.PreconditionName.Length > 0) // parse density / size / timing out of the name
        {
            var isNumber = new Regex(RegexUtils.REGEX_IS_NUMBER);
            string[] numbers = agw.PreconditionName.Split(
                new char[] {'+', '-', ' '}, StringSplitOptions.RemoveEmptyEntries);
            string number;
            if (numbers.Length == 1 && isNumber.IsMatch(agw.PreconditionName))
            {
                // Plain signed number: the sign decides whether it is a max (-) or min (+).
                agw.Precondition = Convert.ToDecimal(agw.PreconditionName);
                if (agw.Precondition < 0m) agw.PreconditionMaximum = agw.Precondition;
                else agw.PreconditionMinimum = agw.Precondition;
            }
            else if (numbers.Length == 2 && isNumber.IsMatch(string.Join("", numbers)))
            {
                // Range like "-16+8": leading '-' means first number is the upper bound.
                if (agw.PreconditionName[0] == '-')
                {
                    agw.PreconditionMaximum = Convert.ToDecimal(numbers[0]);
                    agw.PreconditionMinimum = Convert.ToDecimal(numbers[1]);
                }
                else
                {
                    agw.PreconditionMaximum = Convert.ToDecimal(numbers[1]);
                    agw.PreconditionMinimum = Convert.ToDecimal(numbers[0]);
                }
            }
            else if (agw.PreconditionName.Length > 1 && agw.PreconditionName[0] == '<' && isNumber.IsMatch(number = agw.PreconditionName.Substring(1)))
            {
                // "<n" — upper bound only.
                agw.PreconditionMaximum = Convert.ToDecimal(number);
            }
            else if (agw.PreconditionName.Length > 1 && agw.PreconditionName[0] == '>' && isNumber.IsMatch(number = agw.PreconditionName.Substring(1)))
            {
                // ">n" — lower bound only.
                agw.PreconditionMinimum = Convert.ToDecimal(number);
            }
            else if (preconditionType == "Wash fraction") //Todo hack
            {
                // Sink/float codes, e.g. "S1.70-F1.80": sink = min density, float = max.
                var sinkFloat = new Regex(RegexUtils.REGEX_IS_SINKFLOAT).Match(agw.PreconditionName.ToUpper());
                var s = sinkFloat.Groups["sink"].Value;
                var f = sinkFloat.Groups["float"].Value;
                if (isNumber.IsMatch(s) || isNumber.IsMatch(f)) // check S-F
                {
                    if (!string.IsNullOrWhiteSpace(f)) agw.PreconditionMaximum = Convert.ToDecimal(f);
                    if (!string.IsNullOrWhiteSpace(s)) agw.PreconditionMinimum = Convert.ToDecimal(s);
                }
                else if (!string.IsNullOrWhiteSpace((f=new Regex(RegexUtils.REGEX_IS_CUMULATIVEFLOAT).Match(agw.PreconditionName.ToUpper()).Groups["cumulative"].Value)))
                {
                    // Cumulative float code — only an upper bound.
                    agw.PreconditionMaximum = Convert.ToDecimal(f);
                }
                else
                {
                    // Froth-duration codes, mapped to cumulative seconds.
                    // Default to SI units (s) time. TODO: should be separate columns —
                    // HACK inherited from the XLS source format.
                    switch (agw.PreconditionName.ToUpper())
                    {
                        case "P1": // 5sec intervals for 120 seconds
                            agw.PreconditionMaximum = 120m;
                            break;
                        case "P2": // 15s
                            agw.PreconditionMinimum = 120m;
                            agw.PreconditionMaximum = 135m;
                            break;
                        case "P3": // 30s
                            agw.PreconditionMinimum = 135m;
                            agw.PreconditionMaximum = 150m;
                            break;
                        case "P4": // 60s
                            agw.PreconditionMinimum = 150m;
                            agw.PreconditionMaximum = 210m;
                            break;
                        case "P5": // 90s
                            agw.PreconditionMinimum = 210m;
                            agw.PreconditionMaximum = 300m;
                            break;
                        case "T1": // Recovery from P1
                            agw.PreconditionMaximum = 120m;
                            break;
                        case "T2": // Recovery from P5
                            agw.PreconditionMinimum = 120m;
                            agw.PreconditionMaximum = 300m;
                            break;
                        default:
                            break;
                    }
                }
            }
        }
        entityObj.AssayGroupSubsamplePreconditions.AddObject(agw);
        entityObj.SaveChanges();
    }
    return agw;
}
/// <summary>
/// Imports coal-quality assay data from a delimited text stream into the database:
/// creates an AssayGroup for the run, one AssayGroupTest per [RESULT...] column of the
/// import map, then per data row resolves the hole, sample interval, program/stage
/// workflow and size/wash fraction preconditions, and writes (or, for duplicates with
/// <paramref name="doImportOverwrite"/>, batch-updates) the individual test results.
/// Progress, warnings and error codes are reported through <paramref name="mos"/> and
/// <paramref name="UpdateStatus"/>.
/// </summary>
/// <param name="mos">Import status accumulator (record counts, messages, error code).</param>
/// <param name="fileStream">Delimited input data; closed in the finally block.</param>
/// <param name="importMap">Column mapping, delimiter and data start line.</param>
/// <param name="batchSize">Rows per EF SaveChanges batch.</param>
/// <param name="UpdateStatus">Progress callback (message, percent).</param>
/// <param name="approxNumLines">Estimated line count used only for percent progress.</param>
/// <param name="connectionString">Database connection string.</param>
/// <param name="NKDProjectID">Project the assay group is attached to.</param>
/// <param name="checkForDuplicates">When true, existing matching intervals are detected.</param>
/// <param name="doImportOverwrite">When true, duplicate intervals are updated instead of skipped.</param>
internal void AddCoalQualityData(ModelImportStatus mos, Stream fileStream, FormatSpecification.ImportDataMap importMap, int batchSize, Action<string, double> UpdateStatus, int approxNumLines, string connectionString, Guid NKDProjectID, bool checkForDuplicates, bool doImportOverwrite)
{
    WorkflowProcedureSequenceNumber = 1;
    // Trackers used to reset the workflow sequence when hole/interval/stage changes.
    Guid? lastHoleID = new Guid();
    string lastStage = "";
    decimal lastFromDepth = -999999;
    decimal lastToDepth = -999999;
    bool commitToDB = true;
    DateTime currentUpdateTimestamp = DateTime.UtcNow;
    // first set up an assay group object - we can do this through the edm
    using (var entityObj = new NKDC(connectionString, null))
    {
        Guid agGuid = Guid.NewGuid();
        AssayGroup ag = new AssayGroup();
        ag.AssayGroupID = agGuid;
        ag.ProjectID = NKDProjectID;
        ag.AssayGroupName = "Manual import";
        ag.Comment = "From file " + importMap.mapOriginalDataFile;
        ag.Entered = currentUpdateTimestamp;
        ag.VersionUpdated = currentUpdateTimestamp;
        entityObj.AssayGroups.AddObject(ag);
        if (commitToDB)
        {
            entityObj.SaveChanges();
        }
        // set up the assay test columns - one of these for each test type
        Dictionary<ColumnMap, Guid> resultsColumns = new Dictionary<ColumnMap, Guid>();
        Dictionary<Guid, AssayGroupTest> assayGroups = new Dictionary<Guid, AssayGroupTest>();
        ColumnMap cmProgram = null;
        ColumnMap cmStage = null;
        ColumnMap cmSizeFraction = null;
        ColumnMap cmWashFraction = null;
        // Classify mapped columns by their bracketed target-name prefix.
        foreach (ColumnMap cim in importMap.columnMap)
        {
            if (cim.targetColumnName.Trim().StartsWith("[RESULT"))
            {
                // this is a test category
                resultsColumns.Add(cim, Guid.NewGuid());
            }
            else if (cim.targetColumnName.Trim().StartsWith("[PROGRAM"))
            {
                cmProgram = cim;
            }
            else if (cim.targetColumnName.Trim().StartsWith("[STAGE"))
            {
                cmStage = cim;
            }
            else if (cim.targetColumnName.Trim().StartsWith("[SIZE FRACTION"))
            {
                cmSizeFraction = cim;
            }
            else if (cim.targetColumnName.Trim().StartsWith("[WASH FRACTION"))
            {
                cmWashFraction = cim;
            }
        }
        UpdateStatus("Setting up assay tests ", 2);
        // One AssayGroupTest per result column; name truncated to 16 chars.
        foreach (KeyValuePair<ColumnMap, Guid> kvp in resultsColumns)
        {
            ColumnMap cm = kvp.Key;
            Guid g = kvp.Value;
            AssayGroupTest xt = new AssayGroupTest();
            string ss1 = "";
            if (cm.sourceColumnName != null && cm.sourceColumnName.Length > 15)
            {
                ss1 = cm.sourceColumnName.Substring(0, 16);
            }
            else
            {
                ss1 = cm.sourceColumnName;
            }
            Guid pid = FindParameter("AssayTypeName", cm.sourceColumnName);
            xt.ParameterID = pid;
            xt.AssayTestName = ss1;
            xt.AssayGroupID = agGuid;
            xt.AssayGroupTestID = g;
            xt.VersionUpdated = currentUpdateTimestamp;
            entityObj.AssayGroupTests.AddObject(xt);
            assayGroups.Add(g, xt);
            if (commitToDB)
            {
                entityObj.SaveChanges();
            }
        }
        // iterate through the data lines
        int ct = 1;         // physical line counter (1-based, compared to dataStartLine)
        int linesRead = 0;  // total lines consumed from the stream
        SqlConnection connection = null;
        SqlConnection secondaryConnection = null;  // used for FK (hole name -> header GUID) lookups
        //List<string> uniqueDomains = new List<string>();
        // get a connection to the database
        try
        {
            connection = new SqlConnection(connectionString);
            connection.Open();
            secondaryConnection = new SqlConnection(connectionString);
            secondaryConnection.Open();
            bool hasDuplicateIntervals = false;
            // NOTE(review): this SqlTransaction is begun but never committed or rolled
            // back, and the commands list below is never used — all writes actually go
            // through the EF context. Left as-is; looks like dead scaffolding to confirm.
            SqlTransaction trans;
            trans = connection.BeginTransaction();
            List<SqlCommand> commands = new List<SqlCommand>();
            int tb = 0;                              // rows since last batch save
            int transactionBatchLimit = batchSize;   // save every batchSize rows
            // open the filestream and read the first line
            StreamReader sr = null;
            try
            {
                sr = new StreamReader(fileStream);
            }
            catch (Exception ex)
            {
                mos.AddErrorMessage("Error getting data stream for input data:\n" + ex.ToString());
                mos.finalErrorCode = ModelImportStatus.ERROR_LOADING_FILE;
            }
            string line = null;
            float pct = 0;
            float bct = 1;
            // report every X records
            int repCount = 0;
            float fNumLines = (float)approxNumLines;  // NOTE(review): unused
            Dictionary<string, Guid> holeIDLookups = new Dictionary<string, Guid>();  // cache: hole name -> header GUID
            Dictionary<string, int> columnIDX = new Dictionary<string, int>();        // target column name -> source index
            int fkLookupCount = 0;
            BaseImportTools.PopulateCMapShortcut(importMap, columnIDX);
            ColumnMap headerCmap = importMap.FindItemsByTargetName("HeaderID");
            int seqNum = 1;
            if (sr != null)
            {
                while ((line = sr.ReadLine()) != null)
                {
                    repCount++;
                    pct = ((float)linesRead / (float)approxNumLines) * 100.0f;
                    bct++;
                    linesRead++;
                    if (ct >= importMap.dataStartLine)
                    {
                        // digest a row of input data
                        List<string> items = BaseImportTools.ParseTestLine(line, importMap.inputDelimiter);
                        Guid holeID = new Guid();
                        Decimal fromDepth = new Decimal(-9999999999);
                        Decimal toDepth = new Decimal(-9999999999);
                        string sampleNumber = null;
                        string sampleName = null;
                        string labBatchNumber = null;
                        string labsampleNumber = null;
                        // find mapped values by name
                        int idxVal = 0;
                        // -- Get the hole ID foreign key relation
                        bool foundEntry = columnIDX.TryGetValue("HeaderID", out idxVal);
                        bool foundHole = false;
                        string holeName = "";
                        if (foundEntry)
                        {
                            string lookupByName = "HoleName";
                            string lookupValue = items[idxVal];
                            holeName = lookupValue;
                            bool lv = holeIDLookups.ContainsKey(lookupValue);
                            if (!lv)
                            {
                                // Cache miss: resolve the hole name against the header table.
                                string headerGUID = ForeignKeyUtils.FindFKValueInOther(lookupValue, headerCmap, secondaryConnection, false, lookupByName, NKDProjectID);
                                if (headerGUID == null)
                                {
                                    // this means we have not found the specified records in the header table
                                    // Report on issue and skip line
                                }
                                else
                                {
                                    foundHole = true;
                                    holeID = new Guid(headerGUID);
                                    holeIDLookups.Add(lookupValue, holeID);
                                    fkLookupCount++;
                                }
                            }
                            else
                            {
                                holeIDLookups.TryGetValue(lookupValue, out holeID);
                                foundHole = true;
                            }
                        }
                        if (!foundHole)
                        {
                            // NOTE(review): this literal was split across lines in the
                            // extracted source; reconstructed as one message — confirm.
                            mos.AddErrorMessage("Failed to find hole " + holeName + ". Skipping record at line " + linesRead + ".");
                            mos.finalErrorCode = ModelImportStatus.DATA_CONSISTENCY_ERROR;
                            mos.recordsFailed++;
                            continue;
                        }
                        else
                        {
                            // Parse the interval bounds and sample identifiers from mapped columns.
                            bool hasFrom = false;
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("FromDepth", out idxVal);
                            if (foundEntry)
                            {
                                string ii = items[idxVal];
                                Decimal val = 0;
                                bool isOk = Decimal.TryParse(ii, out val);
                                if (isOk)
                                {
                                    fromDepth = val;
                                    hasFrom = true;
                                }
                            }
                            bool hasTo = false;
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("ToDepth", out idxVal);
                            if (foundEntry)
                            {
                                string ii = items[idxVal];
                                Decimal val = 0;
                                bool isOk = Decimal.TryParse(ii, out val);
                                if (isOk)
                                {
                                    toDepth = val;
                                    hasTo = true;
                                }
                            }
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("SampleID", out idxVal);
                            if (foundEntry)
                            {
                                string ii = items[idxVal];
                                sampleNumber = ii;
                            }
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("SampleName", out idxVal);
                            if (foundEntry)
                            {
                                string ii = items[idxVal];
                                sampleName = ii;
                            }
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("LabSampleName", out idxVal);
                            if (foundEntry)
                            {
                                string ii = items[idxVal];
                                labsampleNumber = ii;
                            }
                            idxVal = 0;
                            foundEntry = columnIDX.TryGetValue("LabBatchNumber", out idxVal);
                            if (foundEntry)
                            {
                                string ii = items[idxVal];
                                labBatchNumber = ii;
                            }
                            // Now identify the program, stage, size fraction and wash fraction.
                            // get the program text
                            string programType = null;
                            if (cmProgram != null)
                            {
                                programType = items[cmProgram.sourceColumnNumber];
                            }
                            string stage = null;
                            if (cmStage != null)
                            {
                                stage = items[cmStage.sourceColumnNumber];
                            }
                            string sizeFraction = null;
                            if (cmSizeFraction != null)
                            {
                                sizeFraction = items[cmSizeFraction.sourceColumnNumber];
                            }
                            string washFraction = null;
                            if (cmWashFraction != null)
                            {
                                washFraction = items[cmWashFraction.sourceColumnNumber];
                            }
                            IQueryable<AssayGroupSubsample> toUpdate = null;
                            bool isDuplicate = false;
                            // Parameter IDs that scope the precondition-name matching below.
                            var washID = (from o in entityObj.Parameters where o.ParameterType=="AssayPrecondition" && o.ParameterName=="Wash fraction" select o.ParameterID).FirstOrDefault();
                            var sizeID = (from o in entityObj.Parameters where o.ParameterType == "AssayPrecondition" && o.ParameterName == "Size fraction" select o.ParameterID).FirstOrDefault();
                            if (checkForDuplicates)
                            {
                                if (hasFrom && hasTo)
                                {
                                    // here we need to check that not duplicated: same hole,
                                    // interval, stage, program and (when given) fractions.
                                    toUpdate = (from o in entityObj.AssayGroupSubsamples
                                                where o.OriginalSample.HeaderID == holeID
                                                   && o.OriginalSample.FromDepth == fromDepth
                                                   && o.OriginalSample.ToDepth == toDepth
                                                   && o.AssayGroupWorkflowProcedure.WorkflowStateName == stage
                                                   && o.AssayGroupWorkflowProcedure.AssayGroupWorkflow.WorkflowName == programType
                                                   && (sizeFraction.Trim() == "" || o.AssayGroupSubsamplePrecondition.Any(f => f.PreconditionName == sizeFraction && f.PreconditionParameterID==sizeID))
                                                   && (washFraction.Trim() == "" || o.AssayGroupSubsamplePrecondition.Any(f => f.PreconditionName == washFraction && f.PreconditionParameterID==washID))
                                                select o);
                                    if (toUpdate.Any())
                                    {
                                        isDuplicate = true;
                                    }
                                }
                                if (isDuplicate)
                                {
                                    hasDuplicateIntervals = true;
                                    mos.AddWarningMessage("Duplicate interval for hole " + holeName + " at depth " + fromDepth + " to " + toDepth);
                                    UpdateStatus("Duplicate interval at " + holeName + " " + fromDepth + ", " + toDepth, pct);
                                    if (!doImportOverwrite)
                                    {
                                        mos.recordsFailed++;
                                        continue;
                                    }
                                    else
                                    {
                                        // Overwrite mode: refresh the sequence on existing rows.
                                        foreach (var upd in toUpdate) upd.Sequence = seqNum;
                                    }
                                }
                            }
                            // Reuse the duplicate's sample, or find/create one for this interval.
                            Sample xs = null;
                            if (isDuplicate == true)
                            {
                                xs = toUpdate.First().OriginalSample;
                            }
                            else
                            {
                                xs = (from o in entityObj.Samples where o.HeaderID==holeID && o.FromDepth==fromDepth && o.ToDepth==toDepth select o).FirstOrDefault();
                                if (xs == null)
                                {
                                    xs = new Sample();
                                    xs.SampleID = Guid.NewGuid();
                                    xs.SampleName = sampleName;
                                    xs.SampleNumber = sampleNumber;
                                    xs.FromDepth = fromDepth;
                                    xs.ToDepth = toDepth;
                                    xs.HeaderID = holeID;
                                    xs.VersionUpdated = currentUpdateTimestamp;
                                    entityObj.Samples.AddObject(xs);
                                }
                            }
                            // see if the interval has changed, whereby we will need to reset the sequence ID
                            if (holeID != lastHoleID)
                            {
                                if (fromDepth != lastFromDepth && toDepth != lastToDepth)
                                {
                                    // new interval
                                    WorkflowProcedureSequenceNumber = 1;
                                }
                            }
                            if (!stage.Trim().Equals(lastStage))
                            {
                                WorkflowProcedureSequenceNumber = 1;
                            }
                            lastHoleID = holeID;
                            lastFromDepth = fromDepth;
                            lastToDepth = toDepth;
                            lastStage = stage;
                            if (!isDuplicate)
                            {
                                // New subsample: wire up workflow, procedure and preconditions.
                                AssayGroupWorkflow agWorkflowProgram = GetAssayGroupWorkflow(entityObj, programType, agGuid);
                                AssayGroupWorkflowProcedure agWorkflowStage = GetAssayGroupWorkflowProcedure(entityObj, stage, agWorkflowProgram);
                                AssayGroupSubsample agSS = new AssayGroupSubsample();
                                agSS.AssayGroupID = agGuid;
                                agSS.FromDepth = fromDepth;
                                agSS.ToDepth = toDepth;
                                agSS.Sequence = seqNum;
                                agSS.AssayGroupSubsampleID = Guid.NewGuid();
                                agSS.SampleAntecedentID = xs.SampleID;
                                agSS.OriginalSample = xs;
                                agSS.AssayGroupWorkflowProcedureID = agWorkflowStage.AssayGroupWorkflowProcedureID;
                                agSS.AssayGroupWorkflowProcedure = agWorkflowStage;
                                entityObj.AssayGroupSubsamples.AddObject(agSS);
                                entityObj.SaveChanges();
                                AssayGroupSubsamplePrecondition agSizeFraction = GetAssayGroupPrecondition(entityObj, sizeFraction, "Size fraction", agSS.AssayGroupSubsampleID);
                                AssayGroupSubsamplePrecondition agWashFraction = GetAssayGroupPrecondition(entityObj, washFraction, "Wash fraction", agSS.AssayGroupSubsampleID);
                                // Single-element queryable so the result-writing loop below is uniform.
                                toUpdate = (new[] { agSS }).AsQueryable();
                            }
                            if (isDuplicate) entityObj.SaveChanges();
                            foreach (var upd in toUpdate.ToList())
                            {
                                // now pick out all the mapped values
                                // iterate over all [ASSAY RESULT] columns
                                foreach (KeyValuePair<ColumnMap, Guid> kvp in resultsColumns)
                                {
                                    ColumnMap cm = kvp.Key;
                                    Guid g = kvp.Value; // this is the AssayGroupTestID
                                    AssayGroupTestResult testResult = null;
                                    Decimal result = default(decimal);
                                    string resultText = null;
                                    bool parsedOK = false;
                                    if (items.Count >= cm.sourceColumnNumber)
                                    {
                                        parsedOK = Decimal.TryParse(items[cm.sourceColumnNumber], out result);
                                        resultText = items[cm.sourceColumnNumber];
                                    }
                                    else
                                    {
                                        mos.AddWarningMessage("Line " + linesRead + " contains too few columns to read " + cm.sourceColumnName);
                                    }
                                    // Blank cells are skipped entirely.
                                    if (string.IsNullOrWhiteSpace(resultText)) continue;
                                    if (!isDuplicate)
                                    {
                                        testResult = new AssayGroupTestResult();
                                        testResult.AssayGroupSubsampleID = upd.AssayGroupSubsampleID;
                                        testResult.AssayGroupTestResultID = Guid.NewGuid();
                                        testResult.AssayGroupTestID = g;
                                        testResult.SampleID = xs.SampleID;
                                        testResult.LabBatchNumber = labBatchNumber;
                                        entityObj.AssayGroupTestResults.AddObject(testResult);
                                        testResult.VersionUpdated = currentUpdateTimestamp;
                                        // Numeric value only when it parsed; raw text always kept.
                                        if (parsedOK) testResult.LabResult = result;
                                        testResult.LabResultText = resultText;
                                        //testResult.LabSampleNumber = labsampleNumber;
                                        mos.recordsAdded++;
                                    }
                                    else
                                    {
                                        // Overwrite mode: batch-update existing results in place
                                        // (Update() is the EF batch-update extension — bypasses the change tracker).
                                        var tempRes = (parsedOK) ? result : default(decimal?);
                                        entityObj.AssayGroupTestResults.Where(f=> f.AssayGroupSubsampleID == upd.AssayGroupSubsampleID && f.AssayGroupTest.Parameter.ParameterName == cm.sourceColumnName )
                                            .Update((f) => new AssayGroupTestResult { LabResult = tempRes, LabResultText = resultText, VersionUpdated = currentUpdateTimestamp });
                                        mos.recordsUpdated++;
                                    }
                                }
                            }
                            seqNum++;
                            tb++;
                        }
                    }
                    // Flush the EF context every transactionBatchLimit rows.
                    if (commitToDB)
                    {
                        if (tb == transactionBatchLimit)
                        {
                            entityObj.SaveChanges();
                            UpdateStatus("Writing assays to DB (" + ct + " entries)", pct);
                            tb = 0;
                        }
                    }
                    ct++;
                }
                // Final flush for the last partial batch.
                entityObj.SaveChanges();
            }
            if (hasDuplicateIntervals)
            {
                mos.finalErrorCode = ModelImportStatus.DATA_CONSISTENCY_ERROR;
            }
            string numFKLookups = "FK lookups " + fkLookupCount;  // NOTE(review): built but never reported
            mos.linesReadFromSource = ct - 1;
            UpdateStatus("Finished writing coal quality data to database.", 0);
        }
        catch (Exception ex)
        {
            UpdateStatus("Error writing qualities to database ", 0);
            mos.AddErrorMessage("Error writing data at line " + linesRead + ":\n" + ex.ToString());
            mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
        }
        finally
        {
            try
            {
                connection.Close();
                secondaryConnection.Close();
                fileStream.Close();
            }
            catch (Exception ex)
            {
                mos.AddErrorMessage("Error closing conenction to database:\n" + ex.ToString());
                mos.finalErrorCode = ModelImportStatus.ERROR_WRITING_TO_DB;
            }
        }
        // Overwrites the ct-1 value set on success; linesRead is the authoritative count.
        mos.linesReadFromSource = linesRead;
    }
}