/// <summary>
/// Clones an existing standard (SETS row) under a new set name, marking the
/// copy as custom, and then clones all of its requirements and connecting rows.
/// </summary>
/// <param name="setName">Name of the existing set to copy.</param>
/// <param name="newSetName">Name to assign to the cloned set.</param>
/// <returns>True if the clone succeeded; false if the source set was not found.</returns>
public bool CloneModule(string setName, string newSetName)
{
    this.origSetName = setName;
    this.newSetName = newSetName;

    using (var db = new CSET_Context())
    {
        // Locate the source SETS record.
        var sourceSet = db.SETS.FirstOrDefault(x => x.Set_Name == this.origSetName);
        if (sourceSet == null)
        {
            return false;
        }

        // Shallow-copy the row, then overwrite the identifying columns.
        var clonedSet = (SETS)db.Entry(sourceSet).CurrentValues.ToObject();
        clonedSet.Set_Name = this.newSetName;
        // Truncate so the " (copy)" suffix cannot overflow the column.
        clonedSet.Full_Name = sourceSet.Full_Name.Substring(0, Math.Min(sourceSet.Full_Name.Length, 240)) + " (copy)";
        clonedSet.Is_Custom = true;

        db.SETS.Add(clonedSet);
        db.SaveChanges();

        CloneRequirements(clonedSet);

        // indicate that the cloning took place
        return true;
    }
}
/// <summary>
/// Converts an external standard definition into a SETS entity graph and
/// persists it.  Conversion failures are surfaced as a single aggregated
/// exception; SQL errors are logged against the conversion result.
/// </summary>
/// <param name="externalStandard">The externally-defined standard to import.</param>
/// <param name="context">Hangfire context used for progress/error logging.</param>
public static async Task SaveImport(ExternalStandard externalStandard, PerformContext context)
{
    var logger = new HangfireLogger(context);
    var result = await externalStandard.ToSet(logger);

    if (!result.IsSuccess)
    {
        // Aggregate all conversion errors into one message for the job log.
        throw new Exception(String.Join("\r\n", result.ErrorMessages));
    }

    try
    {
        using (var db = new CSET_Context())
        {
            db.SETS.Add(result.Result);

            // Questions that already exist (non-zero id) must not be re-inserted.
            foreach (var question in result.Result.NEW_REQUIREMENT
                .SelectMany(s => s.NEW_QUESTIONs())
                .Where(s => s.Question_Id != 0).ToList())
            {
                db.Entry(question).State = EntityState.Unchanged;
            }

            await db.SaveChangesAsync();
        }
    }
    catch (SqlException e)
    {
        result.LogError(e.Message);
    }
    catch (Exception)
    {
        logger.Log("An error was encountered when adding the module to the database. Please try again");
        // Rethrow without resetting the stack trace ("throw e" would).
        throw;
    }
}
/// <summary>
/// Imports a CSET assessment from an exported zip archive.  Reads model.json,
/// upgrades it to the current schema, imports any custom standard documents and
/// custom standards it references, creates the new assessment for the user, and
/// finally imports a legacy diagram (Diagram.csetd) if one is present.
/// </summary>
/// <param name="zipFileFromDatabase">The raw zip archive bytes read from the database.</param>
/// <param name="currentUserId">The user that will own the imported assessment.</param>
public async Task ProcessCSETAssessmentImport(byte[] zipFileFromDatabase, int currentUserId)
{
    using (CSET_Context context = new CSET_Context())
    {
        //* read from db and set as memory stream here.
        using (Stream fs = new MemoryStream(zipFileFromDatabase))
        using (ZipArchive zip = new ZipArchive(fs))
        {
            string jsonObject;
            using (StreamReader r = new StreamReader(zip.GetEntry("model.json").Open()))
            {
                jsonObject = r.ReadToEnd();
            }

            // Apply any data updates to older versions
            ImportUpgradeManager upgrader = new ImportUpgradeManager();
            jsonObject = upgrader.Upgrade(jsonObject);

            UploadAssessmentModel model = (UploadAssessmentModel)JsonConvert.DeserializeObject(jsonObject, new UploadAssessmentModel().GetType());

            // Import any custom standard documents that are not already present.
            foreach (var doc in model.CustomStandardDocs)
            {
                var genFile = context.GEN_FILE.FirstOrDefault(s => s.File_Name == doc);
                if (genFile == null)
                {
                    ExternalDocument docModel;
                    using (StreamReader docReader = new StreamReader(zip.GetEntry(doc + ".json").Open()))
                    {
                        docModel = JsonConvert.DeserializeObject<ExternalDocument>(docReader.ReadToEnd());
                    }
                    genFile = docModel.ToGenFile();
                    var extension = Path.GetExtension(genFile.File_Name).Substring(1);
                    genFile.File_Type_ = context.FILE_TYPE.Where(s => s.File_Type1 == extension).FirstOrDefault();

                    try
                    {
                        context.FILE_REF_KEYS.Add(new FILE_REF_KEYS { Doc_Num = genFile.Doc_Num });
                        await context.SaveChangesAsync();
                    }
                    catch
                    {
                        // Best effort: the ref key may already exist.
                    }

                    context.GEN_FILE.Add(genFile);
                    context.SaveChanges();
                }
            }

            // Import each custom standard referenced by the model.
            foreach (var standard in model.CustomStandards)
            {
                var sets = context.SETS.Where(s => s.Set_Name.Contains(standard)).ToList();
                SETS set = null;
                string setJson;
                using (StreamReader setReader = new StreamReader(zip.GetEntry(standard + ".json").Open()))
                {
                    setJson = setReader.ReadToEnd();
                }
                var setModel = JsonConvert.DeserializeObject<ExternalStandard>(setJson);
                var originalSetName = setModel.ShortName;

                // See whether an identical standard already exists in the database.
                foreach (var testSet in sets)
                {
                    setModel.ShortName = testSet.Short_Name;
                    var testSetJson = JsonConvert.SerializeObject(testSet.ToExternalStandard(), Formatting.Indented);
                    if (testSetJson == setJson)
                    {
                        set = testSet;
                        break;
                    }
                    else
                    {
                        setModel.ShortName = originalSetName;
                    }
                }

                if (set == null)
                {
                    // No identical standard found; pick a unique short name and create it.
                    int incr = 1;
                    while (sets.Any(s => s.Short_Name == setModel.ShortName))
                    {
                        setModel.ShortName = originalSetName + " " + incr;
                        incr++;
                    }

                    var setResult = await setModel.ToSet();
                    if (setResult.IsSuccess)
                    {
                        context.SETS.Add(setResult.Result);
                        // Questions that already exist (non-zero id) must not be re-inserted.
                        foreach (var question in setResult.Result.NEW_REQUIREMENT.SelectMany(s => s.NEW_QUESTIONs()).Where(s => s.Question_Id != 0).ToList())
                        {
                            context.Entry(question).State = EntityState.Unchanged;
                        }

                        await context.SaveChangesAsync();

                        //Set the GUID at time of export so we are sure it's right!!!
                        model.jANSWER = model.jANSWER.Where(s => s.Is_Requirement).GroupJoin(
                            setResult.Result.NEW_REQUIREMENT,
                            s => s.Custom_Question_Guid,
                            req => new Guid(new MD5CryptoServiceProvider().ComputeHash(Encoding.Default.GetBytes(originalSetName + "|||" + req.Requirement_Title + "|||" + req.Requirement_Text))).ToString(),
                            (erea, s) =>
                            {
                                var req = s.FirstOrDefault();
                                if (req != null)
                                {
                                    erea.Question_Or_Requirement_Id = req.Requirement_Id;
                                }
                                return erea;
                            })
                        .Concat(model.jANSWER.Where(s => !s.Is_Requirement).GroupJoin(
                            setResult.Result.NEW_QUESTION,
                            s => s.Custom_Question_Guid,
                            req => new Guid(new MD5CryptoServiceProvider().ComputeHash(Encoding.Default.GetBytes(req.Simple_Question))).ToString(),
                            (erer, s) =>
                            {
                                var req = s.FirstOrDefault();
                                if (req != null)
                                {
                                    erer.Question_Or_Requirement_Id = req.Question_Id;
                                }
                                return erer;
                            })).ToList();
                    }
                }

                // Rename the selected available-standard entries to the (possibly renamed) set.
                foreach (var availableStandard in model.jAVAILABLE_STANDARDS.Where(s => s.Set_Name == Regex.Replace(originalSetName, @"\W", "_") && s.Selected))
                {
                    availableStandard.Set_Name = Regex.Replace(setModel.ShortName, @"\W", "_");
                }
            }

            string email = context.USERS.Where(x => x.UserId == currentUserId).First().PrimaryEmail;

            Importer import = new Importer();
            int newAssessmentId = import.RunImportManualPortion(model, currentUserId, email, context);
            import.RunImportAutomatic(newAssessmentId, jsonObject, context);

            // NOTE: this entry will only come from an old .cset file import.
            ZipArchiveEntry importLegacyDiagram = zip.GetEntry("Diagram.csetd");
            if (importLegacyDiagram != null)
            {
                string oldXml;
                using (StreamReader ldr = new StreamReader(importLegacyDiagram.Open()))
                {
                    oldXml = ldr.ReadToEnd();
                }
                DiagramManager dm = new DiagramManager(context);
                dm.ImportOldCSETDFile(oldXml, newAssessmentId);
            }
        }
    }
}
/// <summary>
/// Persists the SAL (security assurance level) selections for the current
/// assessment, creating or updating its STANDARD_SELECTION row, and returns
/// the (possibly recalculated) SAL values to the caller.
/// </summary>
/// <param name="tmpsal">The SAL values posted by the client.</param>
/// <returns>
/// Ok with the saved SALs; BadRequest on invalid model state; NotFound if the
/// row vanished during a concurrency conflict; NoContent after an unexpected
/// (logged) error.
/// </returns>
public IHttpActionResult PostSAL(Sals tmpsal)
{
    if (!ModelState.IsValid)
    {
        return BadRequest(ModelState);
    }

    int assessmentId = Auth.AssessmentForUser();

    TinyMapper.Bind<Sals, STANDARD_SELECTION>();
    STANDARD_SELECTION standardSelection = db.STANDARD_SELECTION.Where(x => x.Assessment_Id == assessmentId).FirstOrDefault();
    if (standardSelection != null)
    {
        // Update the existing row in place.
        standardSelection = TinyMapper.Map<Sals, STANDARD_SELECTION>(tmpsal, standardSelection);
    }
    else
    {
        standardSelection = TinyMapper.Map<STANDARD_SELECTION>(tmpsal);
    }
    standardSelection.Assessment_Id = assessmentId;
    db.Entry(standardSelection).State = EntityState.Modified;

    LevelManager lm = new LevelManager(assessmentId, db);
    lm.SaveOtherLevels(assessmentId, tmpsal);
    lm.Init(standardSelection);
    if (tmpsal.SelectedSALOverride)
    {
        lm.SaveSALLevel(tmpsal.Selected_Sal_Level);
    }

    try
    {
        db.SaveChanges();

        StandardRepository sr = new StandardRepository(assessmentId, lm, new StandardManager(), new AssessmentModeData(db, assessmentId), new StandardSpecficLevelRepository(db));
        sr.Confidence_Level = tmpsal.CLevel;
        sr.Integrity_Level = tmpsal.ILevel;
        sr.Availability_Level = tmpsal.ALevel;

        // save the newly-calculated overall value unless the user overrode it
        if (!tmpsal.SelectedSALOverride)
        {
            tmpsal.Selected_Sal_Level = sr.Selected_Sal_Level;
            lm.SaveSALLevel(tmpsal.Selected_Sal_Level);
        }

        return Ok(tmpsal);
    }
    catch (DbUpdateConcurrencyException)
    {
        if (!STANDARD_SELECTIONExists(assessmentId))
        {
            return NotFound();
        }
        // Rethrow preserving the stack trace ("throw dbe" would reset it).
        throw;
    }
    catch (Exception e)
    {
        CSETWeb_Api.Helpers.ElmahWrapper.LogAndReportException(e, Request, HttpContext.Current);
    }

    return StatusCode(HttpStatusCode.NoContent);
}
/// <summary>
/// Converts an externally-defined requirement into a NEW_REQUIREMENT entity,
/// wiring up its heading/subheading, category, SAL levels, reference documents,
/// source file and questions.  Problems are reported through the returned
/// ConverterResult's error log rather than thrown.
/// </summary>
/// <param name="externalRequirement">The external requirement to convert.</param>
/// <param name="setName">The set (standard) the requirement belongs to.</param>
/// <param name="logger">Sink for conversion errors.</param>
/// <returns>A ConverterResult wrapping the populated NEW_REQUIREMENT plus any logged errors.</returns>
public static async Task<ConverterResult<NEW_REQUIREMENT>> ToRequirement(this IExternalRequirement externalRequirement, string setName, ILogger logger)
{
    var result = new ConverterResult<NEW_REQUIREMENT>(logger);
    var newRequirement = result.Result;

    //basic mappings
    newRequirement.Supplemental_Info = externalRequirement.Supplemental;
    newRequirement.Requirement_Text = externalRequirement.Text;
    newRequirement.Requirement_Title = externalRequirement.Identifier;
    newRequirement.Original_Set_Name = setName;
    newRequirement.Weight = externalRequirement.Weight;
    newRequirement.REQUIREMENT_LEVELS = new List<REQUIREMENT_LEVELS>();
    newRequirement.REQUIREMENT_REFERENCES = new List<REQUIREMENT_REFERENCES>();
    newRequirement.REQUIREMENT_SETS = new List<REQUIREMENT_SETS>() { new REQUIREMENT_SETS() { Set_Name = setName } };
    //newRequirement.NEW_QUESTION = new List<NEW_QUESTION>();

    QUESTION_GROUP_HEADING questionGroupHeading = null;
    UNIVERSAL_SUB_CATEGORY_HEADINGS subcategory = null;

    // Resolve the heading and subheading (case-insensitive), creating the
    // subcategory and the heading/subcategory pairing rows when missing.
    // NOTE(review): the nested empty catches silently swallow lookup/save
    // failures; a null heading/subcategory is then reported as an error below.
    using (var db = new CSET_Context())
    {
        try
        {
            questionGroupHeading = db.QUESTION_GROUP_HEADING.FirstOrDefault(s => s.Question_Group_Heading1.Trim().ToLower() == externalRequirement.Heading.Trim().ToLower());
            try
            {
                var subcatId = db.UNIVERSAL_SUB_CATEGORIES.FirstOrDefault(s => s.Universal_Sub_Category.Trim().ToLower() == externalRequirement.Subheading.Trim().ToLower())?.Universal_Sub_Category_Id ?? 0;
                if (subcatId == 0)
                {
                    // Subheading does not exist yet; create it and capture the new id.
                    var subcat = new UNIVERSAL_SUB_CATEGORIES() { Universal_Sub_Category = externalRequirement.Subheading };
                    db.UNIVERSAL_SUB_CATEGORIES.Add(subcat);
                    await db.SaveChangesAsync();
                    subcatId = subcat.Universal_Sub_Category_Id;
                }
                try
                {
                    // Find (or create) the pairing of this heading and subcategory.
                    subcategory = db.UNIVERSAL_SUB_CATEGORY_HEADINGS.FirstOrDefault(s => (s.Universal_Sub_Category_Id == subcatId) && (s.Question_Group_Heading_Id == questionGroupHeading.Question_Group_Heading_Id));
                    if (subcategory == null)
                    {
                        subcategory = new UNIVERSAL_SUB_CATEGORY_HEADINGS() { Universal_Sub_Category_Id = subcatId, Question_Group_Heading_Id = questionGroupHeading.Question_Group_Heading_Id };
                        db.UNIVERSAL_SUB_CATEGORY_HEADINGS.Add(subcategory);
                        await db.SaveChangesAsync();
                    }
                }
                catch { }
            }
            catch { }
        }
        catch { }
    }

    if (questionGroupHeading == null)
    {
        result.LogError(String.Format("Heading invalid for requirement {0} {1}. Please double check that the heading is spelled correctly.", externalRequirement.Identifier, externalRequirement.Text));
    }
    else
    {
        newRequirement.Question_Group_Heading_Id = questionGroupHeading.Question_Group_Heading_Id;
    }
    if (subcategory == null)
    {
        result.LogError(String.Format("Subheading invalid for requirement {0} {1}. Please double check that the heading is spelled correctly.", externalRequirement.Identifier, externalRequirement.Text));
    }

    // A missing category defaults to the heading.
    externalRequirement.Category = string.IsNullOrWhiteSpace(externalRequirement.Category) ? externalRequirement.Heading : externalRequirement.Category;
    using (var db = new CSET_Context())
    {
        var category = db.STANDARD_CATEGORY.FirstOrDefault(s => s.Standard_Category1 == externalRequirement.Category);
        if (category == null)
        {
            // Unknown category: create it through the navigation property.
            newRequirement.Standard_CategoryNavigation = new STANDARD_CATEGORY() { Standard_Category1 = externalRequirement.Category };
        }
        else
        {
            newRequirement.Standard_Category = category.Standard_Category1;
        }
    }

    // Add a REQUIREMENT_LEVELS row for every SAL at or above the requirement's level.
    foreach (var sal in Enum.GetValues(typeof(SalValues)).Cast<SalValues>().ToList())
    {
        try
        {
            if ((int)sal >= (externalRequirement.SecurityAssuranceLevel ?? 0))
            {
                var rl = new REQUIREMENT_LEVELS() { Standard_Level = sal.ToString(), Level_Type = "NST" };
                newRequirement.REQUIREMENT_LEVELS.Add(rl);
            }
        }
        catch
        {
            result.LogError(String.Format("An error occurred while adding SALs for requirement {0} {1}.", externalRequirement.Identifier, externalRequirement.Text));
        }
    }

    var importer = new DocumentImporter();

    // Attach reference documents; a reference whose file is unknown
    // (Gen_File_Id == 0) is reported and skipped.
    if (externalRequirement.References != null)
    {
        foreach (var reference in externalRequirement.References)
        {
            var reqReference = new REQUIREMENT_REFERENCES();
            try
            {
                reqReference.Destination_String = reference.Destination;
                reqReference.Page_Number = reference.PageNumber;
                reqReference.Section_Ref = String.IsNullOrEmpty(reference.SectionReference) ? "" : reference.SectionReference;
                reqReference.Gen_File_Id = importer.LookupGenFileId(reference.FileName);
            }
            catch
            {
                result.LogError(String.Format("Reference {0} could not be added for requirement {1} {2}.", externalRequirement.Source?.FileName, externalRequirement.Identifier, externalRequirement.Text));
            }
            if (reqReference.Gen_File_Id == 0)
            {
                result.LogError(String.Format("Reference {0} has not been loaded into CSET. Please add the file and try again.", externalRequirement.Source?.FileName, externalRequirement.Identifier, externalRequirement.Text));
            }
            else
            {
                newRequirement.REQUIREMENT_REFERENCES.Add(reqReference);
            }
        }
    }

    // Attach the source document, if any; unknown source files are reported.
    var reqSource = new REQUIREMENT_SOURCE_FILES();
    try
    {
        if (externalRequirement.Source != null)
        {
            reqSource.Gen_File_Id = importer.LookupGenFileId(externalRequirement.Source.FileName);
            reqSource.Page_Number = externalRequirement.Source.PageNumber;
            reqSource.Destination_String = externalRequirement.Source.Destination;
            reqSource.Section_Ref = String.IsNullOrEmpty(externalRequirement.Source.SectionReference) ? "" : externalRequirement.Source.SectionReference;
            if (reqSource.Gen_File_Id == 0)
            {
                result.LogError(String.Format("Source {0} has not been loaded into CSET. Please add the file and try again.", externalRequirement.Source?.FileName, externalRequirement.Identifier, externalRequirement.Text));
            }
            else
            {
                newRequirement.REQUIREMENT_SOURCE_FILES.Add(reqSource);
            }
        }
    }
    catch
    {
        result.LogError(String.Format("Source {0} could not be added for requirement {1} {2}.", externalRequirement.Source?.FileName, externalRequirement.Identifier, externalRequirement.Text));
    }

    // A requirement with no explicit questions gets one generated from its own text.
    if (externalRequirement.Questions == null || externalRequirement.Questions.Count() == 0)
    {
        externalRequirement.Questions = new QuestionList() { externalRequirement.Text };
    }

    foreach (var question in externalRequirement.Questions)
    {
        NEW_QUESTION newQuestion = null;
        var set = new NEW_QUESTION_SETS() { Set_Name = setName, NEW_QUESTION_LEVELS = new List<NEW_QUESTION_LEVELS>() };

        // Reuse an existing question with identical text, detaching it so the
        // short-lived context stops tracking it.
        using (var db = new CSET_Context())
        {
            newQuestion = db.NEW_QUESTION.FirstOrDefault(s => s.Simple_Question.ToLower().Trim() == question.ToLower().Trim());
            if (newQuestion != null)
            {
                db.Entry(newQuestion).State = EntityState.Detached;
            }
        }

        if (newQuestion == null)
        {
            newQuestion = new NEW_QUESTION();
            try
            {
                newQuestion.Original_Set_Name = setName;
                newQuestion.Simple_Question = question;
                newQuestion.Weight = externalRequirement.Weight;
                newQuestion.Question_Group_Id = questionGroupHeading.Question_Group_Heading_Id;
                newQuestion.Universal_Sal_Level = ((SalValues)(externalRequirement.SecurityAssuranceLevel ?? (int)SalValues.L)).ToString();
                // Std_Ref is the set name without underscores, capped at 50 characters.
                newQuestion.Std_Ref = setName.Replace("_", "");
                newQuestion.Std_Ref = newQuestion.Std_Ref.Substring(0, Math.Min(newQuestion.Std_Ref.Length, 50));
                newQuestion.Heading_Pair_Id = subcategory.Heading_Pair_Id;
            }
            catch
            {
                result.LogError(String.Format("Question {0} could not be added for requirement {1} {2}.", question, externalRequirement.Identifier, externalRequirement.Text));
            }
        }

        // Question SAL levels mirror the requirement's: every SAL at or above its level.
        foreach (var sal in Enum.GetValues(typeof(SalValues)).Cast<SalValues>().ToList())
        {
            try
            {
                if ((int)sal >= (externalRequirement.SecurityAssuranceLevel ?? 0))
                {
                    var rl = new NEW_QUESTION_LEVELS() { Universal_Sal_Level = sal.ToString(), };
                    set.NEW_QUESTION_LEVELS.Add(rl);
                }
            }
            catch
            {
                result.LogError(String.Format("An error occurred while adding SALs for requirement {1} {2}.", externalRequirement.Source?.FileName, externalRequirement.Identifier, externalRequirement.Text));
            }
        }

        newQuestion.NEW_QUESTION_SETS = new List<NEW_QUESTION_SETS>();
        newQuestion.REQUIREMENT_QUESTIONS_SETS = new List<REQUIREMENT_QUESTIONS_SETS>();
        newQuestion.NEW_QUESTION_SETS.Add(set);
        newQuestion.REQUIREMENT_QUESTIONS_SETS.Add(new REQUIREMENT_QUESTIONS_SETS { Set_Name = setName, Requirement_ = newRequirement });

        // NOTE(review): this question is added to a context that is disposed
        // without SaveChanges; persistence presumably happens later when the
        // caller saves the newRequirement graph — verify against callers.
        using (CSET_Context db = new CSET_Context())
        {
            db.NEW_QUESTION.Add(newQuestion);
        }
    }
    return(result);
}
/// <summary>
/// Clones requirements and their connecting rows into a new Set.
/// Copies NEW_REQUIREMENT, REQUIREMENT_SETS, REQUIREMENT_LEVELS,
/// REQUIREMENT_QUESTIONS_SETS, NEW_QUESTION_SETS, NEW_QUESTION_LEVELS,
/// REQUIREMENT_SOURCE_FILES and REQUIREMENT_REFERENCES rows belonging to
/// this.origSetName, retargeting them at the given copy set.
/// </summary>
/// <param name="copySet">The already-saved clone of the SETS row to attach the cloned rows to.</param>
private void CloneRequirements(SETS copySet)
{
    // Maps original identity ids to the ids generated for their clones.
    Dictionary<int, int> requirementIdMap = new Dictionary<int, int>();
    Dictionary<int, int> questionSetIdMap = new Dictionary<int, int>();

    using (var db = new CSET_Context())
    {
        // Requirements joined to their REQUIREMENT_SETS row for the original set.
        var queryReq = from r in db.NEW_REQUIREMENT
                       from rs in db.REQUIREMENT_SETS.Where(x => x.Requirement_Id == r.Requirement_Id && x.Set_Name == this.origSetName)
                       select new { r, rs };
        var originalRequirements = queryReq.ToList();

        // Clone NEW_REQUIREMENT and REQUIREMENT_SETS
        foreach (var origRequirement in originalRequirements)
        {
            var newReq = (NEW_REQUIREMENT)db.Entry(origRequirement.r).CurrentValues.ToObject();
            newReq.Requirement_Id = 0;
            db.NEW_REQUIREMENT.Add(newReq);
            // Save immediately so the generated identity is available for the map.
            db.SaveChanges();
            requirementIdMap.Add(origRequirement.r.Requirement_Id, newReq.Requirement_Id);

            var copyReqSet = (REQUIREMENT_SETS)db.Entry(origRequirement.rs).CurrentValues.ToObject();
            copyReqSet.Requirement_Id = newReq.Requirement_Id;
            copyReqSet.Set_Name = copySet.Set_Name;
            db.REQUIREMENT_SETS.Add(copyReqSet);

            // Clone SAL levels for requirement
            var dbRL = db.REQUIREMENT_LEVELS
                .Where(x => x.Requirement_Id == origRequirement.r.Requirement_Id).ToList();
            foreach (REQUIREMENT_LEVELS origLevel in dbRL)
            {
                // NOTE(review): Id is copied from the original row — confirm Id is
                // not a database-generated identity column, otherwise it is
                // replaced (or conflicts) on save.
                var copyLevel = new REQUIREMENT_LEVELS
                {
                    Requirement_Id = newReq.Requirement_Id,
                    Standard_Level = origLevel.Standard_Level,
                    Level_Type = origLevel.Level_Type,
                    Id = origLevel.Id
                };
                db.REQUIREMENT_LEVELS.Add(copyLevel);
            }
        }

        // Clone REQUIREMENT_QUESTIONS_SETS
        var dbRQS = db.REQUIREMENT_QUESTIONS_SETS.Where(x => x.Set_Name == origSetName).ToList();
        foreach (REQUIREMENT_QUESTIONS_SETS origRQS in dbRQS)
        {
            var copyRQS = (REQUIREMENT_QUESTIONS_SETS)db.Entry(origRQS).CurrentValues.ToObject();
            copyRQS.Set_Name = copySet.Set_Name;
            copyRQS.Requirement_Id = requirementIdMap[copyRQS.Requirement_Id];
            db.REQUIREMENT_QUESTIONS_SETS.Add(copyRQS);
        }

        // Clone NEW_QUESTIONS_SETS
        var dbQS = db.NEW_QUESTION_SETS.Where(x => x.Set_Name == origSetName).ToList();
        foreach (NEW_QUESTION_SETS origQS in dbQS)
        {
            var copyQS = (NEW_QUESTION_SETS)db.Entry(origQS).CurrentValues.ToObject();
            copyQS.Set_Name = copySet.Set_Name;
            // default the identity PK so the database generates a fresh id
            copyQS.New_Question_Set_Id = 0;
            db.NEW_QUESTION_SETS.Add(copyQS);
            // Save immediately so the generated identity is available for the map.
            db.SaveChanges();
            questionSetIdMap.Add(origQS.New_Question_Set_Id, copyQS.New_Question_Set_Id);
        }

        // Clone NEW_QUESTION_LEVELS for the new NEW_QUESTIONS_SETS just created
        var dbQL = from nql in db.NEW_QUESTION_LEVELS
                   join nqs in db.NEW_QUESTION_SETS on nql.New_Question_Set_Id equals nqs.New_Question_Set_Id
                   where nqs.Set_Name == this.origSetName
                   select nql;
        var listQL = dbQL.ToList();
        foreach (NEW_QUESTION_LEVELS origQL in listQL)
        {
            var copyQL = (NEW_QUESTION_LEVELS)db.Entry(origQL).CurrentValues.ToObject();
            copyQL.New_Question_Set_Id = questionSetIdMap[origQL.New_Question_Set_Id];
            db.NEW_QUESTION_LEVELS.Add(copyQL);
        }

        // There is no need to clone UNIVERSAL_SUB_CATEGORY_HEADINGS
        // because the classification of a Question with a Question Header and a Subcategory
        // only exists once. The Set it is tied to is the Set where the original
        // classification was made.

        // Clone REQUIREMENT_SOURCE_FILES
        var queryRSF = from rsf in db.REQUIREMENT_SOURCE_FILES
                       join rs in db.REQUIREMENT_SETS on rsf.Requirement_Id equals rs.Requirement_Id
                       where rs.Set_Name == this.origSetName
                       select rsf;
        var listRSF = queryRSF.ToList();
        foreach (var rsf in listRSF)
        {
            var newRSF = (REQUIREMENT_SOURCE_FILES)db.Entry(rsf).CurrentValues.ToObject();
            newRSF.Requirement_Id = requirementIdMap[newRSF.Requirement_Id];
            db.REQUIREMENT_SOURCE_FILES.Add(newRSF);
        }

        // Clone REQUIREMENT_REFERENCES
        var queryRR = from rr in db.REQUIREMENT_REFERENCES
                      join rs in db.REQUIREMENT_SETS on rr.Requirement_Id equals rs.Requirement_Id
                      where rs.Set_Name == this.origSetName
                      select rr;
        var listRR = queryRR.ToList();
        foreach (var rr in listRR)
        {
            var newRR = (REQUIREMENT_REFERENCES)db.Entry(rr).CurrentValues.ToObject();
            newRR.Requirement_Id = requirementIdMap[newRR.Requirement_Id];
            db.REQUIREMENT_REFERENCES.Add(newRR);
        }

        // Persist everything added since the last intermediate save.
        db.SaveChanges();
    }
}
/// <summary>
/// Imports a CSET assessment from an exported zip archive.  Reads model.json,
/// upgrades it to the current schema, imports custom standard documents and
/// custom standards, runs the importer, and then stores each document file
/// found in the archive against its imported DOCUMENT_FILE row.
/// </summary>
/// <param name="zipFileFromDatabase">The raw zip archive bytes read from the database.</param>
/// <param name="currentUserId">The user that will own the imported assessment.</param>
public async Task ProcessCSETAssessmentImport(byte[] zipFileFromDatabase, int currentUserId)
{
    using (CSET_Context web = new CSET_Context())
    {
        //* read from db and set as memory stream here.
        using (Stream fs = new MemoryStream(zipFileFromDatabase))
        using (ZipArchive zip = new ZipArchive(fs))
        {
            string jsonObject;
            using (StreamReader r = new StreamReader(zip.GetEntry("model.json").Open()))
            {
                jsonObject = r.ReadToEnd();
            }

            // Apply any data updates to older versions
            ImportUpgradeManager upgrader = new ImportUpgradeManager();
            jsonObject = upgrader.Upgrade(jsonObject);

            UploadAssessmentModel model = (UploadAssessmentModel)JsonConvert.DeserializeObject(jsonObject, new UploadAssessmentModel().GetType());

            // Import any custom standard documents that are not already present.
            foreach (var doc in model.CustomStandardDocs)
            {
                var genFile = web.GEN_FILE.FirstOrDefault(s => s.File_Name == doc);
                if (genFile == null)
                {
                    ExternalDocument docModel;
                    using (StreamReader docReader = new StreamReader(zip.GetEntry(doc + ".json").Open()))
                    {
                        docModel = JsonConvert.DeserializeObject<ExternalDocument>(docReader.ReadToEnd());
                    }
                    genFile = docModel.ToGenFile();
                    var extension = Path.GetExtension(genFile.File_Name).Substring(1);
                    genFile.File_Type_ = web.FILE_TYPE.Where(s => s.File_Type1 == extension).FirstOrDefault();

                    try
                    {
                        web.FILE_REF_KEYS.Add(new FILE_REF_KEYS { Doc_Num = genFile.Doc_Num });
                        await web.SaveChangesAsync();
                    }
                    catch
                    {
                        // Best effort: the ref key may already exist.
                    }

                    web.GEN_FILE.Add(genFile);
                    web.SaveChanges();
                }
            }

            // Import each custom standard referenced by the model.
            foreach (var standard in model.CustomStandards)
            {
                var sets = web.SETS.Where(s => s.Set_Name.Contains(standard)).ToList();
                SETS set = null;
                string setJson;
                using (StreamReader setReader = new StreamReader(zip.GetEntry(standard + ".json").Open()))
                {
                    setJson = setReader.ReadToEnd();
                }
                var setModel = JsonConvert.DeserializeObject<ExternalStandard>(setJson);
                var originalSetName = setModel.ShortName;

                // See whether an identical standard already exists in the database.
                foreach (var testSet in sets)
                {
                    setModel.ShortName = testSet.Short_Name;
                    var testSetJson = JsonConvert.SerializeObject(testSet.ToExternalStandard(), Formatting.Indented);
                    if (testSetJson == setJson)
                    {
                        set = testSet;
                        break;
                    }
                    else
                    {
                        setModel.ShortName = originalSetName;
                    }
                }

                if (set == null)
                {
                    // No identical standard found; pick a unique short name and create it.
                    int incr = 1;
                    while (sets.Any(s => s.Short_Name == setModel.ShortName))
                    {
                        setModel.ShortName = originalSetName + " " + incr;
                        incr++;
                    }

                    var setResult = await setModel.ToSet();
                    if (setResult.IsSuccess)
                    {
                        web.SETS.Add(setResult.Result);
                        // Questions that already exist (non-zero id) must not be re-inserted.
                        foreach (var question in setResult.Result.NEW_REQUIREMENT.SelectMany(s => s.NEW_QUESTIONs()).Where(s => s.Question_Id != 0).ToList())
                        {
                            web.Entry(question).State = EntityState.Unchanged;
                        }

                        await web.SaveChangesAsync();

                        //Set the GUID at time of export so we are sure it's right!!!
                        model.jANSWER = model.jANSWER.Where(s => s.Is_Requirement).GroupJoin(
                            setResult.Result.NEW_REQUIREMENT,
                            s => s.Custom_Question_Guid,
                            req => new Guid(new MD5CryptoServiceProvider().ComputeHash(Encoding.Default.GetBytes(originalSetName + "|||" + req.Requirement_Title + "|||" + req.Requirement_Text))).ToString(),
                            (erea, s) =>
                            {
                                var req = s.FirstOrDefault();
                                if (req != null)
                                {
                                    erea.Question_Or_Requirement_Id = req.Requirement_Id;
                                }
                                return erea;
                            })
                        .Concat(model.jANSWER.Where(s => !s.Is_Requirement).GroupJoin(
                            setResult.Result.NEW_QUESTION,
                            s => s.Custom_Question_Guid,
                            req => new Guid(new MD5CryptoServiceProvider().ComputeHash(Encoding.Default.GetBytes(req.Simple_Question))).ToString(),
                            (erer, s) =>
                            {
                                var req = s.FirstOrDefault();
                                if (req != null)
                                {
                                    erer.Question_Or_Requirement_Id = req.Question_Id;
                                }
                                return erer;
                            })).ToList();
                    }
                }

                // Rename the selected available-standard entries to the (possibly renamed) set.
                foreach (var availableStandard in model.jAVAILABLE_STANDARDS.Where(s => s.Set_Name == Regex.Replace(originalSetName, @"\W", "_") && s.Selected))
                {
                    availableStandard.Set_Name = Regex.Replace(setModel.ShortName, @"\W", "_");
                }
            }

            string email = web.USERS.Where(x => x.UserId == currentUserId).First().PrimaryEmail;

            Importer import = new Importer();
            Tuple<int, Dictionary<int, DOCUMENT_FILE>> t = import.RunImport(model, currentUserId, email, web);
            Dictionary<int, DOCUMENT_FILE> oldIdToNewDocument = t.Item2;

            // Store each document's bytes from the archive against the imported row.
            foreach (jDOCUMENT_FILE d in model.jDOCUMENT_FILE)
            {
                DOCUMENT_FILE docDB = oldIdToNewDocument[d.Document_Id];
                string newPath = Path.GetFileName(d.Path);// getPath(d.Path);
                ZipArchiveEntry entry = zip.GetEntry(newPath);
                if (entry == null)
                {
                    // Fall back to the original (possibly rooted) path stored in the model.
                    entry = zip.GetEntry(d.Path);
                }
                if (entry != null)
                {
                    SaveFileToDB(entry, docDB);
                }
                web.SaveChanges();
            }
        }
    }
}