/// <summary>
/// Parses a node set XML document, determines for each model it contains whether a newer
/// version needs to be cached, and registers the model plus its dependencies in
/// <paramref name="results"/>. Database persistence (Upsert) is deliberately deferred so it
/// can happen inside the import transaction.
/// </summary>
/// <param name="results">Accumulator for imported models and their file-cache associations.</param>
/// <param name="nodeSetXml">The raw UANodeSet XML document.</param>
/// <param name="authorId">Cast to <see cref="UserToken"/> via 'as' below — presumably callers always pass a UserToken; confirm (a non-UserToken silently becomes null).</param>
/// <returns>True if at least one model in the node set was new or newer than the cached copy.</returns>
public bool AddNodeSet(UANodeSetImportResult results, string nodeSetXml, object authorId)
{
    bool WasNewSet = false;
    #region Comment Processing
    var doc = XElement.Load(new StringReader(nodeSetXml));
    var comments = doc.DescendantNodes().OfType<XComment>();
    // NOTE: loop body is intentionally empty — kept as documentation of why XML comments
    // cannot be preserved on import. (XElement.Load also validates the XML up front.)
    foreach (XComment comment in comments)
    {
        //inline XML Commments are not showing here...only real XML comments (not file comments with /**/)
        //Unfortunately all OPC UA License Comments are not using XML Comments but file-comments and therefore cannot be "preserved"
    }
    #endregion
    UANodeSet nodeSet;
    // workaround for bug https://github.com/dotnet/runtime/issues/67622
    var nodeSetXmlPatched = nodeSetXml.Replace("<Value/>", "<Value xsi:nil='true' />");
    using (var nodesetBytes = new MemoryStream(Encoding.UTF8.GetBytes(nodeSetXmlPatched)))
    {
        nodeSet = UANodeSet.Read(nodesetBytes);
    }
    // Node set without an explicit model table: synthesize one from the first namespace URI
    // and assume a dependency on the OPC UA base model.
    if (nodeSet.Models?.Any() != true)
    {
        nodeSet.Models = new ModelTableEntry[]
        {
            new ModelTableEntry
            {
                ModelUri = nodeSet.NamespaceUris?.FirstOrDefault(),
                RequiredModel = new ModelTableEntry[] { new ModelTableEntry { ModelUri = "http://opcfoundation.org/UA/" } },
            }
        };
    }
    UANodeSet tOldNodeSet = null;
    foreach (var ns in nodeSet.Models)
    {
        UserToken userToken = authorId as UserToken;
        var authorToken = userToken;
        bool isGlobalNodeSet = CESMII.ProfileDesigner.OpcUa.OpcUaImporter._coreNodeSetUris.Contains(ns.ModelUri);
        if (isGlobalNodeSet)
        {
            userToken = UserToken.GetGlobalUser(userToken); // Write as a global node set shared access user
            authorToken = null; // global node sets carry no individual author
        }
        // Look up a previously cached file model for this exact name/version.
        NodeSetFileModel myModel = GetProfileModel(
            new ModelNameAndVersion
            {
                ModelUri = ns.ModelUri,
                ModelVersion = ns.Version,
                PublicationDate = ns.PublicationDate,
            },
            userToken);
        if (myModel == null)
        {
            // Fall back to a model already seen earlier in this import run that satisfies
            // the version requirement.
            myModel = results.Models.FirstOrDefault(m => m.NameVersion.IsNewerOrSame(new ModelNameAndVersion
            {
                ModelUri = ns.ModelUri,
                ModelVersion = ns.Version,
                PublicationDate = ns.PublicationDate,
            }
            ))?.NameVersion?.CCacheId as NodeSetFileModel;
        }
        bool CacheNewerVersion = true;
        if (myModel != null)
        {
            CacheNewerVersion = false;
            // workaround for bug https://github.com/dotnet/runtime/issues/67622
            var fileCachepatched = myModel.FileCache.Replace("<Value/>", "<Value xsi:nil='true' />");
            using (var nodeSetStream = new MemoryStream(Encoding.UTF8.GetBytes(fileCachepatched)))
            {
                // Parse the cached file only once per AddNodeSet call and reuse for all models.
                if (tOldNodeSet == null)
                {
                    tOldNodeSet = UANodeSet.Read(nodeSetStream);
                }
                var tns = tOldNodeSet.Models.Where(s => s.ModelUri == ns.ModelUri).OrderByDescending(s => s.PublicationDate).FirstOrDefault();
                if (tns == null || ns.PublicationDate > tns.PublicationDate)
                {
                    CacheNewerVersion = true; //Cache the new NodeSet if the old (file) did not contain the model or if the version of the new model is greater
                }
            }
        }
        int? cacheId = myModel != null ? myModel.ID : 0;
        bool newInImport = false;
        if (CacheNewerVersion) //Cache only newer version
        {
            if (myModel == null)
            {
                myModel = new NodeSetFileModel
                {
                    ID = cacheId,
                    FileName = ns.ModelUri,
                    Version = ns.Version,
                    PublicationDate = ns.PublicationDate,
                    // TODO clean up the dependency
                    AuthorId = authorToken?.UserId,
                    // NOTE: the original (unpatched) XML is cached, not nodeSetXmlPatched.
                    FileCache = nodeSetXml
                };
                // Defer Upsert until later to make it part of a transaction
                // _dalNodeSetFile.Upsert(myModel, userToken, false);
                newInImport = true;
            }
            // Defer the updates to the import transaction
            //var resIns = _dalNodeSetFile.Upsert(nsModel, (AuthorID == null) ? 0 : (int)AuthorID, true).GetAwaiter().GetResult();
            //cacheId = resIns.Item1;
            //newInImport = resIns.Item2;
            WasNewSet = true;
        }
        var tModel = results.AddModelAndDependencies(nodeSet, ns, null, WasNewSet);
        if (tModel?.NameVersion != null && myModel != null)
        {
            // Associate the cached file with the imported model for later transaction processing.
            tModel.NameVersion.CCacheId = myModel;
            tModel.NewInThisImport = newInImport;
        }
        // Resolve cache ids for any dependency models discovered via AddModelAndDependencies.
        foreach (var model in results.Models)
        {
            if (model.NameVersion.CCacheId == null)
            {
                GetProfileModel(model.NameVersion, userToken);
            }
        }
    }
    return (WasNewSet);
}
/// <summary>
/// Re-factor - Moved this to its own method to be shared by two different endpoints. Only other changes were
/// returning result message model false instead of badRequest.
/// Imports a list of OPC UA node set files: resolves dependencies (DB cache / Cloud Library),
/// validates them against the standard node set table, imports the node models inside a
/// profile DAL transaction, and reports all progress and outcomes (success or failure) via
/// the import log row identified by <paramref name="logId"/>.
/// </summary>
/// <param name="nodeSetXmlList">The uploaded node set files (file name + XML payload).</param>
/// <param name="logId">Id of the import-log entry used to report progress to the front end.</param>
/// <param name="userToken">Token of the importing user; the global user is substituted where needed.</param>
/// <returns></returns>
private async Task ImportOpcUaNodeSetInternal(List<ImportOPCModel> nodeSetXmlList, int logId, UserToken userToken)
{
    var dalImportLog = GetImportLogDalIsolated();
    var sw = Stopwatch.StartNew();
    _logger.LogTrace("Starting import");
    #region CM new code for importing all NodeSet in correct order and with Dependency Resolution
    var fileNames = string.Join(", ", nodeSetXmlList.Select(f => f.FileName).ToArray<string>());
    var filesImportedMsg = $"Importing File{(nodeSetXmlList.Count.Equals(1) ? "" : "s")}: {fileNames}";
    _logger.LogInformation($"ImportService|ImportOpcUaProfile|{filesImportedMsg}. User Id:{userToken}.");
    //wrap in scope so that we don't lose the scope of the dependency injected objects once the
    //web api request completes and disposes of the import service object (and its module vars)
    using (var scope = _serviceScopeFactory.CreateScope())
    {
        _logger.LogTrace($"Timestamp||ImportId:{logId}||Getting DAL services: {sw.Elapsed}");
        var dalProfile = scope.ServiceProvider.GetService<IDal<Profile, ProfileModel>>();
        var dalNodeSetFile = scope.ServiceProvider.GetService<IDal<NodeSetFile, NodeSetFileModel>>();
        var dalStandardNodeSet = scope.ServiceProvider.GetService<IDal<StandardNodeSet, StandardNodeSetModel>>();
        var dalEngineeringUnits = scope.ServiceProvider.GetService<IDal<EngineeringUnit, EngineeringUnitModel>>();
        var importer = scope.ServiceProvider.GetService<OpcUaImporter>();
        _logger.LogTrace($"Timestamp||ImportId:{logId}||Retrieved DAL services: {sw.Elapsed}");
        // One-time seeding: import engineering units if none exist yet.
        if (dalEngineeringUnits.Count(userToken) == 0)
        {
            await CreateImportLogMessage(dalImportLog, logId, userToken, $"Importing engineering units...<br/>{filesImportedMsg}", TaskStatusEnum.InProgress);
            await importer.ImportEngineeringUnitsAsync(UserToken.GetGlobalUser(userToken));
        }
        await CreateImportLogMessage(dalImportLog, logId, userToken, $"Validating nodeset files and dependencies...<br/>{filesImportedMsg}", TaskStatusEnum.InProgress);
        //init the warnings object outside the try/catch so that saving warnings happens after conclusion of import.
        //We don't want an execption saving warnings to DB to cause a "failed" import message
        //if something goes wrong on the saving of the warnings to the DB, we handle it outside of the import messages.
        List<WarningsByNodeSet> nodesetWarnings = new List<WarningsByNodeSet>();
        //wrap the importNodesets for total coverage of exceptions
        //we need to inform the front end of an exception and update the import log on
        //any failure so it can refresh front end accordingly
        //Todo: Revisit this and limit the try/catch blocks
        try
        {
            //TODO: Can we pass in authorId (nullable) to the import and then assign it if is passed in. In either case,
            //assign external author to the value set within the nodeset.
            //TODO: C2-95: Last parameter should be a setting in the UX if somebody wants to use the Precise NodeSet Version instead of the highest(last) version available
            //The first parameter can be used to define a custom UANodeSetCache. If null the default FileCache is used
            //var myNodeSetCache = new OPCUANodeSetHelpers.UANodeSetFileCache(); //FILE CACHE
            var myNodeSetCache = new UANodeSetDBCache(dalNodeSetFile, dalStandardNodeSet, userToken); // DB CACHE
            dalProfile.StartTransaction();
            _logger.LogTrace($"Timestamp||ImportId:{logId}||Importing node set files: {sw.Elapsed}");
            var nodeSetXmlStringList = nodeSetXmlList.Select(nodeSetXml => nodeSetXml.Data).ToList();
            // Surface Cloud Library downloads in the import log while dependencies are resolved.
            OnNodeSet callback = (string namespaceUri, DateTime? publicationDate) =>
            {
                CreateImportLogMessage(dalImportLog, logId, userToken, $"Downloading from Cloud Library: {namespaceUri} {publicationDate}", TaskStatusEnum.InProgress).Wait();
            };
            UANodeSetImportResult resultSet;
            try
            {
                _nodeSetResolver.OnDownloadNodeSet += callback;
                resultSet = UANodeSetImporter.ImportNodeSets(myNodeSetCache, null, nodeSetXmlStringList, false, userToken, _nodeSetResolver);
            }
            finally
            {
                // Always detach the handler — _nodeSetResolver outlives this request.
                _nodeSetResolver.OnDownloadNodeSet -= callback;
            }
            _logger.LogTrace($"Timestamp||ImportId:{logId}||Imported node set files: {sw.Elapsed}");
            if (!string.IsNullOrEmpty(resultSet.ErrorMessage))
            {
                //The UA Importer encountered a crash/error
                //failed complete message
                dalProfile.RollbackTransaction();
                await CreateImportLogMessage(dalImportLog, logId, userToken, resultSet.ErrorMessage + $"<br/>{filesImportedMsg}", TaskStatusEnum.Failed);
                return;
            }
            if (resultSet?.MissingModels?.Count > 0)
            {
                //The UA Importer tried to resolve already all missing NodeSet either from Cache or CloudLib but could not find all dependencies
                //failed complete message
                dalProfile.RollbackTransaction();
                var missingModelsText = string.Join(", ", resultSet.MissingModels);
                await CreateImportLogMessage(dalImportLog, logId, userToken, $"Missing dependent node sets: {missingModelsText}.", TaskStatusEnum.Failed);
                return;
            }
            var profilesAndNodeSets = new List<ProfileModelAndNodeSet>();
            //This area will be put in an interface that can be used by the Importer (after Friday Presentation)
            try
            {
                _logger.LogTrace($"Timestamp||ImportId:{logId}||Getting standard nodesets files: {sw.Elapsed}");
                var res = dalStandardNodeSet.GetAll(userToken);
                _logger.LogTrace($"Timestamp||ImportId:{logId}||Verifying standard nodeset: {sw.Elapsed}");
                resultSet = UANodeSetValidator.VerifyNodeSetStandard(resultSet, res);
                //TODO: @Chris - Capture if specific nodeset is not in standard table and report that specifically. Separate that validation
                //      from potential issues with the import itself.
                //Chris: Done, but ErrorMessage is only set if there was an issue with the function, NOT if a noodeset was in the standard table.
                //      If a nodeset is UA Standard, the NameVersion.UAStandardProfileID is set to >0. otherwise its 0.
                if (!string.IsNullOrEmpty(resultSet?.ErrorMessage))
                {
                    await CreateImportLogMessage(dalImportLog, logId, userToken, resultSet.ErrorMessage.ToLower() + $"<br/>{filesImportedMsg}", TaskStatusEnum.Failed);
                    return;
                }
                // The bare block below is left over from the commented-out condition above.
                //if (myNodeSetCache == null || myNodeSetCache.GetType() != typeof(OPCUANodeSetHelpers.UANodeSetFileCache))
                {
                    // Pair each imported model with an existing or newly created ProfileModel.
                    foreach (var tmodel in resultSet.Models)
                    {
                        var nsModel = tmodel.NameVersion.CCacheId as NodeSetFileModel;
                        _logger.LogTrace($"Timestamp||ImportId:{logId}||Loading nodeset {tmodel.NameVersion.ModelUri}: {sw.Elapsed}");
                        var profile = dalProfile.Where(p => p.Namespace == tmodel.NameVersion.ModelUri /*&& p.PublicationDate == tmodel.NameVersion.PublicationDate*/ /*&& (p.AuthorId == null || p.AuthorId == userToken)*/, userToken, verbose: false)?.Data?.OrderByDescending(p => p.Version)?.FirstOrDefault();
                        _logger.LogTrace($"Timestamp||ImportId:{logId}||Loaded nodeset {tmodel.NameVersion.ModelUri}: {sw.Elapsed}");
                        if (profile == null)
                        {
                            profile = new ProfileModel
                            {
                                Namespace = tmodel.NameVersion.ModelUri,
                                PublishDate = tmodel.NameVersion.PublicationDate,
                                Version = tmodel.NameVersion.ModelVersion,
                                AuthorId = nsModel.AuthorId,
                                StandardProfileID = tmodel.NameVersion.UAStandardModelID,
                            };
                        }
                        if (profile.NodeSetFiles == null)
                        {
                            profile.NodeSetFiles = new List<NodeSetFileModel>();
                        }
                        // Avoid attaching the same file twice to the profile.
                        if (!profile.NodeSetFiles.Where(m => m.FileName == nsModel.FileName && m.PublicationDate == nsModel.PublicationDate).Any())
                        {
                            profile.NodeSetFiles.Add(nsModel);
                        }
                        profilesAndNodeSets.Add(new ProfileModelAndNodeSet
                        {
                            Profile = profile,
                            // TODO use the nodesetfile instead
                            NodeSetModel = tmodel,
                        });
                    }
                }
                await CreateImportLogMessage(dalImportLog, logId, userToken, $"Nodeset files validated.<br/>{filesImportedMsg}", TaskStatusEnum.InProgress);
            }
            catch (Exception e)
            {
                myNodeSetCache.DeleteNewlyAddedNodeSetsFromCache(resultSet);
                //log complete message to logger and abbreviated message to user.
                _logger.LogCritical(e, $"ImportId:{logId}||ImportService|ImportOpcUaProfile|{e.Message}");
                //failed complete message
                dalProfile.RollbackTransaction();
                await CreateImportLogMessage(dalImportLog, logId, userToken, $"Nodeset validation failed: {e.Message}.<br/>{filesImportedMsg}", TaskStatusEnum.Failed);
                return;
            }
            //To Here
            #endregion
            _logger.LogTrace($"Timestamp||ImportId:{logId}||Starting node import: {sw.Elapsed}");
            await CreateImportLogMessage(dalImportLog, logId, userToken, $"Processing nodeset data...<br/>{filesImportedMsg}", TaskStatusEnum.InProgress);
            //shield the front end from an exception message. Catch it, log it, and return success is false w/ simplified message
            try
            {
                Dictionary<string, ProfileTypeDefinitionModel> profileItems = new Dictionary<string, ProfileTypeDefinitionModel>();
                int? result = 0;
                // TODO Expose in the UI? Feedback from Jonathan if this option is interesting
                // NOTE(review): requiredTypesOnly is assigned but never read in this method.
                bool requiredTypesOnly = userToken != null; // Only import profiles for types actually used by the node set
                Task primeEFCacheTask = null;
                var startEFCache = sw.Elapsed;
                if (true)
                {
                    // Kick off background loading of the EF caches for all affected profiles.
                    var profileIds = profilesAndNodeSets.Select(pn => pn.Profile.ID).Where(i => (i ?? 0) != 0);
                    _logger.LogTrace($"Timestamp||ImportId:{logId}||Loading EF cache: {sw.Elapsed}");
                    primeEFCacheTask = importer._dal.LoadIntoCacheAsync(pt => profileIds.Contains(pt.ProfileId));
                    primeEFCacheTask = primeEFCacheTask.ContinueWith((t) => importer._dtDal.LoadIntoCacheAsync(dt => profileIds.Contains(dt.CustomType.ProfileId))).Unwrap();
                }
                var modelsToImport = new List<NodeSetModel>();
                foreach (var profileAndNodeSet in profilesAndNodeSets)
                {
                    //only show message for the items which are newly imported...
                    if (profileAndNodeSet.NodeSetModel.NewInThisImport)
                    {
                        await CreateImportLogMessage(dalImportLog, logId, userToken, $"Processing nodeset file: {profileAndNodeSet.NodeSetModel.NameVersion}...", TaskStatusEnum.InProgress);
                    }
                    // Capture importer log output per node set so warnings can be stored with the profile.
                    var logList = new List<string>();
                    (importer.Logger as LoggerCapture).LogList = logList;
                    var nodeSetModels = await importer.LoadNodeSetAsync(profileAndNodeSet.NodeSetModel.NodeSet, profileAndNodeSet.Profile, !profileAndNodeSet.NodeSetModel.NewInThisImport);
                    if (profileAndNodeSet.NodeSetModel.NewInThisImport)
                    {
                        foreach (var model in nodeSetModels)
                        {
                            if (modelsToImport.FirstOrDefault(m => m.ModelUri == model.ModelUri) == null)
                            {
                                modelsToImport.Add(model);
                                if (primeEFCacheTask != null)
                                {
                                    // First actual import: wait for the background EF cache load to finish.
                                    _logger.LogTrace($"Timestamp||ImportId:{logId}||Waiting for EF cache to load");
                                    await primeEFCacheTask;
                                    var endEFCache = sw.Elapsed;
                                    _logger.LogTrace($"Timestamp||ImportId:{logId}||Finished loading EF cache: {endEFCache - startEFCache}");
                                    primeEFCacheTask = null;
                                }
                                var items = await importer.ImportNodeSetModelAsync(model, userToken);
                                if (items != null)
                                {
                                    foreach (var item in items)
                                    {
                                        profileItems[item.Key] = item.Value;
                                    }
                                }
                                // Uncomment to test nodesetmodel fidelity vs. profile import/export
                                //string xmlNodeSet = null;
                                //using (var xmlNodeSetStream = new MemoryStream())
                                //{
                                //    if (importer.ExportNodeSet(profileAndNodeSet.Profile, xmlNodeSetStream, userToken, null))
                                //    {
                                //        xmlNodeSet = Encoding.UTF8.GetString(xmlNodeSetStream.ToArray());
                                //    }
                                //}
                                //File.WriteAllText($"{profileAndNodeSet.Profile.Namespace.Replace("http://", "").Replace("/", ".")}reexported.xml", xmlNodeSet);
                            }
                            (importer.Logger as LoggerCapture).LogList = null;
                            if (logList.Any())
                            {
                                nodesetWarnings.Add(new WarningsByNodeSet() { ProfileId = profileAndNodeSet.Profile.ID.Value, Key = profileAndNodeSet.Profile.ToString(), Warnings = logList });
                                //nodesetWarnings[profileAndNodeSet.Profile.ToString()] = logList;
                            }
                        }
                    }
                }
                if (profileItems.Any())
                {
                    result = profileItems.Last().Value.ID;
                }
                //foreach(var profileItem in profileItems)
                //{
                //    result = await ImportInternalAsync(profileItem);
                //}
                result = 1; // TOD: OPC imported profiles don't get a profiletype when being read back for some reason
                sw.Stop();
                var elapsed = sw.Elapsed;
                var elapsedMsg = $"{ elapsed.Minutes }:{ elapsed.Seconds} (min:sec)";
                _logger.LogTrace($"Timestamp||ImportId:{logId}||Import time: {elapsedMsg}, Files: {fileNames} "); //use warning so it shows in app log in db
                //return success message object
                filesImportedMsg = $"Imported File{(nodeSetXmlList.Count.Equals(1) ? "" : "s")}: {fileNames}";
                await CreateImportLogMessage(dalImportLog, logId, userToken, $"{filesImportedMsg}", TaskStatusEnum.Completed);
            }
            catch (Exception e)
            {
                sw.Stop();
                var elapsed2 = sw.Elapsed;
                var elapsedMsg2 = $"{ elapsed2.Minutes }:{ elapsed2.Seconds} (min:sec)";
                _logger.LogWarning($"Timestamp||ImportId:{logId}||Import time before failure: {elapsedMsg2}, Files: {fileNames} "); //use warning so it shows in app log in db
                //log complete message to logger and abbreviated message to user.
                _logger.LogCritical(e, $"ImportId:{logId}||ImportService|ImportOpcUaProfile|{e.Message}");
                //TBD - once we stabilize, take out the specific exception message returned to user because user should not see a code message.
                dalProfile.RollbackTransaction();
                var message = e.InnerException != null ? e.InnerException.Message : e.Message;
                await CreateImportLogMessage(GetImportLogDalIsolated(), logId, userToken, $"An error occurred during the import: {message}.<br/>{filesImportedMsg}", TaskStatusEnum.Failed);
                //return;
            }
        }
        catch (Exception ex)
        {
            // NOTE(review): ex is passed as a message format argument here (and in the warnings
            // catch below) instead of via the LogCritical(Exception, string, ...) overload, so the
            // stack trace is not recorded — should be _logger.LogCritical(ex, "...").
            _logger.LogCritical($"ImportId:{logId}||ImportOpcUaNodeSet error", ex);
            dalProfile.RollbackTransaction();
            await CreateImportLogMessage(GetImportLogDalIsolated(), logId, userToken, $"An error occurred during the import: {ex.Message}.<br/>{filesImportedMsg}", TaskStatusEnum.Failed);
            //return;
        }
        //handle import warnings. Save to DB for each nodeset / profile.
        //Store for later use when we export profile.
        try
        {
            foreach (var warningList in nodesetWarnings)
            {
                //save each nodesets warnings to the DB...for display upon export
                //don't show a warning message on the import ui at this point.
                await CreateImportLogWarnings(dalImportLog, logId, warningList, userToken);
                //var msgCount = warningList.Warnings.Count == 1 ? "There is 1 warning." : $"There are {warningList.Warnings.Count} warnings.";
                //filesImportedMsg += $"\r\nWarning: Some data in nodeset {warningList.Key} is not supported by this editor and will be lost if exported. {msgCount}";
            }
        }
        catch (Exception ex)
        {
            var message = ex.InnerException != null ? ex.InnerException.Message : ex.Message;
            _logger.LogCritical($"ImportId:{logId}||ImportOpcUaNodeSet||Save Import Profile Warnings||error||{message}", ex);
        }
    } //end createScope using
}
/// <summary>
/// Persists a single node set model and its owning profile: upserts the profile's node set
/// files and the profile itself, imports the profile type definitions, and commits everything
/// in one DAL transaction.
/// </summary>
/// <param name="nodeSetModel">The node set to import. Its CustomState must hold the owning <see cref="ProfileModel"/> (cast below).</param>
/// <param name="userToken">The importing user; replaced by the global user (and the author cleared) for core OPC UA node sets.</param>
/// <returns>The dictionary of imported profile type definitions produced by ImportProfileItems.</returns>
public async System.Threading.Tasks.Task<Dictionary<string, ProfileTypeDefinitionModel>> ImportNodeSetModelAsync(NodeSetModel nodeSetModel, UserToken userToken)
{
#if NODESETDBTEST
    {
        var sw2 = Stopwatch.StartNew();
        Logger.LogTrace($"Saving NodeSetModel");
        foreach (var nodeSet in NodesetModels.Where(ns => ns.Key != nodeSetModel.ModelUri))
        {
            nsDBContext.NodeSets.Attach(nodeSet.Value);
        }
        nsDBContext.NodeSets.Add(nodeSetModel);
        nsDBContext.SaveChanges();
        Logger.LogTrace($"Saved NodeSetModel after {sw2.Elapsed}");
        var savedModel = nsDBContext.NodeSets
            .Where(m => m.ModelUri == nodeSetModel.ModelUri && m.PublicationDate == nodeSetModel.PublicationDate)
            .FirstOrDefault();
        //.ToList();
        //var savedModel2 = nsDBContext.NodeSets.Find(nodeSetModel.ModelUri, nodeSetModel.PublicationDate);
    }
#endif
    ProfileModel profile = (ProfileModel)nodeSetModel.CustomState;
    var authorToken = userToken;
    if (_coreNodeSetUris.Contains(profile.Namespace))
    {
        // Core OPC UA node sets are written as the shared global user with no individual author.
        userToken = UserToken.GetGlobalUser(userToken);
        authorToken = null;
    }
    _dal.StartTransaction();
    foreach (var nsFile in profile.NodeSetFiles)
    {
        await _nsFileDal.Upsert(nsFile, userToken, true);
    }
    // Return value intentionally discarded; the profile's ID is re-read below if needed.
    await _nsDal.Upsert(profile, userToken, true);
    var dalContext = new DALContext(this, userToken, authorToken, false);
    var profileItems = ImportProfileItems(nodeSetModel, dalContext);
    var sw = Stopwatch.StartNew();
    Logger.LogTrace("Committing transaction"); // fixed typo ("Commiting") and dropped needless interpolation
    await _dal.CommitTransactionAsync();
    Logger.LogTrace($"Committed transaction after {sw.Elapsed}");
    // TODO figure out why the InstanceParent property doesn't get written properly: this fixup only works for some cases and is very slow
    //foreach (var item in dalContext.profileItems.Values.Where(pi => pi.InstanceParent != null))
    //{
    //    var existingItem = await _dal.GetExistingAsync(item, userId);
    //    if (existingItem.InstanceParent == null)
    //    {
    //        existingItem.InstanceParent = item.InstanceParent;
    //        try
    //        {
    //            await dalContext.UpsertAsync(existingItem, true);
    //        }
    //        catch (Exception ex)
    //        {
    //            Logger.LogError(ex.InnerException != null ? ex.InnerException : ex, $"Error updating instance parent for {existingItem} to {item.InstanceParent}");
    //        }
    //    }
    //}
    if ((profile.ID ?? 0) == 0)
    {
        // Ensure that the Profile has an ID, as it is referenced by the imported NodeModels.
        var writtenProfile = await _nsDal.GetExistingAsync(profile, userToken);
        profile.ID = writtenProfile?.ID;
    }
    return profileItems;
}