/// <summary>
/// Transforms a published nodes model connection header to a Connection Model object.
/// </summary>
/// <param name="model">The published nodes entry to read the connection header from.</param>
/// <param name="standaloneCliModel">The standalone command line arguments (currently unused here).</param>
/// <returns>A connection model describing endpoint, security and credentials.</returns>
public ConnectionModel ToConnectionModel(PublishedNodesEntryModel model,
    StandaloneCliModel standaloneCliModel) {
    // Resolve credentials only for username/password authentication; all other
    // modes map to an anonymous (null) user.
    // NOTE(review): this blocks on an async call (sync-over-async); kept as-is
    // because making this method async would break the public signature.
    var credential = model.OpcAuthenticationMode == OpcAuthenticationMode.UsernamePassword
        ? ToUserNamePasswordCredentialAsync(model).GetAwaiter().GetResult()
        : null;
    return new ConnectionModel {
        Group = model.DataSetWriterGroup,
        // The DataSetWriterId is deliberately excluded: it is not part of the
        // connection identity used to group entries.
        Endpoint = new EndpointModel {
            Url = model.EndpointUrl?.OriginalString,
            SecurityMode = model.UseSecurity ? SecurityMode.Best : SecurityMode.None,
        },
        User = credential,
    };
}
/// <summary>
/// Read monitored item jobs from the published nodes entries: groups entries by
/// connection, batches their nodes into data set sources, and emits one writer
/// group job per connection group.
/// </summary>
/// <param name="items">The published nodes entries to convert; null yields an empty result.</param>
/// <param name="standaloneCliModel">The standalone command line arguments</param>
/// <returns>The converted writer group jobs; empty on null input or conversion failure.</returns>
public IEnumerable<WriterGroupJobModel> ToWriterGroupJobs(
    IEnumerable<PublishedNodesEntryModel> items,
    StandaloneCliModel standaloneCliModel) {
    if (items == null) {
        return Enumerable.Empty<WriterGroupJobModel>();
    }
    var sw = Stopwatch.StartNew();
    try {
        // note: do not remove 'unnecessary' .ToList(),
        // the grouping of operations improves perf by 30%

        // Group by connection
        var group = items.GroupBy(
            item => ToConnectionModel(item, standaloneCliModel),
            // Select and batch nodes into published data set sources
            item => GetNodeModels(item, standaloneCliModel),
            // Comparer for connection information
            new FuncCompare<ConnectionModel>((x, y) => x.IsSameAs(y))
        ).ToList();
        var opcNodeModelComparer = new OpcNodeModelComparer();
        var flattenedGroups = group.Select(
            group => group
            // Flatten all nodes for the same connection and group by publishing interval
            // then batch in chunks for max 1000 nodes and create data sets from those.
            .Flatten()
            .GroupBy(n => (n.Item1, n.Item2.OpcPublishingIntervalTimespan))
            .SelectMany(
                n => n
                .Distinct(opcNodeModelComparer)
                .Batch(standaloneCliModel.MaxNodesPerDataSet.GetValueOrDefault(1000))
            ).ToList()
            .Select(
                opcNodes => new PublishedDataSetSourceModel {
                    Connection = new ConnectionModel {
                        Endpoint = group.Key.Endpoint.Clone(),
                        User = group.Key.User.Clone(),
                        Diagnostics = group.Key.Diagnostics.Clone(),
                        Group = group.Key.Group,
                        // add DataSetWriterId for further use
                        Id = opcNodes.First().Item1,
                    },
                    SubscriptionSettings = new PublishedDataSetSettingsModel {
                        PublishingInterval = GetPublishingIntervalFromNodes(opcNodes),
                        ResolveDisplayName = standaloneCliModel.FetchOpcNodeDisplayName,
                        LifeTimeCount = (uint)_clientConfig.MinSubscriptionLifetime,
                        MaxKeepAliveCount = _clientConfig.MaxKeepAliveCount
                    },
                    PublishedVariables = new PublishedDataItemsModel {
                        PublishedData = opcNodes.Select(node => new PublishedDataSetVariableModel {
                            // Identifier to show for notification in payload of IoT Hub method
                            // Prio 1: DataSetFieldId (need to be read from message)
                            // Prio 2: DisplayName - nothing to do, because notification.Id
                            //         already contains DisplayName
                            // Prio 3: NodeId as configured; Id remains null in this case
                            Id = !string.IsNullOrEmpty(node.Item2.DataSetFieldId)
                                ? node.Item2.DataSetFieldId
                                : node.Item2.DisplayName,
                            PublishedVariableNodeId = node.Item2.Id,
                            // At this point in time the next values are ensured to be filled in with
                            // the appropriate value: configured or default
                            PublishedVariableDisplayName = node.Item2.DisplayName,
                            SamplingInterval = node.Item2.OpcSamplingIntervalTimespan,
                            HeartbeatInterval = node.Item2.HeartbeatIntervalTimespan,
                            QueueSize = node.Item2.QueueSize,
                            // ToDo: Implement mechanism for SkipFirst.
                            SkipFirst = node.Item2.SkipFirst,
                        }).ToList()
                    }
                }
            ).ToList()
        ).ToList();
        if (!flattenedGroups.Any()) {
            _logger.Information("No OpcNodes after job conversion.");
            return Enumerable.Empty<WriterGroupJobModel>();
        }
        // FIX: materialize the result inside the try block. Previously this was
        // a deferred query returned to the caller, which (a) rebuilt every job
        // model on each enumeration (Any(), the logging loop, and the caller),
        // and (b) deferred job construction past this method's catch handler so
        // conversion failures were never logged here.
        var result = flattenedGroups
            .Select(dataSetSourceBatches => dataSetSourceBatches.Any()
                ? new WriterGroupJobModel {
                    MessagingMode = standaloneCliModel.MessagingMode,
                    Engine = _engineConfig == null ? null : new EngineConfigurationModel {
                        BatchSize = _engineConfig.BatchSize,
                        BatchTriggerInterval = _engineConfig.BatchTriggerInterval,
                        DiagnosticsInterval = _engineConfig.DiagnosticsInterval,
                        MaxMessageSize = _engineConfig.MaxMessageSize,
                        MaxOutgressMessages = _engineConfig.MaxOutgressMessages,
                        EnableRoutingInfo = _engineConfig.EnableRoutingInfo,
                    },
                    WriterGroup = new WriterGroupModel {
                        MessageType = standaloneCliModel.MessageEncoding,
                        WriterGroupId = dataSetSourceBatches.First().Connection.Group,
                        DataSetWriters = dataSetSourceBatches.Select(dataSetSource => new DataSetWriterModel {
                            DataSetWriterId = GetUniqueWriterId(dataSetSourceBatches, dataSetSource),
                            DataSet = new PublishedDataSetModel {
                                DataSetSource = new PublishedDataSetSourceModel {
                                    Connection = new ConnectionModel {
                                        Endpoint = dataSetSource.Connection.Endpoint.Clone(),
                                        User = dataSetSource.Connection.User.Clone(),
                                        Diagnostics = dataSetSource.Connection.Diagnostics.Clone(),
                                        Group = dataSetSource.Connection.Group,
                                        Id = GetUniqueWriterId(dataSetSourceBatches, dataSetSource),
                                    },
                                    PublishedEvents = dataSetSource.PublishedEvents.Clone(),
                                    PublishedVariables = dataSetSource.PublishedVariables.Clone(),
                                    SubscriptionSettings = dataSetSource.SubscriptionSettings.Clone(),
                                },
                            },
                            // Field mask: a few fields are only emitted in full-featured mode.
                            DataSetFieldContentMask =
                                DataSetFieldContentMask.StatusCode |
                                DataSetFieldContentMask.SourceTimestamp |
                                (standaloneCliModel.FullFeaturedMessage ? DataSetFieldContentMask.ServerTimestamp : 0) |
                                DataSetFieldContentMask.NodeId |
                                DataSetFieldContentMask.DisplayName |
                                (standaloneCliModel.FullFeaturedMessage ? DataSetFieldContentMask.ApplicationUri : 0) |
                                DataSetFieldContentMask.EndpointUrl |
                                (standaloneCliModel.FullFeaturedMessage ? DataSetFieldContentMask.ExtensionFields : 0),
                            MessageSettings = new DataSetWriterMessageSettingsModel() {
                                DataSetMessageContentMask =
                                    (standaloneCliModel.FullFeaturedMessage ? DataSetContentMask.Timestamp : 0) |
                                    DataSetContentMask.MetaDataVersion |
                                    DataSetContentMask.DataSetWriterId |
                                    DataSetContentMask.MajorVersion |
                                    DataSetContentMask.MinorVersion |
                                    (standaloneCliModel.FullFeaturedMessage ? DataSetContentMask.SequenceNumber : 0)
                            }
                        }).ToList(),
                        MessageSettings = new WriterGroupMessageSettingsModel() {
                            NetworkMessageContentMask =
                                NetworkMessageContentMask.PublisherId |
                                NetworkMessageContentMask.WriterGroupId |
                                NetworkMessageContentMask.NetworkMessageNumber |
                                NetworkMessageContentMask.SequenceNumber |
                                NetworkMessageContentMask.PayloadHeader |
                                NetworkMessageContentMask.Timestamp |
                                NetworkMessageContentMask.DataSetClassId |
                                NetworkMessageContentMask.NetworkMessageHeader |
                                NetworkMessageContentMask.DataSetMessageHeader
                        }
                    }
                }
                : null)
            .Where(job => job != null)
            .ToList();
        // Log per-writer node counts and a grand total for diagnostics.
        var counter = 0;
        foreach (var job in result) {
            if (job?.WriterGroup != null) {
                _logger.Debug("groupId: {group}", job.WriterGroup.WriterGroupId);
                foreach (var dataSetWriter in job.WriterGroup.DataSetWriters) {
                    int count = dataSetWriter.DataSet?.DataSetSource?.PublishedVariables?.PublishedData?.Count ?? 0;
                    counter += count;
                    _logger.Debug("writerId: {writer} nodes: {count}", dataSetWriter.DataSetWriterId, count);
                }
            }
        }
        _logger.Information("Total count of OpcNodes after job conversion: {count}", counter);
        return result;
    }
    catch (Exception ex) {
        // Best-effort conversion: log and fall through to the empty result.
        _logger.Error(ex, "failed to convert the published nodes.");
    }
    finally {
        _logger.Information("Converted published nodes entry models to jobs in {elapsed}", sw.Elapsed);
        sw.Stop();
    }
    return Enumerable.Empty<WriterGroupJobModel>();
}