/// <summary>
/// Extract publishing interval from nodes
/// </summary>
/// <param name="opcNodes"></param>
/// <param name="legacyCliModel">The legacy command line arguments</param>
/// <returns></returns>
private static TimeSpan? GetPublishingIntervalFromNodes(IEnumerable<OpcNodeModel> opcNodes,
    LegacyCliModel legacyCliModel) {
    var interval = opcNodes
        .FirstOrDefault(x => x.OpcPublishingInterval != null)?.OpcPublishingIntervalTimespan;
    return interval ?? legacyCliModel.DefaultPublishingInterval;
}
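
// Illustrative behavior (values are made up, and it is assumed that
// OpcPublishingIntervalTimespan expresses the node's OpcPublishingInterval
// as a TimeSpan): for nodes with OpcPublishingInterval values
// [null, 2000, 1000], the first non-null entry wins and the method returns
// 2 seconds; if every node leaves the interval unset, the configured
// legacyCliModel.DefaultPublishingInterval is returned instead.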
/// <summary>
/// Read monitored item job from reader
/// </summary>
/// <param name="publishedNodesFile"></param>
/// <param name="legacyCliModel">The legacy command line arguments</param>
/// <returns></returns>
public IEnumerable<WriterGroupJobModel> Read(TextReader publishedNodesFile,
    LegacyCliModel legacyCliModel) {
    var sw = Stopwatch.StartNew();
    _logger.Debug("Reading published nodes file ({elapsed})", sw.Elapsed);
    var items = _serializer.Deserialize<List<PublishedNodesEntryModel>>(
        publishedNodesFile);
    _logger.Information(
        "Read {count} items from published nodes file in {elapsed}",
        items.Count, sw.Elapsed);
    sw.Restart();
    var jobs = ToWriterGroupJobs(items, legacyCliModel);
    _logger.Information("Converted items to jobs in {elapsed}", sw.Elapsed);
    return jobs;
}
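
// Usage sketch (illustrative only; the converter instance, file name and
// legacyCliModel values below are assumptions, not part of this class):
//
//   using (var reader = new StreamReader("publishednodes.json")) {
//       var jobs = converter.Read(reader, legacyCliModel);
//       foreach (var job in jobs) {
//           Console.WriteLine(job.WriterGroup.WriterGroupId);
//       }
//   }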
/// <summary>
/// Convert published nodes entries to writer group jobs
/// </summary>
/// <param name="items"></param>
/// <param name="legacyCliModel">The legacy command line arguments</param>
/// <returns></returns>
private IEnumerable<WriterGroupJobModel> ToWriterGroupJobs(
    IEnumerable<PublishedNodesEntryModel> items,
    LegacyCliModel legacyCliModel) {
    if (items == null) {
        return Enumerable.Empty<WriterGroupJobModel>();
    }
    try {
        var result = items
            // Group by connection
            .GroupBy(item => new ConnectionModel {
                OperationTimeout = legacyCliModel.OperationTimeout,
                Id = item.DataSetWriterId,
                Group = item.DataSetWriterGroup,
                Endpoint = new EndpointModel {
                    Url = item.EndpointUrl.OriginalString,
                    SecurityMode = item.UseSecurity == false &&
                        item.OpcAuthenticationMode != OpcAuthenticationMode.UsernamePassword ?
                            SecurityMode.None : SecurityMode.Best
                },
                User = item.OpcAuthenticationMode != OpcAuthenticationMode.UsernamePassword ?
                    null : ToUserNamePasswordCredentialAsync(item).Result,
            },
            // Select and batch nodes into published data set sources
            item => GetNodeModels(item, legacyCliModel.ScaleTestCount.GetValueOrDefault(1)),
            // Comparer for connection information
            new FuncCompare<ConnectionModel>((x, y) => x.IsSameAs(y)))
            .Select(group => group
                // Flatten all nodes for the same connection and group by publishing interval
                // then batch in chunks for max 1000 nodes and create data sets from those.
                .Flatten()
                .GroupBy(n => n.OpcPublishingInterval)
                .SelectMany(n => n
                    .Distinct((a, b) => a.Id == b.Id &&
                        a.DisplayName == b.DisplayName &&
                        a.DataSetFieldId == b.DataSetFieldId &&
                        a.OpcSamplingInterval == b.OpcSamplingInterval)
                    .Batch(1000))
                .Select(opcNodes => new PublishedDataSetSourceModel {
                    Connection = group.Key.Clone(),
                    SubscriptionSettings = new PublishedDataSetSettingsModel {
                        PublishingInterval = GetPublishingIntervalFromNodes(opcNodes, legacyCliModel),
                        ResolveDisplayName = legacyCliModel.FetchOpcNodeDisplayName
                    },
                    PublishedVariables = new PublishedDataItemsModel {
                        PublishedData = opcNodes
                            .Select(node => new PublishedDataSetVariableModel {
                                // This is the monitored item id, not the nodeId!
                                // Use the display name if any, otherwise the nodeId
                                Id = string.IsNullOrEmpty(node.DisplayName) ?
                                    string.IsNullOrEmpty(node.DataSetFieldId) ?
                                        node.Id : node.DataSetFieldId : node.DisplayName,
                                PublishedVariableNodeId = node.Id,
                                PublishedVariableDisplayName = node.DisplayName,
                                SamplingInterval = node.OpcSamplingIntervalTimespan ??
                                    legacyCliModel.DefaultSamplingInterval,
                                HeartbeatInterval = node.HeartbeatIntervalTimespan.HasValue ?
                                    node.HeartbeatIntervalTimespan.Value :
                                    legacyCliModel.DefaultHeartbeatInterval,
                                QueueSize = legacyCliModel.DefaultQueueSize,
                                // TODO: skip first?
                                // SkipFirst = opcNode.SkipFirst,
                            }).ToList()
                    }
                }))
            .Select(dataSetSourceBatches => new WriterGroupJobModel {
                MessagingMode = legacyCliModel.MessagingMode,
                Engine = _config == null ? null : new EngineConfigurationModel {
                    BatchSize = _config.BatchSize,
                    BatchTriggerInterval = _config.BatchTriggerInterval,
                    DiagnosticsInterval = _config.DiagnosticsInterval,
                    MaxMessageSize = _config.MaxMessageSize,
                    MaxEgressMessageQueue = _config.MaxEgressMessageQueue
                },
                WriterGroup = new WriterGroupModel {
                    MessageType = legacyCliModel.MessageEncoding,
                    WriterGroupId = !string.IsNullOrEmpty(dataSetSourceBatches.First().Connection.Group) ?
                        $"{dataSetSourceBatches.First().Connection.Group}" :
                        $"{dataSetSourceBatches.First().Connection.Endpoint.Url}_" +
                        $"{new ConnectionIdentifier(dataSetSourceBatches.First().Connection)}",
                    DataSetWriters = dataSetSourceBatches.Select(dataSetSource => new DataSetWriterModel {
                        DataSetWriterId = !string.IsNullOrEmpty(dataSetSource.Connection.Id) ?
                            $"{dataSetSource.Connection.Id}" :
                            $"{dataSetSource.Connection.Endpoint.Url}_" +
                            $"{dataSetSource.GetHashSafe()}",
                        DataSet = new PublishedDataSetModel {
                            DataSetSource = dataSetSource.Clone(),
                        },
                        DataSetFieldContentMask =
                            DataSetFieldContentMask.StatusCode |
                            DataSetFieldContentMask.SourceTimestamp |
                            (legacyCliModel.FullFeaturedMessage ? DataSetFieldContentMask.ServerTimestamp : 0) |
                            DataSetFieldContentMask.NodeId |
                            DataSetFieldContentMask.DisplayName |
                            DataSetFieldContentMask.ApplicationUri |
                            (legacyCliModel.FullFeaturedMessage ? DataSetFieldContentMask.EndpointUrl : 0) |
                            (legacyCliModel.FullFeaturedMessage ? DataSetFieldContentMask.ExtensionFields : 0),
                        MessageSettings = new DataSetWriterMessageSettingsModel() {
                            DataSetMessageContentMask =
                                (legacyCliModel.FullFeaturedMessage ? DataSetContentMask.Timestamp : 0) |
                                DataSetContentMask.MetaDataVersion |
                                DataSetContentMask.DataSetWriterId |
                                DataSetContentMask.MajorVersion |
                                DataSetContentMask.MinorVersion |
                                (legacyCliModel.FullFeaturedMessage ? DataSetContentMask.SequenceNumber : 0)
                        }
                    }).ToList(),
                    MessageSettings = new WriterGroupMessageSettingsModel() {
                        NetworkMessageContentMask =
                            NetworkMessageContentMask.PublisherId |
                            NetworkMessageContentMask.WriterGroupId |
                            NetworkMessageContentMask.NetworkMessageNumber |
                            NetworkMessageContentMask.SequenceNumber |
                            NetworkMessageContentMask.PayloadHeader |
                            NetworkMessageContentMask.Timestamp |
                            NetworkMessageContentMask.DataSetClassId |
                            NetworkMessageContentMask.NetworkMessageHeader |
                            NetworkMessageContentMask.DataSetMessageHeader
                    }
                }
            }).ToList();
        return result;
    }
    catch (Exception ex) {
        _logger.Error(ex, "Failed to convert the published nodes.");
    }
    return Enumerable.Empty<WriterGroupJobModel>();
}
/// <summary>
/// Convert published nodes entries to writer group jobs
/// </summary>
/// <param name="items"></param>
/// <param name="legacyCliModel">The legacy command line arguments</param>
/// <returns></returns>
private IEnumerable<WriterGroupJobModel> ToWriterGroupJobs(
    IEnumerable<PublishedNodesEntryModel> items,
    LegacyCliModel legacyCliModel) {
    if (items == null) {
        return Enumerable.Empty<WriterGroupJobModel>();
    }
    return items
        // Group by connection
        .GroupBy(item => new ConnectionModel {
            OperationTimeout = legacyCliModel.OperationTimeout,
            Endpoint = new EndpointModel {
                Url = item.EndpointUrl.OriginalString,
                SecurityMode = item.UseSecurity == false ?
                    SecurityMode.None : SecurityMode.Best
            },
            User = item.OpcAuthenticationMode != OpcAuthenticationMode.UsernamePassword ?
                null : ToUserNamePasswordCredentialAsync(item).Result
        },
        // Select and batch nodes into published data set sources
        item => GetNodeModels(item),
        // Comparer for connection information
        new FuncCompare<ConnectionModel>((x, y) => x.IsSameAs(y)))
        .Select(group => group
            // Flatten all nodes for the same connection and group by publishing interval
            // then batch in chunks for max 1000 nodes and create data sets from those.
            .Flatten()
            .GroupBy(n => n.OpcPublishingInterval)
            .SelectMany(n => n
                .Distinct((a, b) => a.Id == b.Id &&
                    a.OpcSamplingInterval == b.OpcSamplingInterval)
                .Batch(1000))
            .Select(opcNodes => new PublishedDataSetSourceModel {
                Connection = group.Key.Clone(),
                SubscriptionSettings = new PublishedDataSetSettingsModel {
                    PublishingInterval = GetPublishingIntervalFromNodes(opcNodes, legacyCliModel),
                    ResolveDisplayName = legacyCliModel.FetchOpcNodeDisplayName
                },
                PublishedVariables = new PublishedDataItemsModel {
                    PublishedData = opcNodes
                        .Select(node => new PublishedDataSetVariableModel {
                            Id = node.Id,
                            PublishedVariableNodeId = node.Id,
                            PublishedVariableDisplayName = node.DisplayName,
                            SamplingInterval = node.OpcSamplingIntervalTimespan ??
                                legacyCliModel.DefaultSamplingInterval ?? (TimeSpan?)null,
                            HeartbeatInterval = node.HeartbeatInterval == null ?
                                (TimeSpan?)null :
                                TimeSpan.FromSeconds(node.HeartbeatInterval.Value),
                            // Force the queue size to 2 so that we avoid data loss
                            // in case publishing interval and sampling interval are equal
                            QueueSize = 2
                            // TODO: skip first?
                            // SkipFirst = opcNode.SkipFirst,
                        })
                        .ToList()
                }
            }))
        .SelectMany(dataSetSourceBatches => dataSetSourceBatches
            .Select(dataSetSource => new WriterGroupJobModel {
                MessagingMode = legacyCliModel.MessagingMode,
                Engine = _config == null ? null : new EngineConfigurationModel {
                    BatchSize = _config.BatchSize,
                    DiagnosticsInterval = _config.DiagnosticsInterval,
                    MaxMessageSize = _config.MaxMessageSize
                },
                WriterGroup = new WriterGroupModel {
                    MessageType = MessageEncoding.Json,
                    WriterGroupId = dataSetSource.Connection.Endpoint.Url,
                    DataSetWriters = new List<DataSetWriterModel> {
                        new DataSetWriterModel {
                            DataSetWriterId = dataSetSource.Connection.Endpoint.Url,
                            DataSet = new PublishedDataSetModel {
                                DataSetSource = dataSetSource.Clone(),
                            },
                            DataSetFieldContentMask =
                                DataSetFieldContentMask.StatusCode |
                                DataSetFieldContentMask.SourceTimestamp |
                                (legacyCliModel.FullFeaturedMessage ? DataSetFieldContentMask.ServerTimestamp : 0) |
                                DataSetFieldContentMask.NodeId |
                                DataSetFieldContentMask.DisplayName |
                                DataSetFieldContentMask.ApplicationUri |
                                (legacyCliModel.FullFeaturedMessage ? DataSetFieldContentMask.EndpointUrl : 0) |
                                (legacyCliModel.FullFeaturedMessage ? DataSetFieldContentMask.ExtensionFields : 0),
                            MessageSettings = new DataSetWriterMessageSettingsModel() {
                                DataSetMessageContentMask =
                                    (legacyCliModel.FullFeaturedMessage ? DataSetContentMask.Timestamp : 0) |
                                    DataSetContentMask.MetaDataVersion |
                                    DataSetContentMask.DataSetWriterId |
                                    DataSetContentMask.MajorVersion |
                                    DataSetContentMask.MinorVersion |
                                    (legacyCliModel.FullFeaturedMessage ? DataSetContentMask.SequenceNumber : 0)
                            }
                        }
                    },
                    MessageSettings = new WriterGroupMessageSettingsModel() {
                        NetworkMessageContentMask =
                            NetworkMessageContentMask.PublisherId |
                            NetworkMessageContentMask.WriterGroupId |
                            NetworkMessageContentMask.NetworkMessageNumber |
                            NetworkMessageContentMask.SequenceNumber |
                            NetworkMessageContentMask.PayloadHeader |
                            NetworkMessageContentMask.Timestamp |
                            NetworkMessageContentMask.DataSetClassId |
                            NetworkMessageContentMask.NetworkMessageHeader |
                            NetworkMessageContentMask.DataSetMessageHeader
                    }
                }
            }));
}
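
// Illustrative note on the batching above (the numbers are made up): for a
// single endpoint with 2500 distinct nodes that share one publishing interval,
// Distinct followed by Batch(1000) yields three data set sources (1000, 1000
// and 500 nodes), and this overload then emits one WriterGroupJobModel per
// data set source.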