/// <summary>
/// Ten workers concurrently requesting jobs from an orchestrator backed by a
/// two-endpoint published nodes file: exactly two workers receive a job, and
/// the two jobs carry different configurations.
/// </summary>
public async Task GetAvailableJobAsyncMulithreading() {
    // Arrange - orchestrator reading Engine/publishednodes.json.
    var legacyCliModelProviderMock = new Mock<ILegacyCliModelProvider>();
    var agentConfigProviderMock = new Mock<IAgentConfigProvider>();
    var identityMock = new Mock<IIdentity>();
    var serializer = new NewtonSoftJsonSerializer();
    var jobSerializer = new PublisherJobSerializer(serializer);
    var jobConverter = new PublishedNodesJobConverter(TraceLogger.Create(), serializer);
    legacyCliModelProviderMock
        .Setup(p => p.LegacyCliModel)
        .Returns(new LegacyCliModel { PublishedNodesFile = "Engine/publishednodes.json" });
    agentConfigProviderMock.Setup(p => p.Config).Returns(new AgentConfigModel());
    var orchestrator = new LegacyJobOrchestrator(jobConverter,
        legacyCliModelProviderMock.Object, agentConfigProviderMock.Object,
        jobSerializer, TraceLogger.Create(), identityMock.Object);

    // Act - fire ten concurrent job requests, one per simulated worker.
    var tasks = new List<Task<JobProcessingInstructionModel>>();
    for (var workerId = 0; workerId < 10; workerId++) {
        tasks.Add(orchestrator.GetAvailableJobAsync(workerId.ToString(), new JobRequestModel()));
    }
    await Task.WhenAll(tasks);

    // Assert - only two requests got a job and the two configurations differ.
    Assert.Equal(2, tasks.Count(t => t.Result != null));
    var distinctConfigurations = tasks
        .Where(t => t.Result != null)
        .Select(t => t.Result.Job.JobConfiguration)
        .Distinct()
        .ToList();
    Assert.Equal(2, distinctConfigurations.Count);
}
/// <summary>
/// An orchestrator backed by a published nodes file with two distinct assets
/// hands out exactly two jobs with different job ids; a third request gets
/// no job.
/// </summary>
public async Task Test_PnJson_With_Multiple_Jobs_Expect_DifferentJobIds() {
    // Arrange - orchestrator reading Engine/pn_assets.json.
    var legacyCliModelProviderMock = new Mock<ILegacyCliModelProvider>();
    var agentConfigProviderMock = new Mock<IAgentConfigProvider>();
    var identityMock = new Mock<IIdentity>();
    var newtonSoftJsonSerializer = new NewtonSoftJsonSerializer();
    var jobSerializer = new PublisherJobSerializer(newtonSoftJsonSerializer);
    var publishedNodesJobConverter = new PublishedNodesJobConverter(
        TraceLogger.Create(), newtonSoftJsonSerializer);
    var legacyCliModel = new LegacyCliModel { PublishedNodesFile = "Engine/pn_assets.json" };
    legacyCliModelProviderMock.Setup(p => p.LegacyCliModel).Returns(legacyCliModel);
    agentConfigProviderMock.Setup(p => p.Config).Returns(new AgentConfigModel());
    var converter = new LegacyJobOrchestrator(publishedNodesJobConverter,
        legacyCliModelProviderMock.Object, agentConfigProviderMock.Object,
        jobSerializer, TraceLogger.Create(), identityMock.Object);

    // Act - await the async calls instead of blocking with
    // GetAwaiter().GetResult(); blocking on async work in a sync test risks
    // thread-pool starvation and deadlocks ("async all the way").
    var job1 = await converter.GetAvailableJobAsync(1.ToString(), new JobRequestModel());
    Assert.NotNull(job1);
    var job2 = await converter.GetAvailableJobAsync(2.ToString(), new JobRequestModel());
    Assert.NotNull(job2);
    var job3 = await converter.GetAvailableJobAsync(3.ToString(), new JobRequestModel());
    Assert.Null(job3);

    // Assert - the two handed-out jobs must not share an id.
    Assert.NotEqual(job1.Job.Id, job2.Job.Id);
}
/// <summary>
/// Read monitored item jobs from a published nodes file reader.
/// </summary>
/// <param name="publishedNodesFile">Reader over the published nodes json content</param>
/// <param name="legacyCliModel">The legacy command line arguments</param>
/// <returns>Writer group jobs converted from the file entries</returns>
public IEnumerable<WriterGroupJobModel> Read(TextReader publishedNodesFile,
    LegacyCliModel legacyCliModel) {
    var jsonSerializer = JsonSerializer.CreateDefault();
    var sw = Stopwatch.StartNew();
    using (var reader = new JsonTextReader(publishedNodesFile)) {
        // Fixed unbalanced message template: "({elapsed}" -> "({elapsed})".
        _logger.Debug("Reading published nodes file ({elapsed})", sw.Elapsed);
        var items = jsonSerializer.Deserialize<List<PublishedNodesEntryModel>>(reader);
        if (items == null) {
            // Deserialize returns null for empty or literal-null json content -
            // treat that as "no entries" instead of throwing a
            // NullReferenceException on items.Count below.
            items = new List<PublishedNodesEntryModel>();
        }
        _logger.Information(
            "Read {count} items from published nodes file in {elapsed}",
            items.Count, sw.Elapsed);
        sw.Restart();
        var jobs = ToWriterGroupJobs(items, legacyCliModel);
        _logger.Information("Converted items to jobs in {elapsed}", sw.Elapsed);
        return jobs;
    }
}
/// <summary>
/// Convert published nodes file entries into writer group jobs: entries are
/// grouped by connection, the grouped nodes are de-duplicated and batched,
/// and one job is emitted per batch. This variant always emits Samples
/// messaging mode, leaves the writer group id unset and uses a fresh guid as
/// data set writer id.
/// </summary>
/// <param name="items">Deserialized published nodes file entries</param>
/// <param name="legacyCliModel">The legacy command line arguments</param>
/// <returns>One writer group job per connection/interval batch; empty when
/// <paramref name="items"/> is null</returns>
private IEnumerable<WriterGroupJobModel> ToWriterGroupJobs(
    IEnumerable<PublishedNodesEntryModel> items, LegacyCliModel legacyCliModel) {
    if (items == null) {
        return (Enumerable.Empty<WriterGroupJobModel>());
    }
    return (items
        // Group by connection - entries with the same endpoint, security and
        // user credentials end up in one group.
        .GroupBy(item => new ConnectionModel {
            Endpoint = new EndpointModel {
                Url = item.EndpointUrl.OriginalString,
                // Only an explicit UseSecurity == false disables security;
                // null is treated like true.
                SecurityMode = item.UseSecurity == false ?
                    SecurityMode.None : SecurityMode.Best
            },
            // NOTE(review): when _cryptoProvider is null this still falls
            // through to ToUserNamePasswordCredentialAsync regardless of the
            // authentication mode, and it blocks on the task via .Result -
            // verify both are intended (the decrypt presumably completes
            // synchronously in-process).
            User = _cryptoProvider != null &&
                item.OpcAuthenticationMode != OpcAuthenticationMode.UsernamePassword ?
                    null : ToUserNamePasswordCredentialAsync(
                        item.EncryptedAuthUsername, item.EncryptedAuthPassword).Result
        },
        // Select and batch nodes into published data set sources
        item => GetNodeModels(item),
        // Comparer for connection information
        new FuncCompare<ConnectionModel>((x, y) => x.IsSameAs(y)))
        .Select(group => group
            // Flatten all nodes for the same connection and group by publishing interval
            // then batch in chunks for max 1000 nodes and create data sets from those.
            .Flatten()
            .GroupBy(n => n.OpcPublishingInterval)
            .SelectMany(n => n
                // De-duplicate nodes sharing both node id and sampling interval.
                .Distinct((a, b) => a.Id == b.Id &&
                    a.OpcSamplingInterval == b.OpcSamplingInterval)
                .Batch(1000))
            .Select(opcNodes => new PublishedDataSetSourceModel {
                Connection = group.Key.Clone(),
                SubscriptionSettings = new PublishedDataSetSettingsModel {
                    PublishingInterval = GetPublishingIntervalFromNodes(opcNodes, legacyCliModel),
                },
                PublishedVariables = new PublishedDataItemsModel {
                    PublishedData = opcNodes
                        .Select(node => new PublishedDataSetVariableModel {
                            Id = node.Id,
                            PublishedVariableNodeId = node.Id,
                            // Per-node sampling interval wins over the command
                            // line default; otherwise left unset.
                            SamplingInterval = node.OpcSamplingIntervalTimespan ??
                                legacyCliModel.DefaultSamplingInterval ?? (TimeSpan?)null
                            // TODO: Link all to server time sampled at heartbeat interval
                            // HeartbeatInterval = opcNode.HeartbeatInterval == null ? (TimeSpan?)null :
                            //     TimeSpan.FromMilliseconds(opcNode.HeartbeatInterval.Value),
                            // SkipFirst = opcNode.SkipFirst,
                            // DisplayName = opcNode.DisplayName
                        })
                        .ToList()
                }
            }))
        // One writer group job per data set source batch.
        .SelectMany(dataSetSourceBatches => dataSetSourceBatches
            .Select(dataSetSource => new WriterGroupJobModel {
                MessagingMode = MessagingMode.Samples,
                Engine = _config == null ? null : new EngineConfigurationModel {
                    BatchSize = _config.BatchSize,
                    DiagnosticsInterval = _config.DiagnosticsInterval
                },
                WriterGroup = new WriterGroupModel {
                    WriterGroupId = null,
                    DataSetWriters = new List<DataSetWriterModel> {
                        new DataSetWriterModel {
                            DataSetWriterId = Guid.NewGuid().ToString(),
                            DataSet = new PublishedDataSetModel {
                                DataSetSource = dataSetSource.Clone()
                            }
                        }
                    }
                }
            })));
}
/// <summary>
/// Extract the publishing interval to use for a batch of nodes: the first
/// node that explicitly configures one wins, otherwise the command line
/// default applies.
/// </summary>
/// <param name="opcNodes">Nodes that share one subscription</param>
/// <param name="legacyCliModel">The legacy command line arguments</param>
/// <returns>Publishing interval or null when nothing is configured</returns>
private static TimeSpan? GetPublishingIntervalFromNodes(
    IEnumerable<OpcNodeModel> opcNodes, LegacyCliModel legacyCliModel) {
    var configured = opcNodes
        .Where(node => node.OpcPublishingInterval != null)
        .Select(node => node.OpcPublishingIntervalTimespan)
        .FirstOrDefault();
    return configured ?? legacyCliModel.DefaultPublishingInterval;
}
/// <summary>
/// Convert published nodes file entries into writer group jobs: entries are
/// grouped by connection, the grouped nodes are de-duplicated and batched,
/// and one job is emitted per batch. This variant propagates the operation
/// timeout and messaging mode from the command line model, supports
/// clear-text credential fallback, and derives writer/group ids from the
/// module identity.
/// </summary>
/// <param name="items">Deserialized published nodes file entries</param>
/// <param name="legacyCliModel">The legacy command line arguments</param>
/// <returns>One writer group job per connection/interval batch; empty when
/// <paramref name="items"/> is null</returns>
private IEnumerable<WriterGroupJobModel> ToWriterGroupJobs(
    IEnumerable<PublishedNodesEntryModel> items, LegacyCliModel legacyCliModel) {
    if (items == null) {
        return (Enumerable.Empty<WriterGroupJobModel>());
    }
    return (items
        // Group by connection - entries with the same endpoint, security and
        // user credentials end up in one group.
        .GroupBy(item => new ConnectionModel {
            Endpoint = new EndpointModel {
                Url = item.EndpointUrl.OriginalString,
                // Only an explicit UseSecurity == false disables security;
                // null is treated like true.
                SecurityMode = item.UseSecurity == false ?
                    SecurityMode.None : SecurityMode.Best,
                OperationTimeout = legacyCliModel.OperationTimeout
            },
            User = item.OpcAuthenticationMode != OpcAuthenticationMode.UsernamePassword ?
                null :
                // if encrypted user is set and cryptoProvider is available, we use the encrypted credentials.
                // NOTE(review): blocks on the decrypt task via .Result -
                // presumably it completes synchronously; confirm no deadlock risk.
                (_cryptoProvider != null && !string.IsNullOrWhiteSpace(item.EncryptedAuthUsername)) ?
                    ToUserNamePasswordCredentialAsync(item.EncryptedAuthUsername,
                        item.EncryptedAuthPassword).Result :
                // if clear text credentials are set, we use them for authentication.
                !(string.IsNullOrWhiteSpace(item.OpcAuthenticationUsername)) ?
                    new CredentialModel {
                        Type = CredentialType.UserName,
                        Value = JToken.FromObject(new {
                            user = item.OpcAuthenticationUsername,
                            password = item.OpcAuthenticationPassword
                        })
                    } : null
        },
        // Select and batch nodes into published data set sources
        item => GetNodeModels(item),
        // Comparer for connection information
        new FuncCompare<ConnectionModel>((x, y) => x.IsSameAs(y)))
        .Select(group => group
            // Flatten all nodes for the same connection and group by publishing interval
            // then batch in chunks for max 1000 nodes and create data sets from those.
            .Flatten()
            .GroupBy(n => n.OpcPublishingInterval)
            .SelectMany(n => n
                // De-duplicate nodes sharing both node id and sampling interval.
                .Distinct((a, b) => a.Id == b.Id &&
                    a.OpcSamplingInterval == b.OpcSamplingInterval)
                .Batch(1000))
            .Select(opcNodes => new PublishedDataSetSourceModel {
                Connection = group.Key.Clone(),
                SubscriptionSettings = new PublishedDataSetSettingsModel {
                    PublishingInterval = GetPublishingIntervalFromNodes(opcNodes, legacyCliModel),
                    ResolveDisplayName = legacyCliModel.FetchOpcNodeDisplayName
                },
                PublishedVariables = new PublishedDataItemsModel {
                    PublishedData = opcNodes
                        .Select(node => new PublishedDataSetVariableModel {
                            Id = node.Id,
                            PublishedVariableNodeId = node.Id,
                            PublishedVariableDisplayName = node.DisplayName,
                            // Per-node sampling interval wins over the command
                            // line default; otherwise left unset.
                            SamplingInterval = node.OpcSamplingIntervalTimespan ??
                                legacyCliModel.DefaultSamplingInterval ?? (TimeSpan?)null
                            // TODO: Link all to server time sampled at heartbeat interval
                            // HeartbeatInterval = opcNode.HeartbeatInterval == null ? (TimeSpan?)null :
                            //     TimeSpan.FromMilliseconds(opcNode.HeartbeatInterval.Value),
                            // SkipFirst = opcNode.SkipFirst,
                        })
                        .ToList()
                }
            }))
        // One writer group job per data set source batch.
        .SelectMany(dataSetSourceBatches => dataSetSourceBatches
            .Select(dataSetSource => new WriterGroupJobModel {
                MessagingMode = legacyCliModel.MessagingMode,
                Engine = _config == null ? null : new EngineConfigurationModel {
                    BatchSize = _config.BatchSize,
                    DiagnosticsInterval = _config.DiagnosticsInterval
                },
                WriterGroup = new WriterGroupModel {
                    WriterGroupId = _identity.DeviceId + "_" + _identity.ModuleId,
                    DataSetWriters = new List<DataSetWriterModel> {
                        new DataSetWriterModel {
                            DataSetWriterId = _identity.DeviceId + "_" + _identity.ModuleId,
                            DataSet = new PublishedDataSetModel {
                                DataSetSource = dataSetSource.Clone(),
                            },
                            // Always publish status code, source timestamp and
                            // addressing fields; extra fields only in the
                            // full featured message mode.
                            DataSetFieldContentMask =
                                DataSetFieldContentMask.StatusCode |
                                DataSetFieldContentMask.SourceTimestamp |
                                (legacyCliModel.FullFeaturedMessage ?
                                    DataSetFieldContentMask.ServerTimestamp : 0) |
                                DataSetFieldContentMask.NodeId |
                                DataSetFieldContentMask.DisplayName |
                                DataSetFieldContentMask.ApplicationUri |
                                (legacyCliModel.FullFeaturedMessage ?
                                    DataSetFieldContentMask.EndpointUrl : 0) |
                                (legacyCliModel.FullFeaturedMessage ?
                                    DataSetFieldContentMask.ExtensionFields : 0),
                            MessageSettings = new DataSetWriterMessageSettingsModel() {
                                DataSetMessageContentMask =
                                    (legacyCliModel.FullFeaturedMessage ?
                                        DataSetContentMask.Timestamp : 0) |
                                    DataSetContentMask.MetaDataVersion |
                                    DataSetContentMask.Status |
                                    DataSetContentMask.DataSetWriterId |
                                    DataSetContentMask.MajorVersion |
                                    DataSetContentMask.MinorVersion |
                                    DataSetContentMask.SequenceNumber
                            }
                        }
                    },
                    MessageSettings = new WriterGroupMessageSettingsModel() {
                        NetworkMessageContentMask =
                            NetworkMessageContentMask.PublisherId |
                            NetworkMessageContentMask.WriterGroupId |
                            NetworkMessageContentMask.NetworkMessageNumber |
                            NetworkMessageContentMask.SequenceNumber |
                            NetworkMessageContentMask.PayloadHeader |
                            NetworkMessageContentMask.Timestamp |
                            NetworkMessageContentMask.DataSetClassId |
                            NetworkMessageContentMask.NetworkMessageHeader |
                            NetworkMessageContentMask.DataSetMessageHeader
                    }
                }
            })));
}