public void PnPlcPubSubDataSetFieldId2Test() {
    // Two nodes on one endpoint, each with an explicit DataSetFieldId:
    // the field ids must become the published-variable ids, in order.
    var pn = @" [ { ""EndpointUrl"": ""opc.tcp://localhost:50000"", ""OpcNodes"": [ { ""Id"": ""i=2258"", ""DataSetFieldId"": ""testfieldid1"" }, { ""Id"": ""i=2259"", ""DataSetFieldId"": ""testfieldid2"" } ] } ] ";
    var converter = new PublishedNodesJobConverter(TraceLogger.Create(), _serializer);

    var jobs = converter.Read(new StringReader(pn), new LegacyCliModel());

    Assert.NotEmpty(jobs);
    Assert.Single(jobs);
    var publishedData = jobs
        .Single().WriterGroup.DataSetWriters
        .Single().DataSet.DataSetSource.PublishedVariables.PublishedData;
    Assert.Equal(2, publishedData.Count);
    Assert.Equal("testfieldid1", publishedData.First().Id);
    Assert.Equal("testfieldid2", publishedData.Last().Id);
}
/// <summary>
/// Creates a new instance of the LegacyJobOrchestrator.
/// </summary>
/// <param name="publishedNodesJobConverter">The converter to read the job from the specified file.</param>
/// <param name="legacyCliModelProvider">The provider that provides the legacy command line arguments.</param>
/// <param name="jobSerializer">The serializer to (de)serialize job information.</param>
/// <param name="logger">Logger to write log messages.</param>
/// <param name="identity">Module's identity provider.</param>
public LegacyJobOrchestrator(PublishedNodesJobConverter publishedNodesJobConverter,
    ILegacyCliModelProvider legacyCliModelProvider, IJobSerializer jobSerializer,
    ILogger logger, IIdentity identity) {
    _publishedNodesJobConverter = publishedNodesJobConverter ??
        throw new ArgumentNullException(nameof(publishedNodesJobConverter));
    // Guard the provider itself before dereferencing it: the original expression
    // read legacyCliModelProvider.LegacyCliModel first and would have thrown
    // NullReferenceException instead of ArgumentNullException for a null provider.
    if (legacyCliModelProvider == null) {
        throw new ArgumentNullException(nameof(legacyCliModelProvider));
    }
    _legacyCliModel = legacyCliModelProvider.LegacyCliModel ??
        throw new ArgumentNullException(nameof(legacyCliModelProvider));
    _jobSerializer = jobSerializer ??
        throw new ArgumentNullException(nameof(jobSerializer));
    _logger = logger ??
        throw new ArgumentNullException(nameof(logger));
    _identity = identity ??
        throw new ArgumentNullException(nameof(identity));

    // Watch the published-nodes file for changes; fall back to the current
    // directory when the configured path has no directory component.
    var directory = Path.GetDirectoryName(_legacyCliModel.PublishedNodesFile);
    if (string.IsNullOrWhiteSpace(directory)) {
        directory = Environment.CurrentDirectory;
    }
    var file = Path.GetFileName(_legacyCliModel.PublishedNodesFile);
    _fileSystemWatcher = new FileSystemWatcher(directory, file);
    _fileSystemWatcher.Changed += _fileSystemWatcher_Changed;
    _fileSystemWatcher.EnableRaisingEvents = true;

    // Populate the job set synchronously so the orchestrator starts ready.
    RefreshJobFromFile();
}
/// <summary>
/// Creates a new instance of the LegacyJobOrchestrator.
/// </summary>
/// <param name="publishedNodesJobConverter">The converter to read the job from the specified file.</param>
/// <param name="legacyCliModelProvider">The provider that provides the legacy command line arguments.</param>
/// <param name="agentConfigPriovider">The provider that provides the agent configuration.</param>
/// <param name="jobSerializer">The serializer to (de)serialize job information.</param>
/// <param name="logger">Logger to write log messages.</param>
/// <param name="identity">Module's identity provider.</param>
public LegacyJobOrchestrator(PublishedNodesJobConverter publishedNodesJobConverter,
    ILegacyCliModelProvider legacyCliModelProvider, IAgentConfigProvider agentConfigPriovider,
    IJobSerializer jobSerializer, ILogger logger, IIdentity identity) {
    // NOTE: the misspelled parameter name "agentConfigPriovider" is kept as-is —
    // renaming it would break callers that use named arguments.
    _publishedNodesJobConverter = publishedNodesJobConverter ??
        throw new ArgumentNullException(nameof(publishedNodesJobConverter));
    // Guard the providers themselves before dereferencing: the original
    // expressions read .LegacyCliModel / .Config first and would have thrown
    // NullReferenceException instead of ArgumentNullException for null providers.
    if (legacyCliModelProvider == null) {
        throw new ArgumentNullException(nameof(legacyCliModelProvider));
    }
    _legacyCliModel = legacyCliModelProvider.LegacyCliModel ??
        throw new ArgumentNullException(nameof(legacyCliModelProvider));
    if (agentConfigPriovider == null) {
        throw new ArgumentNullException(nameof(agentConfigPriovider));
    }
    _agentConfig = agentConfigPriovider.Config ??
        throw new ArgumentNullException(nameof(agentConfigPriovider));
    _jobSerializer = jobSerializer ??
        throw new ArgumentNullException(nameof(jobSerializer));
    _logger = logger ??
        throw new ArgumentNullException(nameof(logger));
    _identity = identity ??
        throw new ArgumentNullException(nameof(identity));

    var directory = Path.GetDirectoryName(_legacyCliModel.PublishedNodesFile);
    if (string.IsNullOrWhiteSpace(directory)) {
        directory = Environment.CurrentDirectory;
    }

    // Job bookkeeping must exist before the initial refresh populates it.
    _availableJobs = new Queue<JobProcessingInstructionModel>();
    _assignedJobs = new ConcurrentDictionary<string, JobProcessingInstructionModel>();

    // Watch the published-nodes file so configuration changes are picked up at runtime.
    var file = Path.GetFileName(_legacyCliModel.PublishedNodesFile);
    _fileSystemWatcher = new FileSystemWatcher(directory, file);
    _fileSystemWatcher.Changed += _fileSystemWatcher_Changed;
    _fileSystemWatcher.EnableRaisingEvents = true;
    RefreshJobFromFile();
}
public void PnPlcExpandedNodeIdTest() {
    // A node addressed only by ExpandedNodeId must still produce one writer
    // for the endpoint, with samples messaging and no connection string.
    var pn = @" [ { ""EndpointUrl"": ""opc.tcp://localhost:50000"", ""OpcNodes"": [ { ""ExpandedNodeId"": ""nsu=http://opcfoundation.org/UA/;i=2258"" } ] } ] ";
    var converter = new PublishedNodesJobConverter(TraceLogger.Create(), new StandaloneIdentity());

    var jobs = converter.Read(new StringReader(pn), new LegacyCliModel());

    Assert.NotEmpty(jobs);
    Assert.Single(jobs);
    Assert.Single(jobs
        .Single().WriterGroup.DataSetWriters);
    Assert.All(jobs, j => Assert.Equal(MessagingMode.Samples, j.MessagingMode));
    Assert.All(jobs, j => Assert.Null(j.ConnectionString));
    Assert.Equal("opc.tcp://localhost:50000", jobs
        .Single().WriterGroup.DataSetWriters
        .Single().DataSet.DataSetSource.Connection.Endpoint.Url);
}
public void PnPlcPubSubDataSetPublishingInterval4Test() {
    // Endpoint-level DataSetPublishingInterval given as a *string* ("1000")
    // must win over both node-level OpcPublishingInterval values and the
    // CLI-configured default of 2000 ms.
    var pn = @" [ { ""DataSetPublishingInterval"": ""1000"", ""EndpointUrl"": ""opc.tcp://localhost:50000"", ""OpcNodes"": [ { ""Id"": ""i=2258"", ""OpcPublishingInterval"": 2000 }, { ""Id"": ""i=2259"", ""OpcPublishingInterval"": 3000 } ] } ] ";
    var converter = new PublishedNodesJobConverter(TraceLogger.Create(), _serializer);

    var jobs = converter.Read(new StringReader(pn), new LegacyCliModel() {
        DefaultPublishingInterval = TimeSpan.FromMilliseconds(2000)
    });

    Assert.NotEmpty(jobs);
    Assert.Single(jobs);
    var subscriptionSettings = jobs
        .Single().WriterGroup.DataSetWriters
        .Single().DataSet.DataSetSource.SubscriptionSettings;
    Assert.Equal(1000, subscriptionSettings.PublishingInterval.Value.TotalMilliseconds);
}
/// <summary>
/// Constructor that initializes common resources used by tests.
/// </summary>
public StandaloneJobOrchestratorTests() {
    _agentConfigModel = new AgentConfigModel();
    _agentConfigProviderMock = new Mock<IAgentConfigProvider>();
    _agentConfigProviderMock.Setup(p => p.Config).Returns(_agentConfigModel);

    _newtonSoftJsonSerializer = new NewtonSoftJsonSerializer();
    _newtonSoftJsonSerializerRaw = new NewtonSoftJsonSerializerRaw();
    _publisherJobSerializer = new PublisherJobSerializer(_newtonSoftJsonSerializer);
    _logger = TraceLogger.Create();

    var engineConfigMock = new Mock<IEngineConfiguration>();
    var clientConfigMock = new Mock<IClientServicesConfig>();
    _publishedNodesJobConverter = new PublishedNodesJobConverter(_logger,
        _newtonSoftJsonSerializer, engineConfigMock.Object, clientConfigMock.Object);

    // Note that each test is responsible for setting content of _tempFile;
    // start each run from an empty published-nodes file.
    Utils.CopyContent("Engine/empty_pn.json", _tempFile);
    _standaloneCliModel = new StandaloneCliModel {
        PublishedNodesFile = _tempFile,
        PublishedNodesSchemaFile = "Storage/publishednodesschema.json"
    };

    _standaloneCliModelProviderMock = new Mock<IStandaloneCliModelProvider>();
    _standaloneCliModelProviderMock.Setup(p => p.StandaloneCliModel).Returns(_standaloneCliModel);
    _publishedNodesProvider = new PublishedNodesProvider(_standaloneCliModelProviderMock.Object, _logger);
}
public void PnPlcHeartbeatSkipSingleTrueTest() {
    // A node with SkipSingle=true still yields exactly one writer for the endpoint.
    var pn = @" [ { ""EndpointUrl"": ""opc.tcp://localhost:50000"", ""OpcNodes"": [ { ""Id"": ""i=2258"", ""SkipSingle"": true } ] } ] ";
    var converter = new PublishedNodesJobConverter(TraceLogger.Create(), _serializer);

    var jobs = converter.Read(new StringReader(pn), new LegacyCliModel());

    Assert.NotEmpty(jobs);
    Assert.Single(jobs);
    Assert.All(jobs, j => Assert.Equal(MessagingMode.Samples, j.MessagingMode));
    Assert.All(jobs, j => Assert.Null(j.ConnectionString));
    Assert.Single(jobs
        .Single().WriterGroup.DataSetWriters);
    Assert.Equal("opc.tcp://localhost:50000", jobs
        .Single().WriterGroup.DataSetWriters
        .Single().DataSet.DataSetSource.Connection.Endpoint.Url);
}
public void PnPlcExpandedNodeId3Test() {
    // Mixed node id notations (numeric, namespaced, expanded) on one endpoint
    // must be merged into a single writer.
    var pn = @" [ { ""EndpointUrl"": ""opc.tcp://localhost:50000"", ""OpcNodes"": [ { ""Id"": ""i=2258"" }, { ""Id"": ""ns=2;s=DipData"" }, { ""Id"": ""nsu=http://microsoft.com/Opc/OpcPlc/;s=NegativeTrendData"" } ] } ] ";
    var converter = new PublishedNodesJobConverter(TraceLogger.Create(), _serializer);

    var jobs = converter.Read(new StringReader(pn), new LegacyCliModel());

    Assert.NotEmpty(jobs);
    Assert.Single(jobs);
    Assert.All(jobs, j => Assert.Equal(MessagingMode.Samples, j.MessagingMode));
    Assert.All(jobs, j => Assert.Null(j.ConnectionString));
    Assert.Single(jobs
        .Single().WriterGroup.DataSetWriters);
    Assert.Equal("opc.tcp://localhost:50000", jobs
        .Single().WriterGroup.DataSetWriters
        .Single().DataSet.DataSetSource.Connection.Endpoint.Url);
}
public void Test_PnJson_With_Multiple_Jobs_Expect_DifferentJobIds() {
    // Arrange: orchestrator backed by a published-nodes file defining two jobs.
    var cliProviderMock = new Mock<ILegacyCliModelProvider>();
    var agentProviderMock = new Mock<IAgentConfigProvider>();
    var identityMock = new Mock<IIdentity>();
    var serializer = new NewtonSoftJsonSerializer();
    var jobSerializer = new PublisherJobSerializer(serializer);
    var jobConverter = new PublishedNodesJobConverter(TraceLogger.Create(), serializer);
    var cliModel = new LegacyCliModel { PublishedNodesFile = "Engine/pn_assets.json" };
    cliProviderMock.Setup(p => p.LegacyCliModel).Returns(cliModel);
    agentProviderMock.Setup(p => p.Config).Returns(new AgentConfigModel());
    var orchestrator = new LegacyJobOrchestrator(jobConverter, cliProviderMock.Object,
        agentProviderMock.Object, jobSerializer, TraceLogger.Create(), identityMock.Object);

    // Act: two workers each get a job; a third request finds nothing left.
    var job1 = orchestrator.GetAvailableJobAsync(1.ToString(), new JobRequestModel()).GetAwaiter().GetResult();
    Assert.NotNull(job1);
    var job2 = orchestrator.GetAvailableJobAsync(2.ToString(), new JobRequestModel()).GetAwaiter().GetResult();
    Assert.NotNull(job2);
    var job3 = orchestrator.GetAvailableJobAsync(3.ToString(), new JobRequestModel()).GetAwaiter().GetResult();
    Assert.Null(job3);

    // Assert: the two handed-out jobs carry distinct ids.
    Assert.NotEqual(job1.Job.Id, job2.Job.Id);
}
public async Task GetAvailableJobAsyncMulithreading() {
    // NOTE: method name typo ("Mulithreading") kept — it is the public test name.
    // Arrange: orchestrator backed by a file that yields two distinct jobs.
    var cliProviderMock = new Mock<ILegacyCliModelProvider>();
    var agentProviderMock = new Mock<IAgentConfigProvider>();
    var identityMock = new Mock<IIdentity>();
    var serializer = new NewtonSoftJsonSerializer();
    var jobSerializer = new PublisherJobSerializer(serializer);
    var jobConverter = new PublishedNodesJobConverter(TraceLogger.Create(), serializer);
    var cliModel = new LegacyCliModel { PublishedNodesFile = "Engine/publishednodes.json" };
    cliProviderMock.Setup(p => p.LegacyCliModel).Returns(cliModel);
    agentProviderMock.Setup(p => p.Config).Returns(new AgentConfigModel());
    var orchestrator = new LegacyJobOrchestrator(jobConverter, cliProviderMock.Object,
        agentProviderMock.Object, jobSerializer, TraceLogger.Create(), identityMock.Object);

    // Act: ten concurrent workers race for the available jobs.
    var tasks = new List<Task<JobProcessingInstructionModel>>();
    for (var i = 0; i < 10; i++) {
        tasks.Add(orchestrator.GetAvailableJobAsync(i.ToString(), new JobRequestModel()));
    }
    await Task.WhenAll(tasks);

    // Assert: exactly two workers got a job, and the jobs are distinct.
    Assert.Equal(2, tasks.Count(t => t.Result != null));
    var distinctConfigurations = tasks
        .Where(t => t.Result != null)
        .Select(t => t.Result.Job.JobConfiguration)
        .Distinct();
    Assert.Equal(2, distinctConfigurations.Count());
}
public void PnPlcPublishingIntervalCliTest() {
    // With no interval in the file, the CLI default (10 s) must be applied.
    var pn = @" [ { ""EndpointUrl"": ""opc.tcp://localhost:50000"", ""OpcNodes"": [ { ""Id"": ""i=2258"" } ] } ] ";
    var converter = new PublishedNodesJobConverter(TraceLogger.Create(), _serializer);

    var jobs = converter.Read(new StringReader(pn), new LegacyCliModel() {
        DefaultPublishingInterval = TimeSpan.FromSeconds(10)
    });

    Assert.NotEmpty(jobs);
    Assert.Single(jobs);
    Assert.Single(jobs
        .Single().WriterGroup.DataSetWriters);
    Assert.All(jobs, j => Assert.Equal(MessagingMode.Samples, j.MessagingMode));
    Assert.All(jobs, j => Assert.Null(j.ConnectionString));
    var writer = jobs.Single().WriterGroup.DataSetWriters.Single();
    Assert.Equal("opc.tcp://localhost:50000",
        writer.DataSet.DataSetSource.Connection.Endpoint.Url);
    Assert.Equal(10000,
        writer.DataSet.DataSetSource.SubscriptionSettings.PublishingInterval.Value.TotalMilliseconds);
}
public void PnPlcPubSubDataSetWriterGroupTest() {
    // DataSetWriterGroup from the file must surface as the connection group.
    var pn = @" [ { ""DataSetWriterGroup"": ""testgroup"", ""EndpointUrl"": ""opc.tcp://localhost:50000"", ""OpcNodes"": [ { ""Id"": ""i=2258"", ""HeartbeatInterval"": 2 } ] } ] ";
    var converter = new PublishedNodesJobConverter(TraceLogger.Create(), _serializer);

    var jobs = converter.Read(new StringReader(pn), new LegacyCliModel());

    Assert.NotEmpty(jobs);
    Assert.Single(jobs);
    Assert.Equal("testgroup", jobs
        .Single().WriterGroup.DataSetWriters
        .Single().DataSet.DataSetSource.Connection.Group);
}
public void PnPlcPubSubPublishedNodeDisplayName4Test() {
    // A node with a DataSetFieldId but no DisplayName must leave
    // PublishedVariableDisplayName null.
    var pn = @" [ { ""DataSetPublishingInterval"": ""1000"", ""EndpointUrl"": ""opc.tcp://localhost:50000"", ""OpcNodes"": [ { ""Id"": ""i=2258"", ""DataSetFieldId"": ""testdatasetfieldid1"", }, ] } ] ";
    var converter = new PublishedNodesJobConverter(TraceLogger.Create(), _serializer);

    var jobs = converter.Read(new StringReader(pn), new LegacyCliModel() {
        DefaultPublishingInterval = TimeSpan.FromMilliseconds(2000)
    });

    Assert.NotEmpty(jobs);
    Assert.Single(jobs);
    Assert.Null(jobs
        .Single().WriterGroup.DataSetWriters
        .Single().DataSet.DataSetSource.PublishedVariables.PublishedData.Single().PublishedVariableDisplayName);
}
public void PnPlcHeartbeatSkipSingleFalseTest() {
    // A node with SkipSingle=false still yields exactly one writer.
    var pn = @" [ { ""EndpointUrl"": ""opc.tcp://localhost:50000"", ""OpcNodes"": [ { ""Id"": ""i=2258"", ""SkipSingle"": false } ] } ] ";
    var converter = new PublishedNodesJobConverter(TraceLogger.Create(), new StandaloneIdentity());

    var jobs = converter.Read(new StringReader(pn), new LegacyCliModel());

    Assert.NotEmpty(jobs);
    Assert.Single(jobs);
    Assert.Single(jobs
        .Single().WriterGroup.DataSetWriters);
    Assert.Equal("opc.tcp://localhost:50000", jobs
        .Single().WriterGroup.DataSetWriters
        .Single().DataSet.DataSetSource.Connection.Endpoint.Url);
}
public void PnPlcHeartbeatInterval2Test() {
    // HeartbeatInterval of 2 (seconds) must round-trip into the published
    // variable's HeartbeatInterval TimeSpan.
    var pn = @" [ { ""EndpointUrl"": ""opc.tcp://localhost:50000"", ""OpcNodes"": [ { ""Id"": ""i=2258"", ""HeartbeatInterval"": 2 } ] } ] ";
    var converter = new PublishedNodesJobConverter(TraceLogger.Create(), _serializer, new StandaloneIdentity());

    var jobs = converter.Read(new StringReader(pn), new LegacyCliModel());

    Assert.NotEmpty(jobs);
    Assert.Single(jobs);
    Assert.All(jobs, j => Assert.Equal(MessagingMode.Samples, j.MessagingMode));
    Assert.All(jobs, j => Assert.Null(j.ConnectionString));
    Assert.Single(jobs
        .Single().WriterGroup.DataSetWriters);
    Assert.Equal("opc.tcp://localhost:50000", jobs
        .Single().WriterGroup.DataSetWriters
        .Single().DataSet.DataSetSource.Connection.Endpoint.Url);
    Assert.Equal(2, jobs.Single()
        .WriterGroup.DataSetWriters.Single()
        .DataSet.DataSetSource.PublishedVariables.PublishedData.Single()
        .HeartbeatInterval.Value.TotalSeconds);
}
public void PnPlcMultiJob2Test() {
    // Four endpoint entries (two legacy NodeId-style, two OpcNodes-style)
    // must produce four jobs, one single-writer group each.
    var pn = @" [ { ""EndpointUrl"": ""opc.tcp://localhost:50000"", ""NodeId"": { ""Identifier"": ""i=2258"", } }, { ""EndpointUrl"": ""opc.tcp://localhost:50000"", ""NodeId"": { ""Identifier"": ""ns=0;i=2261"" } }, { ""EndpointUrl"": ""opc.tcp://localhost:50000"", ""OpcNodes"": [ { ""OpcPublishingInterval"": 1000, ""ExpandedNodeId"": ""nsu=http://microsoft.com/Opc/OpcPlc/;s=AlternatingBoolean"" } ] }, { ""EndpointUrl"": ""opc.tcp://localhost:50000"", ""OpcNodes"": [ { ""OpcPublishingInterval"": 2000, ""Id"": ""i=2262"" }, { ""OpcPublishingInterval"": 3000, ""Id"": ""ns=2;s=DipData"" }, { ""Id"": ""nsu=http://microsoft.com/Opc/OpcPlc/;s=NegativeTrendData"" } ] } ] ";
    var converter = new PublishedNodesJobConverter(TraceLogger.Create(), _serializer, new StandaloneIdentity());

    var jobs = converter.Read(new StringReader(pn), new LegacyCliModel());

    // Expect one job per endpoint entry.
    Assert.NotEmpty(jobs);
    Assert.Equal(4, jobs.Count());
    Assert.All(jobs, j => Assert.Equal(MessagingMode.Samples, j.MessagingMode));
    Assert.All(jobs, j => Assert.Null(j.ConnectionString));
    Assert.All(jobs, j => Assert.Single(j.WriterGroup.DataSetWriters));
    Assert.All(jobs, j => Assert.Equal("opc.tcp://localhost:50000",
        j.WriterGroup.DataSetWriters
        .Single().DataSet.DataSetSource.Connection.Endpoint.Url));
}
public async Task UnpublishNodesOnExistingConfiguration(string publishedNodesFile) {
    // Arrange: orchestrator seeded with the contents of publishedNodesFile.
    var cliProviderMock = new Mock<IStandaloneCliModelProvider>();
    var agentProviderMock = new Mock<IAgentConfigProvider>();
    var engineConfigMock = new Mock<IEngineConfiguration>();
    var clientConfigMock = new Mock<IClientServicesConfig>();
    var serializer = new NewtonSoftJsonSerializer();
    var jobSerializer = new PublisherJobSerializer(serializer);
    var logger = TraceLogger.Create();
    var jobConverter = new PublishedNodesJobConverter(logger, serializer,
        engineConfigMock.Object, clientConfigMock.Object);
    Utils.CopyContent(publishedNodesFile, _tempFile);
    var cliModel = new StandaloneCliModel {
        PublishedNodesFile = _tempFile,
        PublishedNodesSchemaFile = "Storage/publishednodesschema.json"
    };
    cliProviderMock.Setup(p => p.StandaloneCliModel).Returns(cliModel);
    agentProviderMock.Setup(p => p.Config).Returns(new AgentConfigModel());
    var publishedNodesProvider = new PublishedNodesProvider(cliProviderMock.Object, logger);
    var orchestrator = new StandaloneJobOrchestrator(
        jobConverter,
        cliProviderMock.Object,
        agentProviderMock.Object,
        jobSerializer,
        logger,
        publishedNodesProvider,
        serializer
    );

    // Act: unpublish every entry that the configuration file defines.
    string payload = Utils.GetFileContent(publishedNodesFile);
    var unpublishRequests = serializer.Deserialize<List<PublishedNodesEntryModel>>(payload);
    foreach (var request in unpublishRequests) {
        await FluentActions
            .Invoking(async () => await orchestrator.UnpublishNodesAsync(request).ConfigureAwait(false))
            .Should()
            .NotThrowAsync()
            .ConfigureAwait(false);
    }

    // Assert: with everything unpublished there are no jobs to hand out.
    var tasks = new List<Task<JobProcessingInstructionModel>>();
    for (var i = 0; i < 10; i++) {
        tasks.Add(orchestrator.GetAvailableJobAsync(i.ToString(), new JobRequestModel()));
    }
    await Task.WhenAll(tasks).ConfigureAwait(false);
    tasks.Count(t => t.Result != null)
        .Should()
        .Be(0);
}
public void PnPlcEmptyTest() {
    // An empty published-nodes array must yield no jobs at all.
    var pn = @" [ ] ";
    var converter = new PublishedNodesJobConverter(TraceLogger.Create(), _serializer);

    var jobs = converter.Read(new StringReader(pn), new LegacyCliModel());

    Assert.Empty(jobs);
}
// NOTE(review): the payload below appears to have been mangled by credential
// redaction — "opc.tcp://*****:*****@" terminates the verbatim string early,
// and the StringBuilder node-generation code that presumably followed is
// missing. As written this cannot compile; recover the original endpoint URL
// and generation loop from version control before relying on this test.
// Intent (from the assertions): one job whose writer group contains 10 writers
// against opc.tcp://localhost:50000, the first five with a 1000 ms publishing
// interval, the rest null, each carrying 1000 published variables.
public void PnPlcMultiJobBatching2Test() { var pn = new StringBuilder(@" [ { ""EndpointUrl"": ""opc.tcp://*****:*****@" { ""Id"": ""i=10000"" } ] } ] "); var converter = new PublishedNodesJobConverter(TraceLogger.Create(), _serializer); var jobs = converter.Read(new StringReader(pn.ToString()), new LegacyCliModel()).ToList(); // No jobs Assert.NotEmpty(jobs); Assert.Single(jobs); Assert.All(jobs, j => Assert.Equal(MessagingMode.Samples, j.MessagingMode)); Assert.All(jobs, j => Assert.Null(j.ConnectionString)); Assert.Equal(10, jobs.Single().WriterGroup.DataSetWriters.Count()); Assert.All(jobs.Single().WriterGroup.DataSetWriters, dataSetWriter => Assert.Equal("opc.tcp://localhost:50000", dataSetWriter.DataSet.DataSetSource.Connection.Endpoint.Url)); Assert.Equal(jobs.Single().WriterGroup.DataSetWriters.Select(dataSetWriter => dataSetWriter.DataSet.DataSetSource.SubscriptionSettings?.PublishingInterval).ToList(), new TimeSpan?[] { TimeSpan.FromMilliseconds(1000), TimeSpan.FromMilliseconds(1000), TimeSpan.FromMilliseconds(1000), TimeSpan.FromMilliseconds(1000), TimeSpan.FromMilliseconds(1000), null, null, null, null, null }); Assert.All(jobs.Single().WriterGroup.DataSetWriters, dataSetWriter => Assert.All( dataSetWriter.DataSet.DataSetSource.PublishedVariables.PublishedData, p => Assert.Null(p.SamplingInterval))); Assert.All(jobs.Single().WriterGroup.DataSetWriters, dataSetWriter => Assert.Equal(1000, dataSetWriter.DataSet.DataSetSource.PublishedVariables.PublishedData.Count)); }
/// <summary>
/// Publishes all nodes from <paramref name="publishedNodesFile"/> through a
/// freshly constructed orchestrator and returns its methods controller.
/// </summary>
private async Task<PublisherMethodsController> publishNodeAsync(string publishedNodesFile) {
    var cliProviderMock = new Mock<IStandaloneCliModelProvider>();
    var agentProviderMock = new Mock<IAgentConfigProvider>();
    var identityMock = new Mock<IIdentity>();
    var serializer = new NewtonSoftJsonSerializer();
    var jobSerializer = new PublisherJobSerializer(serializer);
    var logger = TraceLogger.Create();
    var engineConfigMock = new Mock<IEngineConfiguration>();
    var clientConfigMock = new Mock<IClientServicesConfig>();
    var jobConverter = new PublishedNodesJobConverter(logger, serializer,
        engineConfigMock.Object, clientConfigMock.Object);

    // Start from an empty configuration file.
    Utils.CopyContent("Engine/empty_pn.json", _tempFile);
    var cliModel = new StandaloneCliModel {
        PublishedNodesFile = _tempFile,
        PublishedNodesSchemaFile = "Storage/publishednodesschema.json"
    };
    cliProviderMock.Setup(p => p.StandaloneCliModel).Returns(cliModel);
    agentProviderMock.Setup(p => p.Config).Returns(new AgentConfigModel());
    var publishedNodesProvider = new PublishedNodesProvider(cliProviderMock.Object, logger);
    var orchestrator = new StandaloneJobOrchestrator(
        jobConverter,
        cliProviderMock.Object,
        agentProviderMock.Object,
        jobSerializer,
        logger,
        publishedNodesProvider,
        serializer
    );
    var methodsController = new PublisherMethodsController(orchestrator);

    // Publish every request from the input file; none may throw.
    using var publishPayloads = new StreamReader(publishedNodesFile);
    var publishNodesRequest = serializer.Deserialize<List<PublishNodesEndpointApiModel>>(
        await publishPayloads.ReadToEndAsync().ConfigureAwait(false));
    foreach (var request in publishNodesRequest) {
        await FluentActions
            .Invoking(async () => await methodsController.PublishNodesAsync(request).ConfigureAwait(false))
            .Should()
            .NotThrowAsync()
            .ConfigureAwait(false);
    }
    return methodsController;
}
public void PnPlcPubSubFullTest() {
    // Full PubSub configuration: group, writer id and endpoint-level interval
    // must all flow through; a node without a DataSetFieldId keeps its node id.
    var pn = @" [ { ""DataSetWriterGroup"": ""testgroup"", ""DataSetWriterId"": ""testwriterid"", ""DataSetPublishingInterval"": 1000, ""EndpointUrl"": ""opc.tcp://localhost:50000"", ""OpcNodes"": [ { ""Id"": ""i=2258"", ""DataSetFieldId"": ""testfieldid1"", ""OpcPublishingInterval"": 2000 }, { ""Id"": ""i=2259"", } ] } ] ";
    var converter = new PublishedNodesJobConverter(TraceLogger.Create(), _serializer);

    var jobs = converter.Read(new StringReader(pn), new LegacyCliModel() {
        DefaultPublishingInterval = TimeSpan.FromSeconds(5)
    });

    Assert.NotEmpty(jobs);
    Assert.Single(jobs);
    var source = jobs
        .Single().WriterGroup.DataSetWriters
        .Single().DataSet.DataSetSource;
    Assert.Equal(2, source.PublishedVariables.PublishedData.Count);
    Assert.Equal("testfieldid1", source.PublishedVariables.PublishedData.First().Id);
    Assert.Equal("i=2259", source.PublishedVariables.PublishedData.Last().Id);
    Assert.Equal("testgroup", source.Connection.Group);
    Assert.Equal("testwriterid", source.Connection.Id);
    // Endpoint-level interval (1000 ms) wins over the CLI default of 5 s.
    Assert.Equal(1000, source.SubscriptionSettings.PublishingInterval.Value.TotalMilliseconds);
}
// NOTE(review): the payload below appears to have been mangled by credential
// redaction — "opc.tcp://*****:*****@" terminates the verbatim string early,
// and the StringBuilder node-generation code that presumably followed is
// missing. As written this cannot compile; recover the original endpoint URL
// and generation loop from version control before relying on this test.
// Intent (from the assertions): ten jobs against opc.tcp://localhost:50000,
// each with a single writer, no publishing interval, and 1000 published
// variables with null sampling intervals.
public void PnPlcMultiJobBatchingTest() { var pn = new StringBuilder(@" [ { ""EndpointUrl"": ""opc.tcp://*****:*****@" { ""Id"": ""i=10000"" } ] } ] "); var converter = new PublishedNodesJobConverter(TraceLogger.Create(), new StandaloneIdentity()); var jobs = converter.Read(new StringReader(pn.ToString()), new LegacyCliModel()).ToList(); // No jobs Assert.NotEmpty(jobs); Assert.Equal(10, jobs.Count()); Assert.All(jobs, j => Assert.Equal(MessagingMode.Samples, j.MessagingMode)); Assert.All(jobs, j => Assert.Null(j.ConnectionString)); Assert.All(jobs, j => Assert.Single(j.WriterGroup.DataSetWriters)); Assert.All(jobs, j => Assert.Equal("opc.tcp://localhost:50000", j.WriterGroup.DataSetWriters .Single().DataSet.DataSetSource.Connection.Endpoint.Url)); Assert.All(jobs, j => Assert.Null( j.WriterGroup.DataSetWriters .Single().DataSet.DataSetSource.SubscriptionSettings.PublishingInterval)); Assert.All(jobs, j => Assert.All( j.WriterGroup.DataSetWriters .Single().DataSet.DataSetSource.PublishedVariables.PublishedData, p => Assert.Null(p.SamplingInterval))); Assert.All(jobs, j => Assert.Equal(1000, j.WriterGroup.DataSetWriters .Single().DataSet.DataSetSource.PublishedVariables.PublishedData.Count)); }
public async Task DmApiGetConfiguredEndpointsTest(string publishedNodesFile) {
    // Arrange: orchestrator over an initially empty configuration file.
    var cliProviderMock = new Mock<IStandaloneCliModelProvider>();
    var agentProviderMock = new Mock<IAgentConfigProvider>();
    var identityMock = new Mock<IIdentity>();
    var serializer = new NewtonSoftJsonSerializer();
    var jobSerializer = new PublisherJobSerializer(serializer);
    var logger = TraceLogger.Create();
    var engineConfigMock = new Mock<IEngineConfiguration>();
    var clientConfigMock = new Mock<IClientServicesConfig>();
    var jobConverter = new PublishedNodesJobConverter(logger, serializer,
        engineConfigMock.Object, clientConfigMock.Object);
    Utils.CopyContent("Engine/empty_pn.json", _tempFile);
    var cliModel = new StandaloneCliModel {
        PublishedNodesFile = _tempFile,
        PublishedNodesSchemaFile = "Storage/publishednodesschema.json"
    };
    cliProviderMock.Setup(p => p.StandaloneCliModel).Returns(cliModel);
    agentProviderMock.Setup(p => p.Config).Returns(new AgentConfigModel());
    var publishedNodesProvider = new PublishedNodesProvider(cliProviderMock.Object, logger);
    var orchestrator = new StandaloneJobOrchestrator(
        jobConverter,
        cliProviderMock.Object,
        agentProviderMock.Object,
        jobSerializer,
        logger,
        publishedNodesProvider,
        serializer
    );
    var methodsController = new PublisherMethodsController(orchestrator);

    using var publishPayloads = new StreamReader(publishedNodesFile);
    var publishNodesRequests = serializer.Deserialize<List<PublishNodesEndpointApiModel>>(
        await publishPayloads.ReadToEndAsync().ConfigureAwait(false));

    // Check that GetConfiguredEndpointsAsync returns an empty list initially.
    var endpoints = await FluentActions
        .Invoking(async () => await methodsController
            .GetConfiguredEndpointsAsync().ConfigureAwait(false))
        .Should()
        .NotThrowAsync()
        .ConfigureAwait(false);
    endpoints.Subject.Endpoints.Count.Should().Be(0);

    // Publish nodes.
    foreach (var request in publishNodesRequests) {
        await FluentActions
            .Invoking(async () => await methodsController
                .PublishNodesAsync(request).ConfigureAwait(false))
            .Should()
            .NotThrowAsync()
            .ConfigureAwait(false);
    }

    // Check configured endpoints count and per-endpoint tags.
    endpoints = await FluentActions
        .Invoking(async () => await methodsController
            .GetConfiguredEndpointsAsync().ConfigureAwait(false))
        .Should()
        .NotThrowAsync()
        .ConfigureAwait(false);
    endpoints.Subject.Endpoints.Count.Should().Be(5);
    endpoints.Subject.Endpoints[0].Tag.Should().Be("Tag_Leaf0_10000_3085991c-b85c-4311-9bfb-a916da952234");
    endpoints.Subject.Endpoints[1].Tag.Should().Be("Tag_Leaf1_10000_2e4fc28f-ffa2-4532-9f22-378d47bbee5d");
    endpoints.Subject.Endpoints[2].Tag.Should().Be("Tag_Leaf2_10000_3085991c-b85c-4311-9bfb-a916da952234");
    endpoints.Subject.Endpoints[3].Tag.Should().Be("Tag_Leaf3_10000_2e4fc28f-ffa2-4532-9f22-378d47bbee5d");
    endpoints.Subject.Endpoints[4].Tag.Should().BeNull();

    // Every configured endpoint must hash distinctly.
    var endpointsHash = endpoints.Subject.Endpoints.Select(e => e.GetHashCode()).ToList();
    Assert.True(endpointsHash.Distinct().Count() == endpointsHash.Count());
}
public async Task DmApiPublishNodesToJobTest(string publishedNodesFile) {
    // Arrange: orchestrator over an initially empty configuration file.
    var cliProviderMock = new Mock<IStandaloneCliModelProvider>();
    var agentProviderMock = new Mock<IAgentConfigProvider>();
    var identityMock = new Mock<IIdentity>();
    var serializer = new NewtonSoftJsonSerializer();
    var jobSerializer = new PublisherJobSerializer(serializer);
    var logger = TraceLogger.Create();
    var engineConfigMock = new Mock<IEngineConfiguration>();
    var clientConfigMock = new Mock<IClientServicesConfig>();
    var jobConverter = new PublishedNodesJobConverter(logger, serializer,
        engineConfigMock.Object, clientConfigMock.Object);
    Utils.CopyContent("Engine/empty_pn.json", _tempFile);
    var cliModel = new StandaloneCliModel {
        PublishedNodesFile = _tempFile,
        PublishedNodesSchemaFile = "Storage/publishednodesschema.json"
    };
    cliProviderMock.Setup(p => p.StandaloneCliModel).Returns(cliModel);
    agentProviderMock.Setup(p => p.Config).Returns(new AgentConfigModel());
    var publishedNodesProvider = new PublishedNodesProvider(cliProviderMock.Object, logger);
    var orchestrator = new StandaloneJobOrchestrator(
        jobConverter,
        cliProviderMock.Object,
        agentProviderMock.Object,
        jobSerializer,
        logger,
        publishedNodesProvider,
        serializer
    );
    var methodsController = new PublisherMethodsController(orchestrator);

    // Act: publish every request from the input file; none may throw.
    using var publishPayloads = new StreamReader(publishedNodesFile);
    var publishNodesRequests = serializer.Deserialize<List<PublishNodesEndpointApiModel>>(
        await publishPayloads.ReadToEndAsync().ConfigureAwait(false));
    foreach (var request in publishNodesRequests) {
        await FluentActions
            .Invoking(async () => await methodsController
                .PublishNodesAsync(request).ConfigureAwait(false))
            .Should()
            .NotThrowAsync()
            .ConfigureAwait(false);
    }

    // Ten workers race for the resulting jobs; exactly two distinct jobs exist.
    var tasks = new List<Task<JobProcessingInstructionModel>>();
    for (var i = 0; i < 10; i++) {
        tasks.Add(orchestrator.GetAvailableJobAsync(i.ToString(), new JobRequestModel()));
    }
    var job = tasks.Where(t => t.Result != null)
        .Select(t => t.Result.Job)
        .Distinct();
    job.Count()
        .Should()
        .Be(2);

    // Assert: the first job materializes into a writer group of four writers,
    // each carrying the connection settings of the first publish request.
    var jobModel = jobSerializer.DeserializeJobConfiguration(
        job.First().JobConfiguration, job.First().JobConfigurationType) as WriterGroupJobModel;
    jobModel.WriterGroup.DataSetWriters.Count.Should().Be(4);
    foreach (var datasetWriter in jobModel.WriterGroup.DataSetWriters) {
        datasetWriter.DataSet.DataSetSource.Connection.Endpoint.Url
            .Should()
            .Be(publishNodesRequests.First().EndpointUrl);
        datasetWriter.DataSet.DataSetSource.Connection.Endpoint.SecurityMode
            .Should()
            .Be(publishNodesRequests.First().UseSecurity
                ? SecurityMode.Best
                : SecurityMode.None);
        datasetWriter.DataSet.DataSetSource.Connection.User.
            IsSameAs(new CredentialModel {
                Type = publishNodesRequests.First().OpcAuthenticationMode == AuthenticationMode.Anonymous
                    ? CredentialType.None
                    : CredentialType.UserName,
                Value = serializer.FromObject(
                    new {
                        user = publishNodesRequests.First().UserName,
                        password = publishNodesRequests.First().Password,
                    })
            })
            .Should()
            .BeTrue();
    }
}
/// <summary>
/// Verifies that a large set of nodes published through the direct-method API
/// produces the expected jobs and that unpublishing the same requests removes
/// every job again.
/// </summary>
/// <param name="publishedNodesFile">Path to the published-nodes request payload file.</param>
public async Task DmApiPublishUnpublishNodesTest(string publishedNodesFile) {
    var standaloneCliModelProviderMock = new Mock<IStandaloneCliModelProvider>();
    var agentConfigProviderMock = new Mock<IAgentConfigProvider>();
    var identityMock = new Mock<IIdentity>();
    var newtonSoftJsonSerializer = new NewtonSoftJsonSerializer();
    var jobSerializer = new PublisherJobSerializer(newtonSoftJsonSerializer);
    var logger = TraceLogger.Create();
    var engineConfigMock = new Mock<IEngineConfiguration>();
    var clientConfignMock = new Mock<IClientServicesConfig>();
    var publishedNodesJobConverter = new PublishedNodesJobConverter(logger, newtonSoftJsonSerializer,
        engineConfigMock.Object, clientConfignMock.Object);

    // Start from an empty configuration file.
    Utils.CopyContent("Engine/empty_pn.json", _tempFile);
    var standaloneCli = new StandaloneCliModel {
        PublishedNodesFile = _tempFile,
        PublishedNodesSchemaFile = "Storage/publishednodesschema.json"
    };
    standaloneCliModelProviderMock.Setup(p => p.StandaloneCliModel).Returns(standaloneCli);
    agentConfigProviderMock.Setup(p => p.Config).Returns(new AgentConfigModel());
    var publishedNodesProvider = new PublishedNodesProvider(standaloneCliModelProviderMock.Object, logger);

    var orchestrator = new StandaloneJobOrchestrator(
        publishedNodesJobConverter,
        standaloneCliModelProviderMock.Object,
        agentConfigProviderMock.Object,
        jobSerializer,
        logger,
        publishedNodesProvider,
        newtonSoftJsonSerializer
    );
    var methodsController = new PublisherMethodsController(orchestrator);

    using var publishPayloads = new StreamReader(publishedNodesFile);
    var publishNodesRequest = newtonSoftJsonSerializer.Deserialize<List<PublishNodesEndpointApiModel>>(
        await publishPayloads.ReadToEndAsync().ConfigureAwait(false));

    foreach (var request in publishNodesRequest) {
        // Inflate each request with 10000 additional nodes cloned from its
        // first node to stress the publish path.
        var initialNode = request.OpcNodes.First();
        for (int i = 0; i < 10000; i++) {
            request.OpcNodes.Add(new PublishedNodeApiModel {
                Id = initialNode.Id + i.ToString(),
                DataSetFieldId = initialNode.DataSetFieldId,
                DisplayName = initialNode.DisplayName,
                ExpandedNodeId = initialNode.ExpandedNodeId,
                HeartbeatIntervalTimespan = initialNode.HeartbeatIntervalTimespan,
                OpcPublishingInterval = initialNode.OpcPublishingInterval,
                OpcSamplingInterval = initialNode.OpcSamplingInterval,
                QueueSize = initialNode.QueueSize,
                // ToDo: Implement mechanism for SkipFirst.
                SkipFirst = initialNode.SkipFirst,
            });
        }

        await FluentActions
            .Invoking(async () => await methodsController.PublishNodesAsync(request).ConfigureAwait(false))
            .Should()
            .NotThrowAsync()
            .ConfigureAwait(false);
    }

    var tasks = new List<Task<JobProcessingInstructionModel>>();
    for (var i = 0; i < 10; i++) {
        tasks.Add(orchestrator.GetAvailableJobAsync(i.ToString(), new JobRequestModel()));
    }
    // Bug fix: await the requests before reading Task.Result to avoid
    // sync-over-async blocking (consistent with the other tests in this class).
    await Task.WhenAll(tasks).ConfigureAwait(false);

    tasks.Where(t => t.Result != null)
        .Select(t => t.Result.Job.JobConfiguration)
        .Distinct().Count()
        .Should()
        .Be(2);

    // Unpublishing the same requests must succeed for every endpoint.
    foreach (var request in publishNodesRequest) {
        await FluentActions
            .Invoking(async () => await methodsController
                .UnpublishNodesAsync(request).ConfigureAwait(false))
            .Should()
            .NotThrowAsync()
            .ConfigureAwait(false);
    }

    tasks = new List<Task<JobProcessingInstructionModel>>();
    for (var i = 0; i < 10; i++) {
        tasks.Add(orchestrator.GetAvailableJobAsync(i.ToString(), new JobRequestModel()));
    }
    await Task.WhenAll(tasks).ConfigureAwait(false);

    // Idiom: Count(predicate) instead of Where(...).Count().
    tasks.Count(t => t.Result != null)
        .Should()
        .Be(0);
}
/// <summary>
/// Ensures that unpublishing nodes for endpoints missing from the current
/// configuration fails with a 404 method-call error, while the pre-existing
/// configuration keeps producing its two jobs untouched.
/// </summary>
/// <param name="existingConfig">Configuration file that seeds the orchestrator.</param>
/// <param name="newConfig">Payload referencing endpoints not present in the configuration.</param>
public async Task UnpublishNodesOnNonExistingConfiguration(string existingConfig, string newConfig) {
    var cliProviderMock = new Mock<IStandaloneCliModelProvider>();
    var agentProviderMock = new Mock<IAgentConfigProvider>();
    var engineConfig = new Mock<IEngineConfiguration>();
    var clientConfig = new Mock<IClientServicesConfig>();
    var serializer = new NewtonSoftJsonSerializer();
    var publisherJobSerializer = new PublisherJobSerializer(serializer);
    var traceLogger = TraceLogger.Create();
    var converter = new PublishedNodesJobConverter(traceLogger, serializer,
        engineConfig.Object, clientConfig.Object);

    // Seed the orchestrator with the existing configuration file.
    Utils.CopyContent(existingConfig, _tempFile);
    var cliModel = new StandaloneCliModel {
        PublishedNodesFile = _tempFile,
        PublishedNodesSchemaFile = "Storage/publishednodesschema.json"
    };
    cliProviderMock.Setup(p => p.StandaloneCliModel).Returns(cliModel);
    agentProviderMock.Setup(p => p.Config).Returns(new AgentConfigModel());
    var nodesProvider = new PublishedNodesProvider(cliProviderMock.Object, traceLogger);

    var orchestrator = new StandaloneJobOrchestrator(
        converter,
        cliProviderMock.Object,
        agentProviderMock.Object,
        publisherJobSerializer,
        traceLogger,
        nodesProvider,
        serializer
    );

    string payload = Utils.GetFileContent(newConfig);
    var requests = serializer.Deserialize<List<PublishedNodesEntryModel>>(payload);

    // Every unknown endpoint must surface a 404 method-call error.
    foreach (var entry in requests) {
        await FluentActions
            .Invoking(async () => await orchestrator.UnpublishNodesAsync(entry).ConfigureAwait(false))
            .Should()
            .ThrowAsync<MethodCallStatusException>()
            .WithMessage($"{{\"Message\":\"Response 404 Endpoint not found: {entry.EndpointUrl}\",\"Details\":{{}}}}")
            .ConfigureAwait(false);
    }

    // The original configuration still yields exactly two distinct jobs.
    var jobTasks = new List<Task<JobProcessingInstructionModel>>();
    for (var worker = 0; worker < 10; worker++) {
        jobTasks.Add(orchestrator.GetAvailableJobAsync(worker.ToString(), new JobRequestModel()));
    }
    await Task.WhenAll(jobTasks).ConfigureAwait(false);

    jobTasks.Count(t => t.Result != null)
        .Should()
        .Be(2);

    jobTasks
        .Where(t => t.Result != null)
        .Select(t => t.Result.Job.JobConfiguration)
        .Distinct()
        .Count()
        .Should()
        .Be(2);
}
/// <summary>
/// Stress test: publishes 100 endpoints with 1000 nodes each, then publishes
/// and unpublishes one extra node per endpoint, validating the produced jobs
/// after every step.
/// </summary>
public async Task PublishNodesStressTest() {
    var standaloneCliModelProviderMock = new Mock<IStandaloneCliModelProvider>();
    var agentConfigProviderMock = new Mock<IAgentConfigProvider>();
    var engineConfigMock = new Mock<IEngineConfiguration>();
    var clientConfignMock = new Mock<IClientServicesConfig>();
    var newtonSoftJsonSerializer = new NewtonSoftJsonSerializer();
    var jobSerializer = new PublisherJobSerializer(newtonSoftJsonSerializer);
    var logger = TraceLogger.Create();
    var publishedNodesJobConverter = new PublishedNodesJobConverter(logger, newtonSoftJsonSerializer,
        engineConfigMock.Object, clientConfignMock.Object);

    // Bug fix: the original opened the file with FileMode.Open, which does not
    // truncate - a pre-existing file longer than two bytes would keep stale
    // trailing content after "[]" was written, leaving invalid JSON behind.
    // File.WriteAllText creates/truncates and writes UTF-8 in one call.
    File.WriteAllText(_tempFile, "[]");

    var standaloneCliModel = new StandaloneCliModel {
        PublishedNodesFile = _tempFile,
        PublishedNodesSchemaFile = "Storage/publishednodesschema.json"
    };
    standaloneCliModelProviderMock.Setup(p => p.StandaloneCliModel).Returns(standaloneCliModel);
    agentConfigProviderMock.Setup(p => p.Config).Returns(new AgentConfigModel());
    var publishedNodesProvider = new PublishedNodesProvider(standaloneCliModelProviderMock.Object, logger);

    var orchestrator = new StandaloneJobOrchestrator(
        publishedNodesJobConverter,
        standaloneCliModelProviderMock.Object,
        agentConfigProviderMock.Object,
        jobSerializer,
        logger,
        publishedNodesProvider,
        newtonSoftJsonSerializer
    );

    var numberOfEndpoints = 100;
    var numberOfNodes = 1000;

    // Build the initial payload: one entry per endpoint, numberOfNodes each.
    var payload = new List<PublishedNodesEntryModel>();
    for (int endpointIndex = 0; endpointIndex < numberOfEndpoints; ++endpointIndex) {
        var model = new PublishedNodesEntryModel {
            EndpointUrl = new Uri($"opc.tcp://server{endpointIndex}:49580"),
        };
        model.OpcNodes = new List<OpcNodeModel>();
        for (var nodeIndex = 0; nodeIndex < numberOfNodes; ++nodeIndex) {
            model.OpcNodes.Add(new OpcNodeModel {
                Id = $"ns=2;s=Node-Server-{nodeIndex}",
            });
        }
        payload.Add(model);
    }

    // Publish all nodes.
    foreach (var request in payload) {
        await FluentActions
            .Invoking(async () => await orchestrator.PublishNodesAsync(request).ConfigureAwait(false))
            .Should()
            .NotThrowAsync()
            .ConfigureAwait(false);
    }

    // Local helper: asserts that exactly expectedNumberOfEndpoints distinct jobs
    // exist and that each carries expectedNumberOfNodes published variables.
    async Task CheckEndpointsAndNodes(
        int expectedNumberOfEndpoints,
        int expectedNumberOfNodes
    ) {
        var tasks = new List<Task<JobProcessingInstructionModel>>();
        // Request one more job than expected so the surplus request comes back empty.
        for (var i = 0; i < expectedNumberOfEndpoints + 1; i++) {
            tasks.Add(orchestrator.GetAvailableJobAsync(i.ToString(), new JobRequestModel()));
        }
        await Task.WhenAll(tasks).ConfigureAwait(false);

        tasks.Count(t => t.Result != null)
            .Should()
            .Be(expectedNumberOfEndpoints);

        var distinctConfigurations = tasks
            .Where(t => t.Result != null)
            .Select(t => t.Result.Job.JobConfiguration)
            .Distinct();
        distinctConfigurations.Count()
            .Should()
            .Be(expectedNumberOfEndpoints);

        var writerGroups = tasks
            .Where(t => t.Result != null)
            .Select(t => jobSerializer.DeserializeJobConfiguration(
                t.Result.Job.JobConfiguration, t.Result.Job.JobConfigurationType) as WriterGroupJobModel);
        writerGroups.Select(
            jobModel => jobModel.WriterGroup.DataSetWriters
                .Select(writer => writer.DataSet.DataSetSource.PublishedVariables.PublishedData.Count())
                .Sum()
        ).Count(v => v == expectedNumberOfNodes)
            .Should()
            .Be(expectedNumberOfEndpoints);
    }

    // Check the initial state.
    await CheckEndpointsAndNodes(numberOfEndpoints, numberOfNodes).ConfigureAwait(false);

    // Publish one more node for each endpoint.
    var payloadDiff = new List<PublishedNodesEntryModel>();
    for (int endpointIndex = 0; endpointIndex < numberOfEndpoints; ++endpointIndex) {
        var model = new PublishedNodesEntryModel {
            EndpointUrl = new Uri($"opc.tcp://server{endpointIndex}:49580"),
            OpcNodes = new List<OpcNodeModel> {
                new OpcNodeModel {
                    Id = $"ns=2;s=Node-Server-{numberOfNodes}",
                }
            }
        };
        payloadDiff.Add(model);
    }

    foreach (var request in payloadDiff) {
        await FluentActions
            .Invoking(async () => await orchestrator.PublishNodesAsync(request).ConfigureAwait(false))
            .Should()
            .NotThrowAsync()
            .ConfigureAwait(false);
    }

    // Check after the addition.
    await CheckEndpointsAndNodes(numberOfEndpoints, numberOfNodes + 1).ConfigureAwait(false);

    // Unpublish the new nodes for each endpoint.
    foreach (var request in payloadDiff) {
        await FluentActions
            .Invoking(async () => await orchestrator.UnpublishNodesAsync(request).ConfigureAwait(false))
            .Should()
            .NotThrowAsync()
            .ConfigureAwait(false);
    }

    // Check we are back to the initial state.
    await CheckEndpointsAndNodes(numberOfEndpoints, numberOfNodes).ConfigureAwait(false);
}