/// <summary>
/// Verifies that a token cancelled before startup still lets the hosted
/// service start and stop cleanly, logging each lifecycle message once.
/// </summary>
public async Task CancellationTokenShallCancelTheService()
{
    // Arrange: cancel the token before the service ever runs.
    var cancellationTokenSource = new CancellationTokenSource();
    cancellationTokenSource.Cancel();
    var store = new InferenceRequestStore(_loggerFactory.Object, _configuration, _kubernetesClient.Object);

    // Act: await Start/Stop so any exception surfaces in the test.
    // (Previously these Tasks were fired and forgotten from a void test
    // method, which hides failures from the test runner.)
    await store.StartAsync(cancellationTokenSource.Token);
    await store.StopAsync(cancellationTokenSource.Token);

    // Give the background loop a moment to emit its log entries without
    // blocking a thread-pool thread (Task.Delay instead of Thread.Sleep).
    await Task.Delay(100);

    // Assert: both lifecycle messages were logged exactly once.
    _logger.VerifyLogging($"Inference Request Store Hosted Service is running.", LogLevel.Information, Times.Once());
    _logger.VerifyLogging($"Inference Request Store Hosted Service is stopping.", LogLevel.Information, Times.Once());
}
/// <summary>
/// Verifies that Take() returns the single Queued inference request read
/// from the CRD list (InProcess items are skipped), and that the item is
/// patched back to the cluster exactly once under its JobId.
/// </summary>
public async Task Take_ShallReturnAJobReadFromCrd()
{
    // Arrange: two InProcess items (must be ignored) and one Queued item
    // (the expected result of Take()).
    var cancellationSource = new CancellationTokenSource();
    var list = new InferenceRequestCustomResourceList();
    list.Items = new List<InferenceRequestCustomResource>();
    list.Items.Add(new InferenceRequestCustomResource
    {
        Spec = new InferenceRequest { State = InferenceRequestState.InProcess },
        Metadata = new V1ObjectMeta { Name = Guid.NewGuid().ToString() }
    });
    list.Items.Add(new InferenceRequestCustomResource
    {
        Spec = new InferenceRequest { State = InferenceRequestState.InProcess },
        Metadata = new V1ObjectMeta { Name = Guid.NewGuid().ToString() }
    });
    list.Items.Add(new InferenceRequestCustomResource
    {
        Spec = new InferenceRequest { State = InferenceRequestState.Queued },
        Metadata = new V1ObjectMeta { Name = Guid.NewGuid().ToString() }
    });

    // First poll returns the list; the second cancels the token (with a
    // short sleep so the loop observes it) and throws to end polling.
    _kubernetesClient
        .SetupSequence(p => p.ListNamespacedCustomObjectWithHttpMessagesAsync(It.IsAny<CustomResourceDefinition>()))
        .Returns(
            Task.FromResult(new HttpOperationResponse<object>
            {
                Body = new object(),
                Response = new HttpResponseMessage { Content = new StringContent(JsonConvert.SerializeObject(list)) }
            }))
        .Returns(() =>
        {
            cancellationSource.Cancel();
            Thread.Sleep(100);
            throw new HttpOperationException("exception");
        });

    _kubernetesClient
        .Setup(p => p.PatchNamespacedCustomObjectWithHttpMessagesAsync(It.IsAny<CustomResourceDefinition>(), It.IsAny<CustomResource>(), It.IsAny<string>()))
        .Returns(Task.FromResult(new HttpOperationResponse<object>
        {
            Body = new object(),
            Response = new HttpResponseMessage(HttpStatusCode.OK)
        }));

    var store = new InferenceRequestStore(_loggerFactory.Object, _configuration, _kubernetesClient.Object);

    // Act
    await store.StartAsync(cancellationSource.Token);
    var expectedItem = list.Items.Last();
    var item = await store.Take(cancellationSource.Token);

    // Assert: the Queued item was dequeued and the lifecycle was logged.
    Assert.Equal(expectedItem.Spec.JobId, item.JobId);
    _logger.VerifyLogging($"Inference request added to queue {item.JobId}", LogLevel.Debug, Times.AtLeastOnce());
    // NOTE(fix): this literal was split across a line break in the source,
    // which does not compile; rejoined to match the message asserted in
    // CancellationTokenShallCancelTheService.
    _logger.VerifyLogging($"Inference Request Store Hosted Service is running.", LogLevel.Information, Times.Once());

    await store.StopAsync(cancellationSource.Token);
    _logger.VerifyLogging($"Inference Request Store Hosted Service is stopping.", LogLevel.Information, Times.Once());

    // The taken item must have been patched back exactly once, keyed by JobId.
    _kubernetesClient.Verify(
        p => p.PatchNamespacedCustomObjectWithHttpMessagesAsync(
            It.IsAny<CustomResourceDefinition>(),
            It.IsAny<InferenceRequestCustomResource>(),
            expectedItem.Spec.JobId),
        Times.Once());
}