/// <summary>
/// Verifies that ScoreAsync builds exactly one predict request, sends it
/// exactly once, and returns the float values from the response tensor.
/// </summary>
public async Task Invokes_predict_async_call()
{
    // Arrange: a DT_FLOAT tensor of shape [3] carrying the values we expect back.
    var expected = new[] { 1f, 2f, 3f };
    var tensor = new TensorProto { Dtype = DataType.DtFloat };
    tensor.FloatVal.Add(expected);
    tensor.TensorShape = new TensorShapeProto();
    tensor.TensorShape.Dim.Add(new TensorShapeProto.Types.Dim());
    tensor.TensorShape.Dim[0].Size = 3;

    var request = new PredictRequest();
    var response = new PredictResponse();
    response.Outputs.Add("output_alias", tensor);

    var serviceMock = new Mock<IPredictionServiceClient>();
    serviceMock.Setup(x => x.PredictAsync(request)).ReturnsAsync(response).Verifiable();
    var requestMock = new Mock<IScoringRequest>();
    requestMock.Setup(x => x.MakePredictRequest()).Returns(() => request);

    // Act
    var client = new ScoringClient(serviceMock.Object);
    var result = await client.ScoreAsync(requestMock.Object);

    // Assert: values round-trip and each collaborator was invoked exactly once.
    Assert.Equal(expected, result);
    requestMock.Verify(x => x.MakePredictRequest(), Times.Exactly(1));
    serviceMock.Verify(x => x.PredictAsync(request), Times.Exactly(1));
}
/// <summary>
/// Verifies that PredictionServiceClient.PredictAsync forwards a fully-populated
/// PredictRequest to the underlying gRPC stub and surfaces the stub's response
/// unchanged, for both the CallSettings and CancellationToken overloads.
/// </summary>
public async stt::Task PredictRequestObjectAsync()
{
    // Strict mock: any call not explicitly set up fails the test.
    moq::Mock <PredictionService.PredictionServiceClient> mockGrpcClient = new moq::Mock <PredictionService.PredictionServiceClient>(moq::MockBehavior.Strict);
    PredictRequest request = new PredictRequest
    {
        EndpointAsEndpointName = EndpointName.FromProjectLocationEndpoint("[PROJECT]", "[LOCATION]", "[ENDPOINT]"),
        Instances = { new wkt::Value(), },
        Parameters = new wkt::Value(),
    };
    PredictResponse expectedResponse = new PredictResponse
    {
        Predictions = { new wkt::Value(), },
        DeployedModelId = "deployed_model_idf0bd41af",
    };
    // Stub returns a pre-completed async unary call wrapping the canned response.
    mockGrpcClient.Setup(x => x.PredictAsync(request, moq::It.IsAny <grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall <PredictResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    PredictionServiceClient client = new PredictionServiceClientImpl(mockGrpcClient.Object, null);
    // Overload 1: explicit CallSettings built from a cancellation token.
    PredictResponse responseCallSettings = await client.PredictAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expectedResponse, responseCallSettings);
    // Overload 2: bare CancellationToken.
    PredictResponse responseCancellationToken = await client.PredictAsync(request, st::CancellationToken.None);
    xunit::Assert.Same(expectedResponse, responseCancellationToken);
    mockGrpcClient.VerifyAll();
}
/// <summary>
/// Forwards a predict request to the registered model's REST endpoint and
/// normalizes the response: intents sorted by descending score and capped
/// by the request's IntentLimit. Returns null when the model call fails.
/// NOTE(review): this block appears truncated in the current view — the try
/// block and method are not closed here; the catch/closing braces presumably
/// follow outside this excerpt.
/// </summary>
public async Task <PredictResponse?> Submit(string modelName, PredictRequest predictRequest, CancellationToken cancellationToken)
{
    predictRequest.VerifyNotNull(nameof(predictRequest));
    // Fail fast when no HTTP client was registered for this model name.
    _clients.TryGetValue(modelName, out ModelRestApi? modelRestApi)
        .VerifyAssert(x => x == true, $"VersionId={modelName} http client is not registered");
    _logger.LogTrace($"{nameof(Submit)}: Calling model {modelName}, Url={modelRestApi!.ApiUrl.GetRequestUrl()}");
    try
    {
        PostResponse <PredictResponse> response = await modelRestApi.PostRequest(predictRequest);
        if (response.StatusCode != System.Net.HttpStatusCode.OK)
        {
            // Non-OK status: log and signal failure to the caller with null.
            _logger.LogError($"Format error for failed call to model {modelName}, Url={modelRestApi!.ApiUrl.GetRequestUrl()}");
            return(null);
        }
        return(new PredictResponse
        {
            Model = new Model
            {
                Name = response.Value !.Model?.Name,
                Version = response.Value.Model?.Version,
            },
            Request = response.Value.Request,
            // Order by score, then cap to the requested limit (or keep all intents).
            Intents = (response.Value.Intents ?? Array.Empty <Intent>())
                .OrderByDescending(x => x.Score)
                .Take(predictRequest.IntentLimit ?? response.Value.Intents?.Count ?? 0)
                .ToList(),
        });
/// <summary>
/// Predicts a rating for the given user/movie pair by calling a TensorFlow
/// Serving model over gRPC and returning the first float of the response.
/// NOTE(review): method name has a typo ("PredicRating") — renaming would
/// break existing callers, so it is left as-is; confirm before fixing.
/// </summary>
/// <param name="userId">User identifier fed to the model's "userId" input.</param>
/// <param name="movieId">Movie identifier fed to the model's "movieId" input.</param>
/// <returns>The model's predicted rating.</returns>
public float PredicRating(int userId, int movieId)
{
    Channel channel = new Channel(ConfigurationManager.AppSettings["ServingRpcChannel"], ChannelCredentials.Insecure);
    try
    {
        var client = new PredictionService.PredictionServiceClient(channel);
        // Model identity (name/version/signature) comes from app configuration.
        var request = new PredictRequest()
        {
            ModelSpec = new ModelSpec()
            {
                Name = ConfigurationManager.AppSettings["ModelName"],
                Version = int.Parse(ConfigurationManager.AppSettings["ModelVersion"]),
                SignatureName = ConfigurationManager.AppSettings["ModelSignature"]
            }
        };
        var proto1 = GetInput(userId);
        var proto2 = GetInput(movieId);
        request.Inputs.Add("userId", proto1);
        request.Inputs.Add("movieId", proto2);
        var result = client.Predict(request);
        // Outputs are re-parsed from their string form as JSON; presumably the
        // model returns a single float tensor — confirm against the serving model.
        var response = JsonConvert.DeserializeObject <ResponseObject>(result.Outputs.ToString());
        return((float)response.outputs.floatVal[0]);
    }
    finally
    {
        // Always release the channel, even when the call throws.
        channel.ShutdownAsync().Wait();
    }
}
/// <summary>
/// Verifies that the flattened PredictAsync(name, payload, params) overload
/// composes the expected PredictRequest and returns the stub's response.
/// </summary>
public async Task PredictAsync()
{
    // Strict stub: only the calls configured below are permitted.
    var grpcMock = new Mock<PredictionService.PredictionServiceClient>(MockBehavior.Strict);
    grpcMock.Setup(x => x.CreateOperationsClient())
        .Returns(new Mock<Operations.OperationsClient>().Object);
    var expectedRequest = new PredictRequest
    {
        ModelName = new ModelName("[PROJECT]", "[LOCATION]", "[MODEL]"),
        Payload = new ExamplePayload(),
        Params = { },
    };
    var expectedResponse = new PredictResponse();
    grpcMock.Setup(x => x.PredictAsync(expectedRequest, It.IsAny<CallOptions>()))
        .Returns(new Grpc.Core.AsyncUnaryCall<PredictResponse>(Task.FromResult(expectedResponse), null, null, null, null));

    PredictionServiceClient client = new PredictionServiceClientImpl(grpcMock.Object, null);
    var name = new ModelName("[PROJECT]", "[LOCATION]", "[MODEL]");
    var payload = new ExamplePayload();
    IDictionary<string, string> @params = new Dictionary<string, string>();

    PredictResponse actual = await client.PredictAsync(name, payload, @params);

    Assert.Same(expectedResponse, actual);
    grpcMock.VerifyAll();
}
/// <summary>
/// Builds the predict request for this scoring request, exposing the
/// pre-built tensor under the fixed "images" input alias.
/// </summary>
public PredictRequest MakePredictRequest()
{
    var predictRequest = new PredictRequest();
    predictRequest.ModelSpec = _modelSpec;
    predictRequest.Inputs["images"] = _proto;
    return predictRequest;
}
/// <summary>
/// Builds the predict request for this scoring request, exposing the
/// pre-built tensor under the configured input name.
/// </summary>
public PredictRequest MakePredictRequest()
{
    var predictRequest = new PredictRequest();
    predictRequest.ModelSpec = _modelSpec;
    predictRequest.Inputs[this._inputName] = _proto;
    return predictRequest;
}
/// <summary>
/// Console demo: connects to a TensorFlow Serving instance hosting an MNIST
/// model, classifies digit images 0-9 from a configured folder, and prints
/// the predicted digit with its probability for each.
/// </summary>
static void Main(string[] args)
{
    // Create gRPC channel to the serving host (insecure, address from config).
    var channel = new Channel(ConfigurationManager.AppSettings["ServerHost"], ChannelCredentials.Insecure);
    var client = new PredictionService.PredictionServiceClient(channel);
    // Check that the MNIST model is available and report its version.
    var responce = client.GetModelMetadata(new GetModelMetadataRequest() { ModelSpec = new ModelSpec() { Name = "mnist" }, MetadataField = { "signature_def" } });
    Console.WriteLine($"Model Available: {responce.ModelSpec.Name} Ver.{responce.ModelSpec.Version}");
    var imagesFolder = ConfigurationManager.AppSettings["ImagesFolder"];
    // Predict each digit image 0..9 from the example folder.
    for (int number = 0; number < 10; number++)
    {
        // Create the prediction request for the model's image signature.
        var request = new PredictRequest() { ModelSpec = new ModelSpec() { Name = "mnist", SignatureName = ModelMethodClasses.PredictImages } };
        // Add image tensor (flattened pixels, normalized by 255).
        using (Stream stream = new FileStream($"{AppDomain.CurrentDomain.BaseDirectory}/{imagesFolder}/{number}.bmp", FileMode.Open))
        {
            request.Inputs.Add("images", TensorBuilder.CreateTensorFromImage(stream, 255.0f));
        }
        // Add keep_prob tensor (dropout keep probability of 0.5).
        request.Inputs.Add("keep_prob", TensorBuilder.CreateTensor(0.5f));
        var predictResponse = client.Predict(request);
        // The predicted digit is the index of the max score in the output tensor.
        var maxValue = predictResponse.Outputs["scores"].FloatVal.Max();
        var predictedValue = predictResponse.Outputs["scores"].FloatVal.IndexOf(maxValue);
        Console.WriteLine($"Predict: {number} {(number == predictedValue ? "Y" : "N")}");
        Console.WriteLine($"Result value: {predictedValue}, probability: {maxValue}");
        Console.WriteLine($"All values: {predictResponse.Outputs["scores"].FloatVal}");
        Console.WriteLine("");
    }
    // Release the channel before exiting.
    channel.ShutdownAsync().Wait();
}
/// <summary>
/// Builds the underlying PredictRequest from named inputs, converting each
/// (values, shape) pair into a tensor proto keyed by its input name.
/// </summary>
/// <param name="inputs">Map of input alias to (float values, dimension sizes).</param>
public FloatRequest(IDictionary <string, Tuple <float[], int[]> > inputs)
{
    _proto = new PredictRequest { ModelSpec = new ModelSpec() };
    foreach (var pair in inputs)
    {
        _proto.Inputs[pair.Key] = makeProto(pair.Value);
    }
}
/// <summary>
/// Scores raw input bytes: converts them to a tensor, wraps them in a predict
/// request, calls the model with a deadline of TIMEOUT seconds, and parses
/// the response into a PredictionResult.
/// </summary>
public PredictionResult Predict(byte[] byteArray)
{
    TensorProto tensorProto = this.GetTensorProto(byteArray);
    PredictRequest request = this.GetPredictRequest(tensorProto);
    // Bound the RPC so a stalled server cannot hang the caller.
    var callOptions = new CallOptions(deadline: DateTime.UtcNow.AddSeconds(TIMEOUT));
    return this.ParseResponse(_client.Predict(request, callOptions));
}
/// <summary>Snippet for Predict</summary>
/// <remarks>Auto-generated sample; here PredictAsync is a paged API returning prediction results.</remarks>
public async Task PredictRequestObjectAsync()
{
    // Snippet: PredictAsync(PredictRequest, CallSettings)
    // Create client
    PredictionServiceClient predictionServiceClient = await PredictionServiceClient.CreateAsync();
    // Initialize request argument(s)
    PredictRequest request = new PredictRequest
    {
        PlacementName = PlacementName.FromProjectLocationCatalogEventStorePlacement("[PROJECT]", "[LOCATION]", "[CATALOG]", "[EVENT_STORE]", "[PLACEMENT]"),
        UserEvent = new UserEvent(),
        Filter = "",
        DryRun = false,
        Params = { { "", new Value() }, },
        Labels = { { "", "" }, },
    };
    // Make the request
    PagedAsyncEnumerable <PredictResponse, PredictResponse.Types.PredictionResult> response = predictionServiceClient.PredictAsync(request);
    // Iterate over all response items, lazily performing RPCs as required
    await response.ForEachAsync((PredictResponse.Types.PredictionResult item) =>
    {
        // Do something with each item
        Console.WriteLine(item);
    });
    // Or iterate over pages (of server-defined size), performing one RPC per page
    await response.AsRawResponses().ForEachAsync((PredictResponse page) =>
    {
        // Do something with each page of items
        Console.WriteLine("A page of results:");
        foreach (PredictResponse.Types.PredictionResult item in page)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
    });
    // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
    int pageSize = 10;
    Page <PredictResponse.Types.PredictionResult> singlePage = await response.ReadPageAsync(pageSize);
    // Do something with the page of items
    Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
    foreach (PredictResponse.Types.PredictionResult item in singlePage)
    {
        // Do something with each item
        Console.WriteLine(item);
    }
    // Store the pageToken, for when the next page is required.
    string nextPageToken = singlePage.NextPageToken;
    // End snippet
}
/// <summary>
/// Runs object detection on the given bitmap: encodes it as a 1x300x300x3
/// uint8 tensor under the "data" input alias and converts the model's
/// response into detection results.
/// </summary>
/// <param name="bmp">Image to run inference on.</param>
/// <returns>Detections parsed from the model response.</returns>
public async Task <IEnumerable <Detection> > Predict(Bitmap bmp)
{
    if (_client == null)
    {
        throw new ApplicationException(nameof(_client));
    }

    // Image format the model expects: 300x300, 3-channel, 24bpp RGB.
    const int channels = 3;
    const int width = 300;
    const int height = 300;
    const PixelFormat format = PixelFormat.Format24bppRgb;

    // Shape [1, height, width, channels] — a single-image batch.
    var dims = new[]
    {
        new TensorShapeProto.Types.Dim { Name = "", Size = 1 },
        new TensorShapeProto.Types.Dim { Name = nameof(height), Size = height },
        new TensorShapeProto.Types.Dim { Name = nameof(width), Size = width },
        new TensorShapeProto.Types.Dim { Name = nameof(channels), Size = channels },
    };
    var shape = new TensorShapeProto { Dim = { dims } };
    var proto = new TensorProto
    {
        TensorShape = shape,
        Dtype = Tensorflow.DataType.DtUint8,
        TensorContent = ToByteString(bmp, channels, width, height, format)
    };

    var request = new PredictRequest { ModelSpec = new ModelSpec { Name = _model } };
    request.Inputs.Add("data", proto);

    // Send the request for inference and translate the response.
    PredictResponse response = await _client.PredictAsync(request);
    return ToDetections(response);
}
/// <summary>
/// Wraps a tensor in a PredictRequest addressed to the configured model,
/// signature, and input key.
/// </summary>
private PredictRequest GetPredictRequest(TensorProto tensorProto)
{
    return new PredictRequest
    {
        ModelSpec = new ModelSpec { Name = _modelSpec, SignatureName = _signatureName },
        Inputs = { { _inputsKey, tensorProto } },
    };
}
/// <summary>
/// Verifies that PredictionServiceClient.PredictAsync forwards the request to
/// the gRPC stub unchanged and surfaces the stub's canned response, for both
/// the CallSettings and CancellationToken overloads.
/// </summary>
public async stt::Task PredictRequestObjectAsync()
{
    // Strict mock: unexpected calls fail the test.
    moq::Mock <PredictionService.PredictionServiceClient> mockGrpcClient = new moq::Mock <PredictionService.PredictionServiceClient>(moq::MockBehavior.Strict);
    PredictRequest request = new PredictRequest
    {
        Placement = "placementb440552a",
        UserEvent = new UserEvent(),
        PageSize = -226905851,
        PageToken = "page_tokenf09e5538",
        Filter = "filtere47ac9b2",
        ValidateOnly = true,
        Params = { { "key8a0b6e3c", new wkt::Value() }, },
        Labels = { { "key8a0b6e3c", "value60c16320" }, },
    };
    PredictResponse expectedResponse = new PredictResponse
    {
        Results = { new PredictResponse.Types.PredictionResult(), },
        AttributionToken = "attribution_token14371a88",
        MissingIds = { "missing_ids9e3bd4de", },
        ValidateOnly = true,
    };
    // Stub returns a pre-completed async unary call wrapping the canned response.
    mockGrpcClient.Setup(x => x.PredictAsync(request, moq::It.IsAny <grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall <PredictResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    PredictionServiceClient client = new PredictionServiceClientImpl(mockGrpcClient.Object, null);
    // Overload 1: explicit CallSettings built from a cancellation token.
    PredictResponse responseCallSettings = await client.PredictAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expectedResponse, responseCallSettings);
    // Overload 2: bare CancellationToken.
    PredictResponse responseCancellationToken = await client.PredictAsync(request, st::CancellationToken.None);
    xunit::Assert.Same(expectedResponse, responseCancellationToken);
    mockGrpcClient.VerifyAll();
}
/// <summary>Snippet for Predict</summary>
/// <remarks>
/// This snippet has been automatically generated for illustrative purposes only.
/// It may require modifications to work in your environment.
/// </remarks>
public void PredictRequestObject()
{
    // Create client
    PredictionServiceClient predictionServiceClient = PredictionServiceClient.Create();
    // Initialize request argument(s)
    PredictRequest request = new PredictRequest
    {
        ModelName = ModelName.FromProjectLocationModel("[PROJECT]", "[LOCATION]", "[MODEL]"),
        Payload = new ExamplePayload(),
        Params = { { "", "" }, },
    };
    // Make the request (synchronous RPC)
    PredictResponse response = predictionServiceClient.Predict(request);
}
/// <summary>
/// Verifies that ScoringClient retries after a transient RPC failure: the first
/// PredictAsync call throws the given transient status, the retry succeeds, and
/// the final result matches the canned tensor values.
/// </summary>
/// <param name="transientStatusCode">gRPC status code treated as retryable.</param>
public async Task Retries_transient_exceptions(StatusCode transientStatusCode)
{
    var exception = new RpcException(new Status(transientStatusCode, string.Empty));
    var expectedResultValues = new[] { 1f, 2f, 3f };
    // Canned DT_FLOAT tensor of shape [3] carrying the expected values.
    var outputTensorProto = new TensorProto { Dtype = DataType.DtFloat };
    outputTensorProto.FloatVal.Add(expectedResultValues);
    outputTensorProto.TensorShape = new TensorShapeProto();
    outputTensorProto.TensorShape.Dim.Add(new TensorShapeProto.Types.Dim());
    outputTensorProto.TensorShape.Dim[0].Size = 3;
    var predictRequest = new PredictRequest();
    var predictResponse = new PredictResponse();
    predictResponse.Outputs.Add("output_alias", outputTensorProto);
    var predictionServiceClientMock = new Mock <IPredictionServiceClient>();
    // Throw-once stub: the closure clears `exception` after the first call so
    // the retry path returns the canned response.
    predictionServiceClientMock.Setup(x => x.PredictAsync(predictRequest)).Returns(async() =>
    {
        await Task.CompletedTask;
        if (exception != null)
        {
            var x = exception;
            exception = null;
            throw x;
        }
        return(predictResponse);
    }).Verifiable();
    var scoringRequestMock = new Mock <IScoringRequest>();
    scoringRequestMock.Setup(x => x.MakePredictRequest()).Returns(() => predictRequest);
    var scoringClient = new ScoringClient(predictionServiceClientMock.Object);
    var result = await scoringClient.ScoreAsync(scoringRequestMock.Object);
    Assert.Equal(expectedResultValues, result);
    // The request is built once but PredictAsync is attempted twice (fail + retry).
    scoringRequestMock.Verify(x => x.MakePredictRequest(), Times.Exactly(1));
    predictionServiceClientMock.Verify(x => x.PredictAsync(predictRequest), Times.Exactly(2));
}
string resultString = ""; // final result string

/// <summary>
/// Sends one encoded image to the TensorFlow Serving "inception" model and
/// returns (class label, score) pairs decoded from the parallel "classes"
/// and "scores" output tensors. gRPC errors are logged and rethrown.
/// </summary>
/// <param name="imageData">Encoded image bytes to classify.</param>
/// <returns>One (class, score) tuple per predicted class.</returns>
List <Tuple <string, float> > SendAndReceive(byte[] imageData)
{
    var tf_channel = new Channel(ipAddress, Convert.ToInt32(port), ChannelCredentials.Insecure);
    var tf_client = new PredictionService.PredictionServiceClient(tf_channel);
    try
    {
        // Create prediction request targeting the model's predict_images signature.
        var request = new PredictRequest()
        {
            ModelSpec = new ModelSpec() { Name = "inception", SignatureName = "predict_images" }
        };
        // Add image tensor to request
        var imageTensor = TensorProtoBuilder.TensorProtoFromImage(imageData);
        request.Inputs.Add("images", imageTensor);

        // Send request and get response
        PredictResponse predictResponse = tf_client.Predict(request);

        // Decode the parallel classes/scores output tensors.
        string[] classes = TensorProtoDecoder.TensorProtoToStringArray(predictResponse.Outputs["classes"]);
        float[] scores = TensorProtoDecoder.TensorProtoToFloatArray(predictResponse.Outputs["scores"]);
        var predictResult = new List <Tuple <string, float> >();
        for (int i = 0; i < classes.Length; i++)
        {
            predictResult.Add(new Tuple <string, float>(classes[i], scores[i]));
        }
        return predictResult;
    }
    catch (Exception e)
    {
        // Surface gRPC status details before rethrowing for the caller to handle.
        if (e is RpcException re)
        {
            Debug.Log(re.Status.Detail);
            Debug.Log(re.StatusCode);
        }
        Debug.Log(e.Message);
        throw;
    }
    finally
    {
        // BUG FIX: the channel was previously never shut down (resource leak,
        // and inconsistent with the other clients in this codebase that shut
        // down in a finally block). Release it on every path.
        tf_channel.ShutdownAsync().Wait();
    }
}
/// <summary>Snippet for Predict</summary>
/// <remarks>Auto-generated sample showing the request-object overload of Predict.</remarks>
public void Predict_RequestObject()
{
    // Snippet: Predict(PredictRequest,CallSettings)
    // Create client
    PredictionServiceClient predictionServiceClient = PredictionServiceClient.Create();
    // Initialize request argument(s)
    PredictRequest request = new PredictRequest
    {
        ModelName = new ModelName("[PROJECT]", "[LOCATION]", "[MODEL]"),
        Payload = new ExamplePayload(),
    };
    // Make the request (synchronous RPC)
    PredictResponse response = predictionServiceClient.Predict(request);
    // End snippet
}
/// <summary>
/// End-to-end check: posts a predict request to the hosted test model via the
/// REST API and verifies the response against the submitted text.
/// </summary>
public async Task GivenTestModel_WhenUsed_ShouldResponed()
{
    TestWebsiteHost host = await TestApplication.GetHost();
    await host.WaitForStartup();

    var request = new PredictRequest { Request = "I am sad" };

    var api = new ModelRestApi(host.Client);
    PredictResponse predictResponse = (await api.PostRequest(request)).Value !;

    Verify(predictResponse, request.Request);
}
/// <summary>
/// Scores a two-attribute sample against the local "Sample2" serving model
/// (version 1, "Predict" signature) and returns the first output float.
/// </summary>
public float PredictIdentity(float attribute1, float attribute2)
{
    var channel = new Channel("localhost:7000", ChannelCredentials.Insecure);
    try
    {
        var client = new PredictionService.PredictionServiceClient(channel);
        var request = new PredictRequest()
        {
            ModelSpec = new ModelSpec() { Name = "Sample2", Version = 1, SignatureName = "Predict" }
        };

        // Input tensor shape [1, 2]: first dimension is always the number of
        // samples submitted for prediction (one here), second holds the attributes.
        var proto = new TensorProto { TensorShape = new TensorShapeProto(), Dtype = DataType.DtFloat };
        proto.TensorShape.Dim.Add(new TensorShapeProto.Types.Dim() { Size = 1 });
        proto.TensorShape.Dim.Add(new TensorShapeProto.Types.Dim() { Size = 2 });
        proto.FloatVal.Add(attribute1);
        proto.FloatVal.Add(attribute2);
        request.Inputs.Add("inputs", proto);

        var result = client.Predict(request);
        var response = JsonConvert.DeserializeObject <ServingResponse>(result.Outputs.ToString());
        return (float)response.outputs.floatVal[0];
    }
    finally
    {
        // Release the channel whether or not the call succeeded.
        channel.ShutdownAsync().Wait();
    }
}
/// <summary>Snippet for Predict</summary>
/// <remarks>Auto-generated sample; the model is addressed by its raw resource name.</remarks>
public void PredictRequestObject()
{
    // Snippet: Predict(PredictRequest, CallSettings)
    // Create client
    PredictionServiceClient predictionServiceClient = PredictionServiceClient.Create();
    // Initialize request argument(s)
    PredictRequest request = new PredictRequest
    {
        Name = "",
        Payload = new ExamplePayload(),
        Params = { { "", "" }, },
    };
    // Make the request (synchronous RPC)
    PredictResponse response = predictionServiceClient.Predict(request);
    // End snippet
}
/// <summary>Snippet for Predict</summary>
/// <remarks>Auto-generated sample; the request targets a deployed endpoint with instance/parameter values.</remarks>
public void PredictRequestObject()
{
    // Snippet: Predict(PredictRequest, CallSettings)
    // Create client
    PredictionServiceClient predictionServiceClient = PredictionServiceClient.Create();
    // Initialize request argument(s)
    PredictRequest request = new PredictRequest
    {
        EndpointAsEndpointName = EndpointName.FromProjectLocationEndpoint("[PROJECT]", "[LOCATION]", "[ENDPOINT]"),
        Instances = { new Value(), },
        Parameters = new Value(),
    };
    // Make the request (synchronous RPC)
    PredictResponse response = predictionServiceClient.Predict(request);
    // End snippet
}
/// <summary>
/// API entry point for predict requests. Validates the payload, gates on the
/// host's execution state, forwards the question to the model, and returns
/// intents sorted by descending score, capped by the request's IntentLimit.
/// </summary>
/// <param name="request">Incoming predict request from the HTTP body.</param>
/// <returns>200 with the normalized response, 400/503-style statuses otherwise.</returns>
public async Task <ActionResult <PredictResponse> > Submit([FromBody] PredictRequest request)
{
    _logger.LogInformation($"{nameof(Submit)}: {_json.Serialize(request)}");
    if (!request.IsValidRequest())
    {
        return(StatusCode((int)HttpStatusCode.BadRequest));
    }
    switch (_executionContext.State)
    {
    // Model host is still coming up — tell the caller we're not available yet.
    case ExecutionState.Booting:
    case ExecutionState.Starting:
    case ExecutionState.Restarting:
        return(ReturnNotAvailable());

    case ExecutionState.Running:
        try
        {
            // `Request` is the current field; fall back to the legacy `Sentence`.
            PredictResponse hostResponse = await _predict.Submit(new Question { Sentence = request.Request ?? request.Sentence });
            _logger.LogInformation($"{nameof(Submit)} answer: {_json.Serialize(hostResponse)}");
            var result = new PredictResponse
            {
                Model = hostResponse.Model,
                Request = hostResponse.Request,
                // Highest-scoring intents first, capped by the requested limit.
                Intents = hostResponse.Intents
                          .OrderByDescending(x => x.Score)
                          .Take(request.IntentLimit ?? int.MaxValue)
                          .ToList(),
            };
            return(Ok(result));
        }
        catch (Exception ex)
        {
            // Log and rethrow so the framework produces the error response.
            _logger.LogError($"Exception from model. Ex={ex}");
            throw;
        }

    default:
        _logger.LogError($"Failed: ExecutionState={_executionContext.State}");
        return(StatusCode((int)HttpStatusCode.InternalServerError));
    }
}
/// <summary>Snippet for PredictAsync</summary>
/// <remarks>Auto-generated sample showing the async request-object overload.</remarks>
public async Task PredictAsync_RequestObject()
{
    // Snippet: PredictAsync(PredictRequest,CallSettings)
    // Additional: PredictAsync(PredictRequest,CancellationToken)
    // Create client
    PredictionServiceClient predictionServiceClient = await PredictionServiceClient.CreateAsync();
    // Initialize request argument(s)
    PredictRequest request = new PredictRequest
    {
        ModelName = new ModelName("[PROJECT]", "[LOCATION]", "[MODEL]"),
        Payload = new ExamplePayload(),
    };
    // Make the request (awaits the RPC completion)
    PredictResponse response = await predictionServiceClient.PredictAsync(request);
    // End snippet
}
/// <summary>
/// End-to-end check for the deprecated question API: posts a sentence via the
/// obsolete client method and verifies the model's response.
/// </summary>
public async Task GivenTestModelOldApi_WhenUsed_ShouldResponed()
{
    TestWebsiteHost host = await TestApplication.GetHost();
    await host.WaitForStartup();

    var question = new PredictRequest { Sentence = "I am happy" };

    // Deliberately exercises the obsolete endpoint; suppress the deprecation warning.
#pragma warning disable CS0618 // Type or member is obsolete
    PredictResponse predictResponse = await host.Client.PostMlQuestion(question);
#pragma warning restore CS0618 // Type or member is obsolete

    Verify(predictResponse, question.Sentence);
}
/// <summary>
/// Verifies that the request-object PredictAsync overload passes the request
/// through to the gRPC stub and returns the stub's response instance.
/// </summary>
public async Task PredictAsync2()
{
    // Strict stub: only the configured PredictAsync call is permitted.
    var grpcMock = new Mock<PredictionService.PredictionServiceClient>(MockBehavior.Strict);
    var request = new PredictRequest
    {
        ModelName = new ModelName("[PROJECT]", "[LOCATION]", "[MODEL]"),
        Payload = new ExamplePayload(),
    };
    var expectedResponse = new PredictResponse();
    grpcMock.Setup(x => x.PredictAsync(request, It.IsAny<CallOptions>()))
        .Returns(new Grpc.Core.AsyncUnaryCall<PredictResponse>(Task.FromResult(expectedResponse), null, null, null, null));

    PredictionServiceClient client = new PredictionServiceClientImpl(grpcMock.Object, null);
    PredictResponse actual = await client.PredictAsync(request);

    Assert.Same(expectedResponse, actual);
    grpcMock.VerifyAll();
}
/// <summary>
/// Verifies that the synchronous request-object Predict overload passes the
/// request through to the gRPC stub and returns the stub's response instance.
/// </summary>
public void Predict2()
{
    // Strict stub: only the configured Predict call is permitted.
    var grpcMock = new Mock<PredictionService.PredictionServiceClient>(MockBehavior.Strict);
    var request = new PredictRequest
    {
        ModelName = new ModelName("[PROJECT]", "[LOCATION]", "[MODEL]"),
        Payload = new ExamplePayload(),
    };
    var expectedResponse = new PredictResponse();
    grpcMock.Setup(x => x.Predict(request, It.IsAny<CallOptions>()))
        .Returns(expectedResponse);

    PredictionServiceClient client = new PredictionServiceClientImpl(grpcMock.Object, null);
    PredictResponse actual = client.Predict(request);

    Assert.Same(expectedResponse, actual);
    grpcMock.VerifyAll();
}
/// <summary>
/// Verifies that PredictionServiceClient.PredictAsync forwards the request to
/// the gRPC stub unchanged and surfaces the stub's canned response, for both
/// the CallSettings and CancellationToken overloads.
/// </summary>
public async stt::Task PredictRequestObjectAsync()
{
    // Strict mock: unexpected calls fail the test. The operations client is
    // stubbed because the wrapper creates one during construction.
    moq::Mock <PredictionService.PredictionServiceClient> mockGrpcClient = new moq::Mock <PredictionService.PredictionServiceClient>(moq::MockBehavior.Strict);
    mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock <lro::Operations.OperationsClient>().Object);
    PredictRequest request = new PredictRequest
    {
        ModelName = ModelName.FromProjectLocationModel("[PROJECT]", "[LOCATION]", "[MODEL]"),
        Payload = new ExamplePayload(),
        Params = { { "key8a0b6e3c", "value60c16320" }, },
    };
    PredictResponse expectedResponse = new PredictResponse
    {
        Payload = { new AnnotationPayload(), },
        Metadata = { { "key8a0b6e3c", "value60c16320" }, },
        PreprocessedInput = new ExamplePayload(),
    };
    // Stub returns a pre-completed async unary call wrapping the canned response.
    mockGrpcClient.Setup(x => x.PredictAsync(request, moq::It.IsAny <grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall <PredictResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    PredictionServiceClient client = new PredictionServiceClientImpl(mockGrpcClient.Object, null);
    // Overload 1: explicit CallSettings built from a cancellation token.
    PredictResponse responseCallSettings = await client.PredictAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expectedResponse, responseCallSettings);
    // Overload 2: bare CancellationToken.
    PredictResponse responseCancellationToken = await client.PredictAsync(request, st::CancellationToken.None);
    xunit::Assert.Same(expectedResponse, responseCancellationToken);
    mockGrpcClient.VerifyAll();
}
/// <summary>
/// Builds a predict request whose "images" input is a DT_STRING tensor of
/// shape [N], where each element holds one image's encoded bytes.
/// </summary>
public PredictRequest MakePredictRequest()
{
    // Pack every image stream into the tensor's string values.
    var proto = new TensorProto { Dtype = DataType.DtString };
    proto.StringVal.AddRange(_images.Select(ByteString.FromStream));

    // Shape [N]: one entry per image in the batch.
    proto.TensorShape = new TensorShapeProto();
    var batchDim = new TensorShapeProto.Types.Dim { Size = _images.Length };
    proto.TensorShape.Dim.Add(batchDim);

    var request = new PredictRequest { ModelSpec = _modelSpec };
    request.Inputs["images"] = proto;
    return request;
}
/// <summary>Snippet for PredictAsync</summary>
/// <remarks>Auto-generated sample; the model is addressed by its raw resource name.</remarks>
public async Task PredictRequestObjectAsync()
{
    // Snippet: PredictAsync(PredictRequest, CallSettings)
    // Additional: PredictAsync(PredictRequest, CancellationToken)
    // Create client
    PredictionServiceClient predictionServiceClient = await PredictionServiceClient.CreateAsync();
    // Initialize request argument(s)
    PredictRequest request = new PredictRequest
    {
        Name = "",
        Payload = new ExamplePayload(),
        Params = { { "", "" }, },
    };
    // Make the request (awaits the RPC completion)
    PredictResponse response = await predictionServiceClient.PredictAsync(request);
    // End snippet
}