Example #1
        /// <summary>
        /// Single threaded Prosit prediction for several inputs
        /// </summary>
        /// <param name="predictionClient">Client to use for prediction</param>
        /// <param name="settings">Settings to use for constructing </param>
        /// <param name="inputs">The precursors (and other info) to make predictions for</param>
        /// <param name="token">Token for cancelling prediction</param>
        /// <returns>Predictions from Prosit</returns>
        public TSkylineOutput Predict(PredictionService.PredictionServiceClient predictionClient,
                                      SrmSettings settings, IList <TSkylineInputRow> inputs, CancellationToken token)
        {
            inputs = inputs.Distinct().ToArray();

            var validSkylineInputs = new List <TSkylineInputRow>(inputs.Count);
            var prositInputs       = new List <TPrositInputRow>(inputs.Count);

            foreach (var singleInput in inputs)
            {
                var input = CreatePrositInputRow(settings, singleInput, out _);
                if (input != null)
                {
                    prositInputs.Add(input);
                    validSkylineInputs.Add(singleInput);
                }
            }

            var prositIn   = CreatePrositInput(prositInputs);
            var prediction = Predict(predictionClient, prositIn, token);

            return(CreateSkylineOutput(settings, validSkylineInputs, prediction));
        }
Example #2
        /// <summary>
        /// Private version of Predict that works with data structures at
        /// the Prosit level
        /// </summary>
        /// <param name="predictionClient">Client to use for prediction</param>
        /// <param name="inputData">Input data, consisting tensors to send for prediction</param>
        /// <param name="token">Token for cancelling prediction</param>
        /// <returns>Predicted tensors from Prosit</returns>
        private TPrositOut Predict(PredictionService.PredictionServiceClient predictionClient, TPrositIn inputData, CancellationToken token)
        {
            var predictRequest = new PredictRequest();

            predictRequest.ModelSpec = new ModelSpec {
                Name = Model                                        /*, SignatureName = model.Signature*/
            };

            try {
                // Copy input
                var inputs = predictRequest.Inputs;
                foreach (var kvp in inputData.PrositTensors)
                {
                    inputs[kvp.Key] = kvp.Value;
                }

                // Make prediction
                var predictResponse = predictionClient.Predict(predictRequest, cancellationToken: token);
                return(CreatePrositOutput(predictResponse.Outputs));
            }
            catch (RpcException ex) {
                throw new PrositException(ex.Message, ex);
            }
        }
Example #3
        public PredictionResult PredictNumber([FromBody] PredictionRequest model)
        {
            try
            {
                //Load Bitmap from input base64
                Bitmap convertedImage = null;

                using (var str = new MemoryStream(Convert.FromBase64String(model.ImageData)))
                {
                    str.Position = 0;
                    using (var bmp = Image.FromStream(str))
                    {
                        //Resize image and convert to rgb24
                        convertedImage = ImageUtils.ResizeImage(bmp, 28, 28, 280, 280);
                    }
                }

                //Create channel
                var channel = new Channel(_configuration.GetSection("TfServer")["ServerUrl"], ChannelCredentials.Insecure);
                var client  = new PredictionService.PredictionServiceClient(channel);

                //Init predict request
                var request = new PredictRequest()
                {
                    ModelSpec = new ModelSpec()
                    {
                        Name = "mnist", SignatureName = ModelMethodClasses.PredictImages
                    }
                };

                //Convert image to 28x28 8bit per pixel image data array
                var imageData = ImageUtils.ConvertImageStreamToDimArrays(convertedImage);

                var textDebug = TextUtils.RenderImageData(imageData);

                //add image tensor
                request.Inputs.Add("images", TensorBuilder.CreateTensorFromImage(imageData, 255.0f));
                //add keep_prob tensor
                request.Inputs.Add("keep_prob", TensorBuilder.CreateTensor(1.0f));

                var predictResponse = client.Predict(request);

                var maxValue       = predictResponse.Outputs["scores"].FloatVal.Max();
                var predictedValue = predictResponse.Outputs["scores"].FloatVal.IndexOf(maxValue);

                return(new PredictionResult()
                {
                    Success = true,
                    Results = predictResponse.Outputs["scores"].FloatVal.ToList(),
                    PredictedNumber = predictedValue,
                    DebugText = textDebug
                });
            }
            catch (Exception ex)
            {
                return(new PredictionResult()
                {
                    Success = false,
                    ErrorMessage = ex.ToString()
                });
            }
        }
Example #4
 public PredictionServiceClientWrapper(PredictionService.PredictionServiceClient predictionServiceClient)
 {
     _predictionServiceClient = predictionServiceClient;
 }
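The snippet above only shows the constructor. A minimal sketch of how the rest of such a wrapper might look, assuming it simply delegates Predict calls to the injected client; the Predict pass-through and the using directives are assumptions, only the injected field comes from the snippet:

 using System.Threading;
 using Tensorflow.Serving; // assumed namespace of the generated TensorFlow Serving stubs

 public class PredictionServiceClientWrapper
 {
     private readonly PredictionService.PredictionServiceClient _predictionServiceClient;

     public PredictionServiceClientWrapper(PredictionService.PredictionServiceClient predictionServiceClient)
     {
         _predictionServiceClient = predictionServiceClient;
     }

     // Hypothetical pass-through; forwards the request to the wrapped gRPC client.
     public PredictResponse Predict(PredictRequest request, CancellationToken token = default)
     {
         return _predictionServiceClient.Predict(request, cancellationToken: token);
     }
 }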
Example #5
        // This method gets called by the runtime. Use this method to add services to the container.
        /// <summary>
        /// Configures MVC, CORS and Swagger, registers the TensorFlow Serving prediction client
        /// and the house index lookup with Autofac, and builds the Autofac container.
        /// </summary>
        /// <param name="services">The service collection to configure</param>
        /// <returns>The IServiceProvider return type is essential for Autofac to take over dependency resolution; without the returned AutofacServiceProvider, Autofac will not be used.</returns>
        public IServiceProvider ConfigureServices(IServiceCollection services)
        {
            AutoFacContainer autoFacContainer = new AutoFacContainer();

            ContainerBuilder builder = autoFacContainer.ContainerBuilder;

            Channel channel = new Channel("127.0.0.1:51666", ChannelCredentials.Insecure);

            var client = new PredictionService.PredictionServiceClient(channel);

            // register the client for API usage
            builder.RegisterInstance <PredictionService.PredictionServiceClient>(client);

            // read the house indices file:

            var houseIndicesFile = autoFacContainer.Configuration.GetSection("HouseIndices").Get <string>();

            Dictionary <int, HouseIndex> houseIndices = new Dictionary <int, HouseIndex>();

            using (System.IO.StreamReader houseIndicesReader = new System.IO.StreamReader(houseIndicesFile))
            {
                using (CsvReader houseIndicesCsv = new CsvReader(houseIndicesReader))
                {
                    houseIndicesCsv.Read(); // read past the header row
                    while (houseIndicesCsv.Read())
                    {
                        int    index    = int.Parse(houseIndicesCsv[0]);
                        string key      = houseIndicesCsv[1];
                        string postcode = houseIndicesCsv[2];
                        houseIndices.Add(index, new HouseIndex()
                        {
                            Index    = index,
                            Key      = key,
                            Postcode = postcode
                        });
                    }
                }
            }

            builder.RegisterInstance <Dictionary <int, HouseIndex> >(houseIndices);

            services.AddCors(options =>
                             options.AddPolicy(
                                 CorsPolicy,
                                 corsBuilder =>
                                 corsBuilder
                                 .AllowAnyOrigin()
                                 .AllowAnyMethod()
                                 .AllowAnyHeader()
                                 )
                             );

            services.AddMvc().AddJsonOptions(json =>
            {
                json.SerializerSettings.Error            = OnJsonError;
                json.SerializerSettings.ContractResolver = new DefaultContractResolver();
            });

            services.AddSwaggerGen(
                setup =>
                setup.SwaggerDoc(SwaggerApiName,
                                 new Info
            {
                Version        = "1",
                Title          = "House Prediction API Server",
                Description    = "House Prediction API",
                TermsOfService = "N/A"
            })
                );

            builder.Populate(services);
            ApplicationContainer = builder.Build();
            return(new AutofacServiceProvider(ApplicationContainer));
        }
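For context, a minimal controller sketch showing how the instances registered above could be consumed via constructor injection; the controller name and route are hypothetical and the usings are assumed:

        using System.Collections.Generic;
        using Microsoft.AspNetCore.Mvc;

        [Route("api/[controller]")]
        public class HousePredictionController : Controller
        {
            private readonly PredictionService.PredictionServiceClient _client;
            private readonly Dictionary<int, HouseIndex> _houseIndices;

            // Autofac resolves both instances registered in ConfigureServices above.
            public HousePredictionController(PredictionService.PredictionServiceClient client,
                                             Dictionary<int, HouseIndex> houseIndices)
            {
                _client       = client;
                _houseIndices = houseIndices;
            }
        }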
Example #6
        public static void SendRequest(string inputPath, string outputPath)
        {
            IPHostEntry host;
            string      containerIP = "?";
            string      hostName    = "mw-tf-server.uksouth.azurecontainer.io";

            host = Dns.GetHostEntry(hostName); // resolve the serving host name to an IPv4 address
            foreach (IPAddress ip in host.AddressList)
            {
                if (ip.AddressFamily == System.Net.Sockets.AddressFamily.InterNetwork)
                {
                    containerIP = ip.ToString();
                }
            }

            //Create gRPC Channel
            var channel = new Channel(containerIP + ":8500", ChannelCredentials.Insecure);
            var client  = new PredictionService.PredictionServiceClient(channel);
            //Check available models
            //var responce = client.GetModelMetadata(new GetModelMetadataRequest()
            //{
            //    ModelSpec = new ModelSpec() { Name = "model" },
            //    MetadataField = { "signature_def" }
            //});

            //Console.WriteLine($"Model Available: {responce.ModelSpec.Name} Ver.{responce.ModelSpec.Version}");

            //string imagePath = "C:/WebcamSnapshots/picture.png";

            var request = new PredictRequest()
            {
                ModelSpec = new ModelSpec()
                {
                    Name = "model", SignatureName = "serving_default"
                }
            };

            byte[] b;
            using (var stream = new FileStream(inputPath, FileMode.Open))
            using (var br = new BinaryReader(stream))
            {
                b = br.ReadBytes((int)stream.Length);
            }
            string base64String = Convert.ToBase64String(b, 0, b.Length);

            //Console.WriteLine(base64String.Substring(0, 50));
            request.Inputs.Add("input_image", TensorBuilder.CreateTensorFromString(base64String));

            try
            {
                var predictResponse = client.Predict(request);
                var output          = predictResponse.Outputs["output_image"];

                var image_output0 = output.StringVal[0];
                var stri          = image_output0.ToString(Encoding.ASCII);
                //Console.WriteLine(stri);
                // Restore base64 padding to a multiple of four characters
                if (stri.Length % 4 != 0)
                {
                    stri = stri.PadRight(stri.Length + (4 - stri.Length % 4), '=');
                }
                //byte[] image_output1 = ASCIIEncoding.ASCII.GetBytes(stri);

                stri = stri.Replace('_', '/').Replace('-', '+'); // convert URL-safe base64 to the standard alphabet
                byte[] test = Convert.FromBase64String(stri);

                //string savePath = "C:/WebcamSnapshots/csharp_prediction.png";
                ImageConverter converter = new ImageConverter();
                Image          image     = (Image)converter.ConvertFrom(test);

                image.Save(outputPath, ImageFormat.Png);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
            }

            channel.ShutdownAsync().Wait();
        }
Example #7
        public (double dogProbability, double catProbability) PredictImage(byte[] imageData)
        {
            Channel channel = new Channel(ConfigurationManager.AppSettings["ServingRpcChannel"], ChannelCredentials.Insecure);

            try
            {
                using (var factory = new ImageFactory())
                {
                    factory.Load(imageData);

                    var originalWidth  = factory.Image.Width;
                    var originalHeight = factory.Image.Height;

                    var client  = new PredictionService.PredictionServiceClient(channel);
                    var request = new PredictRequest()
                    {
                        ModelSpec = new ModelSpec()
                        {
                            Name          = ConfigurationManager.AppSettings["ModelName"],
                            Version       = int.Parse(ConfigurationManager.AppSettings["ModelVersion"]),
                            SignatureName = ConfigurationManager.AppSettings["ModelSignature"]
                        }
                    };

                    var imageWidth  = int.Parse(ConfigurationManager.AppSettings["ImageWidth"]);
                    var imageHeight = int.Parse(ConfigurationManager.AppSettings["ImageHeight"]);

                    var proto = new TensorProto
                    {
                        TensorShape = new TensorShapeProto()
                    };
                    proto.TensorShape.Dim.Add(new TensorShapeProto.Types.Dim()
                    {
                        Size = 1
                    });                                                                       // one image in batch
                    proto.TensorShape.Dim.Add(new TensorShapeProto.Types.Dim()
                    {
                        Size = imageHeight
                    });
                    proto.TensorShape.Dim.Add(new TensorShapeProto.Types.Dim()
                    {
                        Size = imageWidth
                    });
                    proto.TensorShape.Dim.Add(new TensorShapeProto.Types.Dim()
                    {
                        Size = 3
                    });                                                                       // 3 colour channels
                    proto.Dtype = DataType.DtFloat;

                    using (var resizedImage = factory.Resize(new Size(imageWidth, imageHeight)))
                    {
                        using (var bitmap = new Bitmap(resizedImage.Image))
                        {
                            GetData(bitmap, proto);
                            request.Inputs.Add(ConfigurationManager.AppSettings["ModelInputName"], proto);
                            var result   = client.Predict(request);
                            var response = JsonConvert.DeserializeObject <ResponseObject>(result.Outputs.ToString());

                            var dogProbability = (float)response.outputs.floatVal[1];
                            var catProbability = (float)response.outputs.floatVal[0];

                            return(dogProbability, catProbability);
                        }
                    }
                }
            }
            finally
            {
                channel.ShutdownAsync().Wait();
            }
        }
Example #8
        static void Main(string[] args)
        {
            var imageFile       = "test/input.bmp";
            var scoredImageFile = "test/output.bmp";
            var scoringServer   = "10.0.1.85:9000";

            if (args.Length == 3)
            {
                imageFile       = args[0];
                scoredImageFile = args[1];
                scoringServer   = args[2];
            }

            Stopwatch stopWatch = Stopwatch.StartNew();

            //Create gRPC Channel
            var channel = new Channel(scoringServer, ChannelCredentials.Insecure,
                                      new List <Grpc.Core.ChannelOption> {
                new ChannelOption(ChannelOptions.MaxReceiveMessageLength, int.MaxValue),
                new ChannelOption(ChannelOptions.MaxSendMessageLength, int.MaxValue)
            });
            var client = new PredictionService.PredictionServiceClient(channel);

            Console.WriteLine("Elapsed time {0} ms - gRPC Channel created", stopWatch.ElapsedMilliseconds);


            //Check available model
            var response = client.GetModelMetadata(new GetModelMetadataRequest()
            {
                ModelSpec = new ModelSpec()
                {
                    Name = "model"
                },
                MetadataField = { "signature_def" }
            });

            Console.WriteLine($"Model Available: {responce.ModelSpec.Name} Ver.{responce.ModelSpec.Version}");

            Console.WriteLine("Elapsed time {0} ms - Model available", stopWatch.ElapsedMilliseconds);


            //Create prediction request
            var request = new PredictRequest()
            {
                ModelSpec = new ModelSpec()
                {
                    Name = "model", SignatureName = "predict_image"
                }
            };

            //Add image tensor
            using (Stream stream = new FileStream(imageFile, FileMode.Open))
            {
                request.Inputs.Add("image", TensorBuilder.CreateTensorFromImage(stream, 1.0f));
            }
            Console.WriteLine("Elapsed time {0} ms - image tensor created", stopWatch.ElapsedMilliseconds);


            // Run the prediction
            var predictResponse = client.Predict(request);


            Console.WriteLine("Elapsed time {0} ms - prediction received", stopWatch.ElapsedMilliseconds);

            // Get predict output
            var scoredImage = predictResponse.Outputs["scored_image"];
            var image       = TensorBuilder.CreateImageBitmapFromTensor(scoredImage, 1.0f);

            image.Save(scoredImageFile);

            Console.WriteLine("Elapsed time {0} ms - image saved", stopWatch.ElapsedMilliseconds);
            stopWatch.Stop();

            Console.WriteLine("output saved");
        }
Example #9
        /// <summary>
        /// Constructs batches and makes predictions in parallel
        /// </summary>
        /// <param name="predictionClient">Client to use for prediction</param>
        /// <param name="progressMonitor">Monitor to show progress in UI</param>
        /// <param name="progressStatus"/>
        /// <param name="settings">Settings to use for constructing inputs and outputs</param>
        /// <param name="inputs">List of inputs to predict</param>
        /// <param name="token">Token for cancelling prediction</param>
        /// <returns>Predictions from Prosit</returns>
        public TSkylineOutput PredictBatches(PredictionService.PredictionServiceClient predictionClient,
                                             IProgressMonitor progressMonitor, ref IProgressStatus progressStatus, SrmSettings settings, IList <TSkylineInputRow> inputs, CancellationToken token)
        {
            const int CONSTRUCTING_INPUTS_FRACTION = 50;

            progressMonitor.UpdateProgress(progressStatus = progressStatus
                                                            .ChangeMessage(PrositResources.PrositModel_BatchPredict_Constructing_Prosit_inputs)
                                                            .ChangePercentComplete(0));


            inputs = inputs.Distinct().ToArray();

            var processed  = 0;
            var totalCount = inputs.Count;

            var inputLock       = new object();
            var inputsList      = new List <TPrositIn>();
            var validInputsList = new List <List <TSkylineInputRow> >();

            // Construct batch inputs in parallel
            var localProgressStatus = progressStatus;

            ParallelEx.ForEach(PrositHelpers.EnumerateBatches(inputs, PrositConstants.BATCH_SIZE),
                               batchEnumerable =>
            {
                var batch = batchEnumerable.ToArray();

                var batchInputs        = new List <TPrositInputRow>(batch.Length);
                var validSkylineInputs = new List <TSkylineInputRow>(batch.Length);

                foreach (var singleInput in batch)
                {
                    var input = CreatePrositInputRow(settings, singleInput, out _);
                    if (input != null)
                    {
                        batchInputs.Add(input);
                        validSkylineInputs.Add(singleInput);
                    }
                }

                lock (inputLock)
                {
                    inputsList.Add(CreatePrositInput(batchInputs));
                    validInputsList.Add(validSkylineInputs);

                    // ReSharper disable AccessToModifiedClosure
                    processed += batch.Length;
                    progressMonitor.UpdateProgress(localProgressStatus.ChangePercentComplete(CONSTRUCTING_INPUTS_FRACTION * processed / totalCount));
                    // ReSharper restore AccessToModifiedClosure
                }
            });

            processed  = 0;
            totalCount = inputsList.Sum(pi => pi.InputRows.Count);

            const int REQUESTING_INPUTS_FRACTION = 100 - CONSTRUCTING_INPUTS_FRACTION;

            progressStatus = progressStatus
                             .ChangeMessage(PrositResources.PrositModel_BatchPredict_Requesting_predictions_from_Prosit)
                             .ChangePercentComplete(CONSTRUCTING_INPUTS_FRACTION);
            progressMonitor.UpdateProgress(progressStatus);

            // Make predictions batch by batch in sequence and merge the outputs
            var prositOutputAll = new TPrositOut();

            foreach (var prositIn in inputsList)
            {
                var prositOutput = Predict(predictionClient, prositIn, token);
                prositOutputAll = prositOutputAll.MergeOutputs(prositOutput);

                processed     += prositIn.InputRows.Count;
                progressStatus = progressStatus.ChangeMessage(TextUtil.SpaceSeparate(
                                                                  PrositResources.PrositModel_BatchPredict_Requesting_predictions_from_Prosit,
                                                                  processed.ToString(), @"/", totalCount.ToString()))
                                 .ChangePercentComplete(CONSTRUCTING_INPUTS_FRACTION +
                                                        REQUESTING_INPUTS_FRACTION * processed / totalCount);
                progressMonitor.UpdateProgress(progressStatus);
            }

            return(CreateSkylineOutput(settings, validInputsList.SelectMany(i => i).ToArray(), prositOutputAll));
        }
Example #10
 public void Open()
 {
     _channel = new Channel(_host, ChannelCredentials.Insecure);
     _client  = new PredictionService.PredictionServiceClient(_channel);
 }
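The snippet does not show how the channel is released; a minimal counterpart sketch, assuming the same _channel and _client fields, that shuts the channel down once the client is no longer needed:

 public void Close()
 {
     // Drain and shut down the gRPC channel opened in Open().
     _channel?.ShutdownAsync().Wait();
     _channel = null;
     _client  = null;
 }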
Example #11
        public static PredictResponse ImageDetectionRequest(string image_url, out int height, out int width)
        {
            //Create gRPC Channel
            var channel = new Channel(scoringServer, ChannelCredentials.Insecure,
                                      new List <Grpc.Core.ChannelOption> {
                new ChannelOption(ChannelOptions.MaxReceiveMessageLength, int.MaxValue),
                new ChannelOption(ChannelOptions.MaxSendMessageLength, int.MaxValue)
            });
            var client = new PredictionService.PredictionServiceClient(channel);

            //Create prediction request
            var request = new PredictRequest()
            {
                ModelSpec = new ModelSpec()
                {
                    Name = "ssd", SignatureName = "serving_default"
                }
            };

            //Add image tensor
            WebClient wc = new WebClient();

            byte[] data     = wc.DownloadData(image_url);
            Stream stream   = new MemoryStream(data);
            var    dimArray = ImageUtils.ConvertImageStreamToDimArrays(stream);

            height = dimArray.Length;
            width  = dimArray[0].Length;
            var channels = dimArray[0][0].Length;

            var imageTensorBuilder = new TensorProto();
            var imageFeatureShape  = new TensorShapeProto();

            imageFeatureShape.Dim.Add(new TensorShapeProto.Types.Dim()
            {
                Size = 1
            });
            imageFeatureShape.Dim.Add(new TensorShapeProto.Types.Dim()
            {
                Size = height
            });
            imageFeatureShape.Dim.Add(new TensorShapeProto.Types.Dim()
            {
                Size = width
            });
            imageFeatureShape.Dim.Add(new TensorShapeProto.Types.Dim()
            {
                Size = channels
            });

            imageTensorBuilder.Dtype       = DataType.DtUint8;
            imageTensorBuilder.TensorShape = imageFeatureShape;
            for (int i = 0; i < height; ++i)
            {
                for (int j = 0; j < width; ++j)
                {
                    for (int c = 0; c < channels; c++)
                    {
                        //imageTensorBuilder.FloatVal.Add(dimArray[i][j][c] / revertsBits);
                        imageTensorBuilder.IntVal.Add(dimArray[i][j][c]);
                    }
                }
            }
            request.Inputs.Add("inputs", imageTensorBuilder);

            //using (Stream stream = new MemoryStream(wc.DownloadData(image_url)))
            //{
            //    request.Inputs.Add("inputs", TensorBuilder.CreateTensorFromImage(stream, 1.0f));
            //}

            // Run the prediction
            var predictResponse = client.Predict(request);

            //Console.WriteLine(predictResponse.Outputs["detection_classes"]);
            //Console.WriteLine(predictResponse.Outputs["detection_boxes"]);
            //Console.WriteLine(predictResponse.Outputs["detection_scores"]);
            return(predictResponse);
        }
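A minimal consumer sketch for the response returned above, assuming the standard SSD output tensor names that appear in the commented-out lines; the image URL and the 0.5 score threshold are illustrative:

            var response = ImageDetectionRequest(imageUrl, out var height, out var width);
            var scores   = response.Outputs["detection_scores"].FloatVal;
            var classes  = response.Outputs["detection_classes"].FloatVal;
            var boxes    = response.Outputs["detection_boxes"].FloatVal; // flattened [ymin, xmin, ymax, xmax] per box

            for (int i = 0; i < scores.Count; i++)
            {
                if (scores[i] < 0.5f)
                {
                    continue;
                }
                Console.WriteLine($"class={classes[i]} score={scores[i]:F2} " +
                                  $"box=[{boxes[4 * i]:F2}, {boxes[4 * i + 1]:F2}, {boxes[4 * i + 2]:F2}, {boxes[4 * i + 3]:F2}]");
            }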
Example #12
 public ScoringClient(PredictionService.PredictionServiceClient client)
 {
     _client = client;
 }