// Classifies one image tensor and appends "<filename>-<label>" to the shared
// answer list. Gated by waitHandler; signals sem when done.
public void result(Tuple<DenseTensor<float>, string> input)
{
    waitHandler.WaitOne();
    var inputs = new List<NamedOnnxValue>
    {
        NamedOnnxValue.CreateFromTensor("data", input.Item1)
    };
    // FIX: dispose the native ONNX output collection (the original leaked it).
    using (IDisposableReadOnlyCollection<DisposableNamedOnnxValue> results = session.Run(inputs))
    {
        // Softmax over the raw logits.
        var output = results.First().AsEnumerable<float>().ToArray();
        var sum = output.Sum(x => (float)Math.Exp(x));
        var softmax = output.Select(x => (float)Math.Exp(x) / sum);
        // FIX: Take(1).ToList()[0] simplified to First() — keep only the top class.
        var best = softmax
            .Select((x, i) => new { Label = classLabels[i], Confidence = x })
            .OrderByDescending(x => x.Confidence)
            .First();
        lock (locker)
        {
            list_of_ans.Add(input.Item2.Substring(input.Item2.LastIndexOf('\\') + 1) + "-" + best.Label);
        }
    }
    sem.Release();
}
// Returns the cached prediction for the image if present in the database,
// otherwise runs inference, stores the result, and returns it.
private PredictionResult Predict_with_db(DenseTensor<float> input, string single_image_path)
{
    // FIX: consult the database BEFORE running inference. The original ran the
    // (expensive) ONNX session first and threw the result away on a cache hit.
    if (CheckIfInDb(single_image_path, out PredictionResult cached))
    {
        return cached;
    }

    var inputs = new List<NamedOnnxValue>
    {
        NamedOnnxValue.CreateFromTensor("Input3", input)
    };
    using IDisposableReadOnlyCollection<DisposableNamedOnnxValue> results = session.Run(inputs);

    // Softmax over the raw logits.
    // FIX: materialize once — the original re-ran the deferred Select for
    // Max() and again for ToList().IndexOf().
    var output = results.First().AsEnumerable<float>().ToArray();
    var sum = output.Sum(x => (float)Math.Exp(x));
    var softmax = output.Select(x => (float)Math.Exp(x) / sum).ToList();

    var confidence = softmax.Max();
    var class_idx = softmax.IndexOf(confidence);

    PredictionResult pred = new PredictionResult(single_image_path, LabelMap.ClassLabels[class_idx], confidence);
    AddToDb(pred);
    return pred;
}
// Runs the classifier on a background thread and reports whether the input is
// judged a "bug": true when the bug confidence exceeds the normal confidence
// by more than Treshold.
public async Task<bool> Infer(Tensor<float> input)
{
    var inputs = new List<NamedOnnxValue>
    {
        NamedOnnxValue.CreateFromTensor(TensorInputName, input)
    };
    using (IDisposableReadOnlyCollection<DisposableNamedOnnxValue> results = await Task.Run(() => session.Run(inputs)))
    {
        IEnumerable<float> output = results.First().AsEnumerable<float>();
        // FIX: materialize the predictions once. The original kept a deferred
        // LINQ query and enumerated it up to three times (debug loop plus two
        // First() calls), re-running the Select/OrderBy each time.
        List<Prediction> pred = output
            .Select((x, i) => new Prediction { Label = LabelMap.Labels[i], Confidence = x })
            .OrderByDescending(x => x.Confidence)
            .ToList();
#if UNITY_EDITOR
        string predictions = "";
        foreach (var t in pred)
        {
            predictions += $"Label: {t.Label}, Confidence: {t.Confidence}\t";
        }
        //Debug.Log(predictions);
#endif
        float bug_certainty = pred.First(p => p.Label == "bug").Confidence;
        float normal_certainty = pred.First(p => p.Label == "normal").Confidence;
        return bug_certainty - normal_certainty > Treshold; //true is bug
        //return new Tuple<float, float>(normal_certainty, bug_certainty);
    }
}
// Classifies the text orientation of a crop: resize to the angle-net input
// size, normalize, run the net, and map scores to an Angle. Returns a default
// Angle on any failure (best-effort).
private Angle GetAngle(Mat src)
{
    Angle angle = new Angle();
    Mat angleImg = new Mat();
    CvInvoke.Resize(src, angleImg, new Size(angleDstWidth, angleDstHeight));
    Tensor<float> inputTensors = OcrUtils.SubstractMeanNormalize(angleImg, MeanValues, NormValues);
    var inputs = new List<NamedOnnxValue>
    {
        NamedOnnxValue.CreateFromTensor(inputNames[0], inputTensors)
    };
    try
    {
        using (IDisposableReadOnlyCollection<DisposableNamedOnnxValue> results = angleNet.Run(inputs))
        {
            // FIX: removed Console.WriteLine(resultsArray), which only printed
            // the array's type name.
            var resultsArray = results.ToArray();
            float[] outputData = resultsArray[0].AsEnumerable<float>().ToArray();
            return ScoreToAngle(outputData, angleCols);
        }
    }
    catch (Exception ex)
    {
        // Best-effort: log and fall through to the default (empty) Angle.
        Console.WriteLine(ex.Message + ex.StackTrace);
        //throw ex;
    }
    return angle;
}
// Detects text boxes with the DB-Net: resize to the scaled size, normalize,
// run the net, and convert the raw output to boxes. Returns null on failure.
public List<TextBox> GetTextBoxes(Mat src, ScaleParam scale, float boxScoreThresh, float boxThresh, float unClipRatio)
{
    Mat srcResize = new Mat();
    CvInvoke.Resize(src, srcResize, new Size(scale.DstWidth, scale.DstHeight));
    Tensor<float> inputTensors = OcrUtils.SubstractMeanNormalize(srcResize, MeanValues, NormValues);
    var inputs = new List<NamedOnnxValue>
    {
        NamedOnnxValue.CreateFromTensor(inputNames[0], inputTensors)
    };
    try
    {
        using (IDisposableReadOnlyCollection<DisposableNamedOnnxValue> results = dbNet.Run(inputs))
        {
            // FIX: removed Console.WriteLine(resultsArray), which only printed
            // the array's type name.
            var resultsArray = results.ToArray();
            var textBoxes = GetTextBoxes(resultsArray, srcResize.Rows, srcResize.Cols, scale, boxScoreThresh, boxThresh, unClipRatio);
            return textBoxes;
        }
    }
    catch (Exception ex)
    {
        // Best-effort: log and signal failure with null.
        Console.WriteLine(ex.Message + ex.StackTrace);
    }
    return null;
}
/// <summary>
/// Upscales a bitmap either with a classic interpolation mode or, for any
/// other model id, with the corresponding ONNX super-resolution model.
/// </summary>
/// <exception cref="InstanceNotFoundException"/>
/// <exception cref="OnnxRuntimeException"/>
private Bitmap Resize(int modelId, byte upscaleFactor, Bitmap bitmap)
{
    int scaledWidth = bitmap.Width * upscaleFactor;
    int scaledHeight = bitmap.Height * upscaleFactor;

    // Classic modes are handled by plain bitmap scaling.
    switch (modelId)
    {
        case NearestNeighbor:
            return ResizeBitmap(bitmap, scaledWidth, scaledHeight, InterpolationMode.NearestNeighbor);
        case Bilinear:
            return ResizeBitmap(bitmap, scaledWidth, scaledHeight, InterpolationMode.Bilinear);
        case Bicubic:
            return ResizeBitmap(bitmap, scaledWidth, scaledHeight, InterpolationMode.Bicubic);
    }

    // Any other id selects an ONNX model for the requested upscale factor.
    Tensor<float> tensor = Converter.ConvertBitmapToFloatTensor(bitmap);
    IReadOnlyCollection<NamedOnnxValue> input = new List<NamedOnnxValue>
    {
        NamedOnnxValue.CreateFromTensor("input", tensor)
    };
    InferenceSession session = GetSession(modelId, upscaleFactor);
    using (IDisposableReadOnlyCollection<DisposableNamedOnnxValue> results = session.Run(input))
    {
        Tensor<float> output = results.First().AsTensor<float>();
        return Converter.ConvertFloatTensorToBitmap(output);
    }
}
/// <summary>
/// Runs the inference session on an image and returns the model's output
/// tensors in the order listed by _model.Outputs.
/// </summary>
private DenseTensor<float>[] Inference(Image image)
{
    Bitmap resized = null;
    try
    {
        if (image.Width != _model.Width || image.Height != _model.Height)
        {
            resized = ResizeImage(image); // fit image size to specified input size
        }
        var inputs = new List<NamedOnnxValue> // add image as onnx input
        {
            NamedOnnxValue.CreateFromTensor("images", ExtractPixels(resized ?? image))
        };
        IDisposableReadOnlyCollection<DisposableNamedOnnxValue> result = _inferenceSession.Run(inputs); // run inference
        var output = new List<DenseTensor<float>>();
        foreach (var item in _model.Outputs) // add outputs for processing
        {
            output.Add(result.First(x => x.Name == item).Value as DenseTensor<float>);
        }
        // NOTE(review): `result` is intentionally not disposed here — the
        // returned tensors may be backed by its native buffers; confirm
        // ownership before adding a Dispose.
        return output.ToArray();
    }
    finally
    {
        // FIX: the temporary resized bitmap was never disposed (resource leak).
        // Safe to release here: ExtractPixels has already consumed its pixels.
        resized?.Dispose();
    }
}
// Recognizes the text in a single line crop with the CRNN: scale the crop to
// the CRNN input height (preserving aspect ratio), normalize, run the net, and
// decode the score matrix. Returns a default TextLine on any failure.
private TextLine GetTextLine(Mat src)
{
    TextLine emptyLine = new TextLine();

    // Height-normalized resize: width scales with the same ratio.
    float ratio = (float)crnnDstHeight / (float)src.Rows;
    int targetWidth = (int)((float)src.Cols * ratio);
    Mat resized = new Mat();
    CvInvoke.Resize(src, resized, new Size(targetWidth, crnnDstHeight));

    Tensor<float> tensor = OcrUtils.SubstractMeanNormalize(resized, MeanValues, NormValues);
    var feeds = new List<NamedOnnxValue>
    {
        NamedOnnxValue.CreateFromTensor(inputNames[0], tensor)
    };
    try
    {
        using (IDisposableReadOnlyCollection<DisposableNamedOnnxValue> outputs = crnnNet.Run(feeds))
        {
            var outputArray = outputs.ToArray();
            var dims = outputArray[0].AsTensor<float>().Dimensions;
            float[] scores = outputArray[0].AsEnumerable<float>().ToArray();
            return ScoreToTextLine(scores, dims[1], dims[2]);
        }
    }
    catch (Exception ex)
    {
        // Best-effort: log and fall through to the default result.
        Console.WriteLine(ex.Message + ex.StackTrace);
        //throw ex;
    }
    return emptyLine;
}
/// <summary>
/// Runs the loaded model for the given inputs, and fetches the outputs specified in <paramref name="outputNames"/>.
/// </summary>
/// <param name="inputs"></param>
/// <param name="outputNames"></param>
/// <returns>Output Tensors in a Collection of NamedOnnxValue. User must dispose the output.</returns>
public IDisposableReadOnlyCollection<DisposableNamedOnnxValue> Run(IReadOnlyCollection<NamedOnnxValue> inputs, IReadOnlyCollection<string> outputNames)
{
    // Delegate to the full overload using the built-in default run options.
    return Run(inputs, outputNames, _builtInRunOptions);
}
// Classifies the image at `path` with the ShuffleNet model and enqueues the
// top-1 prediction. Any failure enqueues a path-only result instead.
public static void processImage(string path)
{
    try
    {
        const int TargetWidth = 224;
        const int TargetHeight = 224;

        using var image = Image.Load<Rgb24>(path);
        // Crop-resize to the model's 224x224 input.
        image.Mutate(ctx => ctx.Resize(new ResizeOptions
        {
            Size = new Size(TargetWidth, TargetHeight),
            Mode = ResizeMode.Crop
        }));

        // NCHW tensor with ImageNet mean/stddev normalization.
        var mean = new[] { 0.485f, 0.456f, 0.406f };
        var stddev = new[] { 0.229f, 0.224f, 0.225f };
        var input = new DenseTensor<float>(new[] { 1, 3, TargetHeight, TargetWidth });
        for (int row = 0; row < TargetHeight; row++)
        {
            Span<Rgb24> pixels = image.GetPixelRowSpan(row);
            for (int col = 0; col < TargetWidth; col++)
            {
                Rgb24 px = pixels[col];
                input[0, 0, row, col] = (px.R / 255f - mean[0]) / stddev[0];
                input[0, 1, row, col] = (px.G / 255f - mean[1]) / stddev[1];
                input[0, 2, row, col] = (px.B / 255f - mean[2]) / stddev[2];
            }
        }

        var feeds = new List<NamedOnnxValue> { NamedOnnxValue.CreateFromTensor("input", input) };
        using var session = new InferenceSession("..\\..\\..\\..\\ImageRecognition\\shufflenet-v2-10.onnx");
        using IDisposableReadOnlyCollection<DisposableNamedOnnxValue> results = session.Run(feeds);

        // Softmax over the logits, keep only the most confident class.
        float[] logits = results.First().AsEnumerable<float>().ToArray();
        float expSum = logits.Sum(v => (float)Math.Exp(v));
        var top = logits
            .Select((v, i) => new { Label = classLabels[i], Confidence = (float)Math.Exp(v) / expSum })
            .OrderByDescending(r => r.Confidence)
            .Take(1);

        lock (lockObj)
        {
            foreach (var p in top)
            {
                predictionOutputs.Enqueue(new ImageResult(path, p.Label, p.Confidence));
            }
        }
    }
    catch (Exception)
    {
        // Best-effort: report the path with no prediction attached.
        predictionOutputs.Enqueue(new ImageResult(path));
    }
}
// Loads an image (first CLI argument, or "image.jpg"), classifies it with the
// ShuffleNet model, and prints the 10 most probable labels.
static void Main(string[] args)
{
    const int TargetWidth = 224;
    const int TargetHeight = 224;

    using var image = Image.Load<Rgb24>(args.FirstOrDefault() ?? "image.jpg");
    // Crop-resize to 224x224, keeping proportions and trimming the excess.
    image.Mutate(ctx => ctx.Resize(new ResizeOptions
    {
        Size = new Size(TargetWidth, TargetHeight),
        Mode = ResizeMode.Crop
    }));

    // NCHW tensor with ImageNet mean/stddev normalization.
    var mean = new[] { 0.485f, 0.456f, 0.406f };
    var stddev = new[] { 0.229f, 0.224f, 0.225f };
    var input = new DenseTensor<float>(new[] { 1, 3, TargetHeight, TargetWidth });
    for (int row = 0; row < TargetHeight; row++)
    {
        Span<Rgb24> pixels = image.GetPixelRowSpan(row);
        for (int col = 0; col < TargetWidth; col++)
        {
            Rgb24 px = pixels[col];
            input[0, 0, row, col] = (px.R / 255f - mean[0]) / stddev[0];
            input[0, 1, row, col] = (px.G / 255f - mean[1]) / stddev[1];
            input[0, 2, row, col] = (px.B / 255f - mean[2]) / stddev[2];
        }
    }

    // The input name "input" is defined in the model file.
    var feeds = new List<NamedOnnxValue> { NamedOnnxValue.CreateFromTensor("input", input) };

    using var session = new InferenceSession("shufflenet-v2-10.onnx");
    Console.WriteLine("Predicting contents of image...");
    using IDisposableReadOnlyCollection<DisposableNamedOnnxValue> results = session.Run(feeds);

    // Softmax over the logits, then print the top-10 classes.
    float[] logits = results.First().AsEnumerable<float>().ToArray();
    float expSum = logits.Sum(v => (float)Math.Exp(v));
    foreach (var p in logits
        .Select((v, i) => new { Label = classLabels[i], Confidence = (float)Math.Exp(v) / expSum })
        .OrderByDescending(r => r.Confidence)
        .Take(10))
    {
        Console.WriteLine($"{p.Label} with confidence {p.Confidence}");
    }
}
// Classifies one image with the shared ONNX session and forwards the top-1
// prediction (bundled with the original blob) to write() and Postprocess().
private void ImageProcess(Image <Rgb24> image, string path, byte[] blob)
{
    const int targetHeight = 224;
    const int targetWidth = 224;
    // Crop-resize in place to the model input size.
    image.Mutate(x =>
    {
        x.Resize(new ResizeOptions
        {
            Size = new Size(targetWidth, targetHeight),
            Mode = ResizeMode.Crop
        });
    });
    // NCHW tensor with ImageNet mean/stddev normalization.
    var input = new DenseTensor <float>(new[] { 1, 3, targetHeight, targetWidth });
    var mean = new[] { 0.485f, 0.456f, 0.406f };
    var stddev = new[] { 0.229f, 0.224f, 0.225f };
    for (int y = 0; y < targetHeight; y++)
    {
        Span <Rgb24> pixelSpan = image.GetPixelRowSpan(y);
        for (int x = 0; x < targetWidth; x++)
        {
            input[0, 0, y, x] = ((pixelSpan[x].R / 255f) - mean[0]) / stddev[0];
            input[0, 1, y, x] = ((pixelSpan[x].G / 255f) - mean[1]) / stddev[1];
            input[0, 2, y, x] = ((pixelSpan[x].B / 255f) - mean[2]) / stddev[2];
        }
    }
    var inputs = new List <NamedOnnxValue>
    {
        NamedOnnxValue.CreateFromTensor("data", input)
    };
    using (IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results = Session.Run(inputs))
    {
        // Softmax over the raw logits.
        var output = results.First().AsEnumerable <float>().ToArray();
        var sum = output.Sum(x => (float)Math.Exp(x));
        var softmax = output.Select(x => (float)Math.Exp(x) / sum);
        // Take(1): only the most confident class is forwarded.
        foreach (var p in softmax
                 .Select((x, i) => new { Label = LabelClass.Labels[i], Confidence = x })
                 .OrderByDescending(x => x.Confidence)
                 .Take(1))
        {
            // NOTE(review): WaitOne(0) returns immediately whether or not the
            // handle is signaled, and its return value is ignored — so this does
            // NOT guarantee mutual exclusion. The WaitOne/Set pairing also
            // suggests OutMutex is an EventWaitHandle rather than a Mutex.
            // Confirm the intended synchronization here.
            OutMutex.WaitOne(0);
            var result = new Result
            {
                Label = p.Label, Confidence = p.Confidence, Path = path,
                Blob = new ImageData
                {
                    Data = blob
                }
            };
            write(result);
            Postprocess(result);
            OutMutex.Set();
        }
    }
}
// New version
// Decodes a base64-encoded image, classifies it, and returns the most probable
// class with its probability expressed as a percentage.
private static ObjectInImageProbability ImageRecognition(string fileName, string stringImage)
{
    const int TargetWidth = 224;
    const int TargetHeight = 224;

    // Image loading (payload arrives base64-encoded).
    byte[] blob = Convert.FromBase64String(stringImage);
    using var image = Image.Load<Rgb24>(blob);

    // Crop-resize to the targeted size, keeping proportions.
    image.Mutate(x =>
    {
        x.Resize(new ResizeOptions
        {
            Size = new Size(TargetWidth, TargetHeight),
            Mode = ResizeMode.Crop
        });
    });

    // Pixel-to-tensor conversion with ImageNet normalization.
    var input = new DenseTensor<float>(new[] { 1, 3, TargetHeight, TargetWidth });
    var mean = new[] { 0.485f, 0.456f, 0.406f };
    var stddev = new[] { 0.229f, 0.224f, 0.225f };
    for (int y = 0; y < TargetHeight; y++)
    {
        Span<Rgb24> pixelSpan = image.GetPixelRowSpan(y);
        for (int x = 0; x < TargetWidth; x++)
        {
            input[0, 0, y, x] = ((pixelSpan[x].R / 255f) - mean[0]) / stddev[0];
            input[0, 1, y, x] = ((pixelSpan[x].G / 255f) - mean[1]) / stddev[1];
            input[0, 2, y, x] = ((pixelSpan[x].B / 255f) - mean[2]) / stddev[2];
        }
    }

    var inputs = new List<NamedOnnxValue>
    {
        NamedOnnxValue.CreateFromTensor("data", input)
    };
    using IDisposableReadOnlyCollection<DisposableNamedOnnxValue> results = session.Run(inputs);

    // FIX: materialize the softmax once. The original kept a deferred LINQ
    // query and re-ran the whole exp/divide pipeline for Max() and again for
    // ToList().IndexOf().
    var output = results.First().AsEnumerable<float>().ToArray();
    var sum = output.Sum(x => (float)Math.Exp(x));
    var softmax = output.Select(x => (float)Math.Exp(x) / sum).ToList();

    float probability = softmax.Max();
    int classLabelsObjectsNumber = softmax.IndexOf(probability);
    probability *= 100.0f; // report as a percentage
    string classLabel = classLabels[classLabelsObjectsNumber];
    return new ObjectInImageProbability(fileName, classLabel, probability);
}
//copy of text_accentAPI.py/__predict
// Predicts the stress (accent) position in `word` and returns the word with an
// apostrophe inserted after the chosen vowel, or the word unchanged when no
// prediction can be made.
private string PredictInternal(string word, string wordWithContext)
{
    if (wordWithContext.Length > MAXLEN)
    {
        return(word); //no support for such long words
    }
    // One-hot encode the context, right-aligned into a window of MAXLEN positions.
    DenseTensor <float> tensor = new DenseTensor <float>(this.m_inputShape);
    for (int i = 0; i < wordWithContext.Length; ++i)
    {
        char letter = wordWithContext[i];
        int pos = MAXLEN - wordWithContext.Length + i;
        int charInd = this.m_charIndices[letter];
        tensor[0, pos, charInd] = 1.0f;
    }
    IReadOnlyCollection <NamedOnnxValue> inputs = new List <NamedOnnxValue>()
    {
        NamedOnnxValue.CreateFromTensor <float>(this.m_session.InputMetadata.First().Key, tensor)
    };
    float[] predictions;
    using (IDisposableReadOnlyCollection <DisposableNamedOnnxValue> result = this.m_session.Run(inputs))
    {
        predictions = result.First().AsTensor <float>().ToArray();
    }
    // Walk the word from the end, aligning its last character with the last
    // prediction slot, and keep the vowel position with the highest score.
    // NOTE(review): this assumes the model's output positions are right-aligned
    // with the input window the same way the word is — confirm against
    // text_accentAPI.py/__predict.
    int bestIndexInWord = -1;
    float bestProb = float.MinValue;
    int checkIndexInProb = predictions.Length - 1;
    int checkIndexInWord = word.Length - 1;
    while (checkIndexInWord >= 0)
    {
        if (VOVELS.Contains(word[checkIndexInWord]))
        {
            if (predictions[checkIndexInProb] > bestProb)
            {
                bestProb = predictions[checkIndexInProb];
                bestIndexInWord = checkIndexInWord;
            }
        }
        --checkIndexInWord;
        --checkIndexInProb;
    }
    // No vowel found: return the word unmodified.
    if (bestIndexInWord < 0)
    {
        return(word);
    }
    // Insert the stress mark right after the chosen vowel.
    return(word.Substring(0, bestIndexInWord + 1) + '\'' + word.Substring(bestIndexInWord + 1));
}
// Classifies a 28x28 grayscale image and returns the top class with its
// confidence, bundled with the source path.
public PredictionValues FileRead(string ImagePath)
{
    // FIX: dispose the loaded image (the original leaked it).
    using Image<Rgb24> image = Image.Load<Rgb24>(ImagePath);
    const int TargetWidth = 28;
    const int TargetHeight = 28;
    image.Mutate(x =>
    {
        x.Resize(new ResizeOptions
        {
            Size = new Size(TargetWidth, TargetHeight),
            Mode = ResizeMode.Crop
        });
        x.Grayscale();
    });
    // Single-channel input scaled to [0, 1]; after Grayscale() the channels are
    // equal, so reading R is sufficient.
    var input = new DenseTensor<float>(new[] { 1, 1, TargetHeight, TargetWidth });
    for (int y = 0; y < TargetHeight; y++)
    {
        for (int x = 0; x < TargetWidth; x++)
        {
            input[0, 0, y, x] = image[x, y].R / 255f;
        }
    }
    var inputs = new List<NamedOnnxValue>
    {
        NamedOnnxValue.CreateFromTensor("Input3", input)
    };
    using IDisposableReadOnlyCollection<DisposableNamedOnnxValue> results = session.Run(inputs);
    // FIX: materialize the output and softmax once. The original kept deferred
    // LINQ queries and re-enumerated them for Sum, Max and IndexOf; it also
    // built an unused top-10 `preds` list (removed along with the dead code).
    var output = results.First().AsEnumerable<float>().ToArray();
    var sum = output.Sum(x => (float)Math.Exp(x));
    var softmax = output.Select(x => (float)Math.Exp(x) / sum).ToList();
    var confidence = softmax.Max();
    var index = softmax.IndexOf(confidence);
    return new PredictionValues(ImagePath, LabelMap.ClassLabels[index], confidence);
}
// Classifies the image at imagePath and raises Notify with the predicted label.
// (The `tmp` parameter is unused but kept for interface compatibility.)
public void ImageProcess(string imagePath, int tmp)
{
    // FIX: removed the redundant (string) casts — imagePath is already a string.
    using var image = Image.Load<Rgb24>(imagePath ?? "image.jpg");
    const int TargetWidth = 224;
    const int TargetHeight = 224;
    // Crop-resize to 224x224, keeping proportions and trimming the excess.
    image.Mutate(x =>
    {
        x.Resize(new ResizeOptions
        {
            Size = new SixLabors.ImageSharp.Size(TargetWidth, TargetHeight),
            Mode = ResizeMode.Crop
        });
    });
    // NCHW tensor with ImageNet mean/stddev normalization.
    var input = new DenseTensor<float>(new[] { 1, 3, TargetHeight, TargetWidth });
    var mean = new[] { 0.485f, 0.456f, 0.406f };
    var stddev = new[] { 0.229f, 0.224f, 0.225f };
    for (int y = 0; y < TargetHeight; y++)
    {
        Span<Rgb24> pixelSpan = image.GetPixelRowSpan(y);
        for (int x = 0; x < TargetWidth; x++)
        {
            input[0, 0, y, x] = ((pixelSpan[x].R / 255f) - mean[0]) / stddev[0];
            input[0, 1, y, x] = ((pixelSpan[x].G / 255f) - mean[1]) / stddev[1];
            input[0, 2, y, x] = ((pixelSpan[x].B / 255f) - mean[2]) / stddev[2];
        }
    }
    var inputs = new List<NamedOnnxValue>
    {
        NamedOnnxValue.CreateFromTensor(Session.InputMetadata.Keys.First(), input)
    };
    using IDisposableReadOnlyCollection<DisposableNamedOnnxValue> results = Session.Run(inputs);
    var output = results.First().AsEnumerable<float>().ToArray();
    var sum = output.Sum(x => (float)Math.Exp(x));
    // FIX: materialize the softmax once — the original re-ran the deferred
    // Select for both ToList() and Max().
    var softmax = output.Select(x => (float)Math.Exp(x) / sum).ToList();
    int index = softmax.IndexOf(softmax.Max());
    Notify?.Invoke(new PredictionResult(imagePath, classLabels[index]), new EventArgs(), false);
}
// Runs the model on a preprocessed tensor and returns the label of the most
// probable class.
public string PredictModel(DenseTensor<float> input)
{
    var inputs = new List<NamedOnnxValue>
    {
        NamedOnnxValue.CreateFromTensor(name_model.InputMetadata.Keys.First(), input)
    };
    using IDisposableReadOnlyCollection<DisposableNamedOnnxValue> results = name_model.Run(inputs);
    var output = results.First().AsEnumerable<float>().ToArray();
    float sum = output.Sum(x => (float)Math.Exp(x));
    // FIX: materialize the softmax once — the original kept a deferred query
    // (misleadingly named `max`) and re-ran it for ToList() and Max().
    var softmax = output.Select(x => (float)Math.Exp(x) / sum).ToList();
    var Labels_indx = softmax.IndexOf(softmax.Max());
    return LabelMap.Labels[Labels_indx];
}
// Runs the classifier on a preprocessed tensor and returns the index of the
// most probable class.
private int PredictImage(Tensor<float> tensor)
{
    var inputs = new List<NamedOnnxValue>
    {
        NamedOnnxValue.CreateFromTensor("data", tensor)
    };
    // FIX: dispose the native ONNX output collection (the original leaked it).
    using IDisposableReadOnlyCollection<DisposableNamedOnnxValue> results = session.Run(inputs);
    // Softmax over the raw logits.
    var output = results.First().AsEnumerable<float>().ToArray();
    var sum = output.Sum(x => (float)Math.Exp(x));
    var softmax = output.Select(x => (float)Math.Exp(x) / sum);
    // Index of the most probable class (Take(1)+First() collapsed to First()).
    return softmax
        .Select((x, i) => new Tuple<int, float>(i, x))
        .OrderByDescending(x => x.Item2)
        .First().Item1;
}
// Session used for all predictions.
InferenceSession session = new InferenceSession("model.onnx");

// Classifies a 28x28 bitmap (converted to grayscale) and returns the most
// probable class label.
public string ProcessPicture(Bitmap bitmap)
{
    const int TargetWidth = 28;
    const int TargetHeight = 28;
    // Convert pixels to a normalized single-channel tensor (grayscale = mean of RGB).
    DenseTensor<float> input = new DenseTensor<float>(new[] { 1, 1, TargetHeight, TargetWidth });
    for (int y = 0; y < TargetHeight; y++)
    {
        for (int x = 0; x < TargetWidth; x++)
        {
            var color = bitmap.GetPixel(x, y);
            input[0, 0, y, x] = (color.R + color.G + color.B) / 3f / 255f;
        }
    }
    // The input name is defined in the model file.
    var inputs = new List<NamedOnnxValue>
    {
        NamedOnnxValue.CreateFromTensor(session.InputMetadata.Keys.First(), input)
    };
    // FIX: dispose the native ONNX output collection (the original leaked it).
    using IDisposableReadOnlyCollection<DisposableNamedOnnxValue> results = session.Run(inputs);

    // Deliberate busy-work (kept from the original) to observe how tasks are
    // scheduled across cores.
    int k = 1;
    for (int i = 1; i < 100000000; i++)
    {
        k = k * i;
    }

    // Softmax over the outputs (formula from Wikipedia).
    var output = results.First().AsEnumerable<float>().ToArray();
    var sum = output.Sum(x => (float)Math.Exp(x));
    var softmax = output.Select(x => (float)Math.Exp(x) / sum);
    // Return the most probable label.
    var result = softmax
        .Select((x, i) => new { Label = PictureInfo.classLabels[i], Confidence = x })
        .OrderByDescending(x => x.Confidence)
        .FirstOrDefault();
    return result.Label;
}
// Runs BERT sentiment classification on a pre-tokenized sample sentence and
// prints the raw output scores.
// REMEMBER the tokenizer + vocab
// see the project https://github.com/Microsoft/BlingFire
static void Main(string[] args)
{
    // Token ids for the sample sentence, zero-padded to a fixed length of 128.
    var tokenIds = new long[128];
    new long[] { 102, 4714, 395, 1538, 2692, 103 }.CopyTo(tokenIds, 0);

    // Pack the ids into a [1, 128] tensor.
    var input = new DenseTensor<long>(new[] { 1, 128 });
    for (int i = 0; i < 128; i++)
    {
        input[0, i] = tokenIds[i];
    }
    var feeds = new List<NamedOnnxValue>
    {
        NamedOnnxValue.CreateFromTensor("input_ids", input)
    };

    // Sentiment classification.
    var modelFilePath = "c:\\temp\\BERTsentiment.onnx";
    using var session = new InferenceSession(modelFilePath);
    using IDisposableReadOnlyCollection<DisposableNamedOnnxValue> results = session.Run(feeds);

    // Print every score from the first output tensor.
    var values = (DenseTensor<float>)results.First().Value;
    for (var i = 0; i < values.Length; i++)
    {
        Console.WriteLine(values[0, i]);
    }
    Console.ReadKey();
}
// Feeds one tensor to the session under the input name "input" and decodes the
// raw outputs into the final string.
public string Run(InferenceSession session, Tensor<float> oneTensor)
{
    var feeds = new List<NamedOnnxValue>
    {
        NamedOnnxValue.CreateFromTensor("input", oneTensor)
    };
    using (IDisposableReadOnlyCollection<DisposableNamedOnnxValue> results = session.Run(feeds))
    {
        // Postprocess to get predictions.
        return Decode(results.ToArray());
    }
}
// Classifies an image passed as a base64 string and returns the top label.
// NOTE: the `path` parameter actually carries the base64 payload, not a file path.
public string ProcessImage(string path)
{
    // FIX: dispose the stream, the decoded image and the resized bitmap —
    // the original leaked all three (GDI+/stream resources).
    using var stream = new MemoryStream(Convert.FromBase64String(path));
    using Image image = Image.FromStream(stream);
    const int TargetWidth = 224;
    const int TargetHeight = 224;
    using var bitmap = ResizeImage(image, TargetWidth, TargetHeight);
    // NCHW tensor with ImageNet mean/stddev normalization.
    var input = new DenseTensor<float>(new[] { 1, 3, TargetHeight, TargetWidth });
    var mean = new[] { 0.485f, 0.456f, 0.406f };
    var stddev = new[] { 0.229f, 0.224f, 0.225f };
    for (int y = 0; y < TargetHeight; y++)
    {
        for (int x = 0; x < TargetWidth; x++)
        {
            var color = bitmap.GetPixel(x, y);
            input[0, 0, y, x] = ((color.R / 255f) - mean[0]) / stddev[0];
            input[0, 1, y, x] = ((color.G / 255f) - mean[1]) / stddev[1];
            input[0, 2, y, x] = ((color.B / 255f) - mean[2]) / stddev[2];
        }
    }
    // The input name "data" is defined in the model file.
    var inputs = new List<NamedOnnxValue>
    {
        NamedOnnxValue.CreateFromTensor("data", input)
    };
    using IDisposableReadOnlyCollection<DisposableNamedOnnxValue> results = session.Run(inputs);
    // Softmax over the logits, return the most confident label.
    var output = results.First().AsEnumerable<float>().ToArray();
    var sum = output.Sum(x => (float)Math.Exp(x));
    var softmax = output.Select(x => (float)Math.Exp(x) / sum);
    return softmax
        .Select((x, i) => new { Label = classLabels[i], Confidence = x })
        .OrderByDescending(x => x.Confidence).FirstOrDefault().Label;
}
// Classifies a 28x28 single-channel image and returns the top class index with
// its confidence.
private static Prediction OneImgRecognition(string path)
{
    using var image = Image.Load<Rgb24>(path);
    const int TargetWidth = 28;
    const int TargetHeight = 28;
    image.Mutate(x =>
    {
        x.Resize(new ResizeOptions
        {
            Size = new Size(TargetWidth, TargetHeight),
            Mode = ResizeMode.Crop,
        });
    });
    // Single-channel input built from the red channel only — presumably the
    // sources are grayscale so R == G == B; TODO confirm with callers.
    // FIX: the original declared full 3-channel mean/stddev arrays but only
    // ever read element 0; collapsed to the two constants actually used.
    const float Mean = 0.485f;
    const float Stddev = 0.229f;
    var input = new DenseTensor<float>(new[] { 1, 1, TargetHeight, TargetWidth });
    for (int y = 0; y < TargetHeight; y++)
    {
        Span<Rgb24> pixelSpan = image.GetPixelRowSpan(y);
        for (int x = 0; x < TargetWidth; x++)
        {
            input[0, 0, y, x] = ((pixelSpan[x].R / 255f) - Mean) / Stddev;
        }
    }
    var inputs = new List<NamedOnnxValue>
    {
        NamedOnnxValue.CreateFromTensor("Input3", input),
    };
    using IDisposableReadOnlyCollection<DisposableNamedOnnxValue> results = session.Run(inputs);
    // FIX: materialize the softmax once — the original re-ran the deferred
    // Select for Max() and again for ToList().IndexOf().
    var output = results.First().AsEnumerable<float>().ToArray();
    var sum = output.Sum(x => (float)Math.Exp(x));
    var softmax = output.Select(x => (float)Math.Exp(x) / sum).ToList();
    float confidence = softmax.Max();
    int label = softmax.IndexOf(confidence);
    return new Prediction(path, label, confidence);
}
// Classifies a digit image: crop-resize to 28x28, grayscale, run the model,
// and parse the most confident class label as an integer.
public int LoadAndPredict(Image<Rgb24> image)
{
    const int TargetWidth = 28;
    const int TargetHeight = 28;

    // Crop-resize and grayscale in a single mutation pass.
    image.Mutate(ctx => ctx.Resize(new ResizeOptions
    {
        Size = new Size(TargetWidth, TargetHeight),
        Mode = ResizeMode.Crop
    }).Grayscale());

    // After grayscale conversion the channels are equal, so reading R suffices.
    var input = new DenseTensor<float>(new[] { 1, 1, TargetHeight, TargetWidth });
    for (int row = 0; row < TargetHeight; row++)
    {
        Span<Rgb24> pixels = image.GetPixelRowSpan(row);
        for (int col = 0; col < TargetWidth; col++)
        {
            input[0, 0, row, col] = pixels[col].R / 255.0f;
        }
    }

    using var session = new InferenceSession(model_name);
    string input_name = session.InputMetadata.Keys.First();
    var feeds = new List<NamedOnnxValue> { NamedOnnxValue.CreateFromTensor(input_name, input) };
    using IDisposableReadOnlyCollection<DisposableNamedOnnxValue> results = session.Run(feeds);

    // Softmax, then take the most confident label.
    float[] logits = results.First().AsEnumerable<float>().ToArray();
    float expSum = logits.Sum(v => (float)Math.Exp(v));
    var best = logits
        .Select((v, i) => new { Label = classLabels[i], Confidence = (float)Math.Exp(v) / expSum })
        .OrderByDescending(r => r.Confidence)
        .First();
    return Int32.Parse(best.Label);
}
// Classifies an image given either by path or by an in-memory byte blob and
// returns the top label with its confidence.
public PredictionResult Predict(string ImgPath, byte[] img = null)
{
    // Preprocess from the blob when provided, otherwise from the file path.
    var input = (img == null) ? ProcessImage(ImgPath) : ProcessImage(img);
    var inputs = new List<NamedOnnxValue>
    {
        NamedOnnxValue.CreateFromTensor(Session.InputMetadata.Keys.First(), input)
    };
    using IDisposableReadOnlyCollection<DisposableNamedOnnxValue> results = Session.Run(inputs);
    // Softmax over the raw logits.
    var output = results.First().AsEnumerable<float>().ToArray();
    var sum = output.Sum(x => (float)Math.Exp(x));
    // FIX: materialize the softmax once — the original rebuilt the deferred
    // query four times (ToList twice, Max twice) for a single lookup.
    var softmax = output.Select(x => (float)Math.Exp(x) / sum).ToList();
    var best = softmax.Max();
    return new PredictionResult(classLabels[softmax.IndexOf(best)], ImgPath, best);
}
// Preprocesses an image file, runs the classifier, and returns the top label
// with its softmax confidence.
public ResultClassification PredictModel(string imageFilePath)
{
    DenseTensor<float> TensorImage = OnnxClassifier.PreprocImage(imageFilePath);
    var inputs = new List<NamedOnnxValue>
    {
        NamedOnnxValue.CreateFromTensor(session.InputMetadata.Keys.First(), TensorImage)
    };
    using IDisposableReadOnlyCollection<DisposableNamedOnnxValue> results = session.Run(inputs);
    var output = results.First().AsEnumerable<float>().ToArray();
    float sum = output.Sum(x => (float)Math.Exp(x));
    var softmax = output.Select(x => (float)Math.Exp(x) / sum).ToList();
    // FIX: compute the maximum once instead of scanning the list twice.
    float best = softmax.Max();
    string cl = LabelMap.Labels[softmax.IndexOf(best)];
    ResultClassification result = new ResultClassification(imageFilePath, cl, best);
    return result;
}
// Converts raw detection-model outputs (boxes, per-class scores, selected
// indices) into a list of ORTItem detections.
// NOTE(review): assumes `results` holds exactly three tensors in the order
// boxes / scores / indices, and that `indices` stores flat triplets of
// (batch, class, box) per detection — confirm against the exported model.
static List <ORTItem> PostProcessing(IDisposableReadOnlyCollection <DisposableNamedOnnxValue> results)
{
    List <ORTItem> itemList = new List <ORTItem>();
    List <float[]> out_boxes = new List <float[]>();
    List <float[]> out_scores = new List <float[]>();
    List <int> out_classes = new List <int>();
    var boxes = results.AsEnumerable().ElementAt(0).AsTensor <float>();
    var scores = results.AsEnumerable().ElementAt(1).AsTensor <float>();
    var indices = results.AsEnumerable().ElementAt(2).AsTensor <int>();
    // Each detection is described by three consecutive index entries.
    int nbox = indices.Count() / 3;
    for (int ibox = 0; ibox < nbox; ibox++)
    {
        // Second entry of the triplet: the predicted class id.
        out_classes.Add(indices[0, 0, ibox * 3 + 1]);
        // Collect the 80 per-class scores for this box (COCO-sized class set).
        float[] score = new float[80];
        for (int j = 0; j < 80; j++)
        {
            score[j] = scores[indices[0, 0, ibox * 3 + 0], j, indices[0, 0, ibox * 3 + 2]];
        }
        out_scores.Add(score);
        // The four box coordinates. Given how the ORTItem below swaps them into
        // x/y/width/height, the layout appears to be (y1, x1, y2, x2) — TODO confirm.
        float[] box = new float[] {
            boxes[indices[0, 0, ibox * 3 + 0], indices[0, 0, ibox * 3 + 2], 0],
            boxes[indices[0, 0, ibox * 3 + 0], indices[0, 0, ibox * 3 + 2], 1],
            boxes[indices[0, 0, ibox * 3 + 0], indices[0, 0, ibox * 3 + 2], 2],
            boxes[indices[0, 0, ibox * 3 + 0], indices[0, 0, ibox * 3 + 2], 3]
        };
        out_boxes.Add(box);
        //output
        ORTItem item = new ORTItem((int)box[1], (int)box[0], (int)(box[3] - box[1]), (int)(box[2] - box[0]), out_classes[ibox], cfg.Labels[out_classes[ibox]], out_scores[ibox][out_classes[ibox]], 0, "lineName");
        itemList.Add(item);
    }
    return(itemList);
}
// Binary gender classifier: returns "Male" when the model's single output
// score is >= 0.5, otherwise "Female".
public string Predict(string imagePath)
{
    // Read image
    using (Image<Rgb24> image = Image.Load<Rgb24>(imagePath))
    {
        // Crop-resize to the model's 150x150 input.
        image.Mutate(x =>
        {
            x.Resize(new ResizeOptions
            {
                Size = new Size(150, 150),
                Mode = ResizeMode.Crop
            });
        });
        // RGB tensor scaled to [0, 1] (no mean/stddev normalization here).
        Tensor<float> input = new DenseTensor<float>(new[] { 1, 3, 150, 150 });
        for (int y = 0; y < image.Height; y++)
        {
            Span<Rgb24> pixelSpan = image.GetPixelRowSpan(y);
            for (int x = 0; x < image.Width; x++)
            {
                input[0, 0, y, x] = (pixelSpan[x].R / 255f);
                input[0, 1, y, x] = (pixelSpan[x].G / 255f);
                input[0, 2, y, x] = (pixelSpan[x].B / 255f);
            }
        }
        // Setup inputs
        var inputs = new List<NamedOnnxValue>
        {
            NamedOnnxValue.CreateFromTensor("input.1", input)
        };
        // FIX: dispose the native ONNX output collection (the original leaked it).
        using (IDisposableReadOnlyCollection<DisposableNamedOnnxValue> results = _session?.Run(inputs))
        {
            float output = results.First().AsEnumerable<float>().First();
            return (output >= 0.5) ? "Male" : "Female";
        }
    }
}
// Classifies a grayscale image at `img` and returns the most probable label.
public string Predict(string img)
{
    using var image = Image.Load<Rgb24>(img);
    image.Mutate(x =>
    {
        x.Resize(new ResizeOptions
        {
            Size = new Size(TargetWidth, TargetHeight),
            Mode = ResizeMode.Crop
        });
        x.Grayscale();
    });
    // Single-channel input in [0, 1]; after Grayscale() the channels are equal,
    // so reading R is sufficient.
    var input = new DenseTensor<float>(new[] { 1, 1, TargetHeight, TargetWidth });
    for (int y = 0; y < TargetHeight; y++)
    {
        Span<Rgb24> pixelSpan = image.GetPixelRowSpan(y);
        for (int x = 0; x < TargetWidth; x++)
        {
            input[0, 0, y, x] = pixelSpan[x].R / 255.0f;
        }
    }
    using var session = new InferenceSession(ModelFolder + ModelFile);
    var inputs = new List<NamedOnnxValue>
    {
        NamedOnnxValue.CreateFromTensor(session.InputMetadata.Keys.First(), input)
    };
    using IDisposableReadOnlyCollection<DisposableNamedOnnxValue> results = session.Run(inputs);
    var output = results.First().AsEnumerable<float>().ToArray();
    var sum = output.Sum(x => (float)Math.Exp(x));
    // FIX: materialize the softmax once — the original re-ran the deferred
    // Select for ToList() and again for Max().
    var softmax = output.Select(x => (float)Math.Exp(x) / sum).ToList();
    return classLabels[softmax.IndexOf(softmax.Max())];
}
// Classifies one image tensor and prints the top-1 label and confidence,
// prefixed with the image's file name.
public void result(Tuple<DenseTensor<float>, string> input)
{
    var feeds = new List<NamedOnnxValue>
    {
        NamedOnnxValue.CreateFromTensor("data", input.Item1)
    };
    using IDisposableReadOnlyCollection<DisposableNamedOnnxValue> results = session.Run(feeds);

    // Softmax over the raw logits.
    float[] logits = results.First().AsEnumerable<float>().ToArray();
    float expSum = logits.Sum(v => (float)Math.Exp(v));
    var ranked = logits
        .Select((v, i) => new { Label = classLabels[i], Confidence = (float)Math.Exp(v) / expSum })
        .OrderByDescending(r => r.Confidence)
        .Take(1);

    // Report the best class together with the bare file name.
    foreach (var p in ranked)
    {
        string fileName = input.Item2.Substring(input.Item2.LastIndexOf('\\') + 1);
        Console.WriteLine(fileName + " - " + p.Label + " confidence = " + p.Confidence);
    }
}