/// <summary>
/// Runs the fastspeech TFLite model on a sequence of token IDs and returns the
/// generated spectrogram.
/// </summary>
/// <param name="inputIDs">token ID sequence produced from the input text
/// (FIX: the previous doc described a nonexistent "text" parameter)</param>
/// <returns>spectrogram copied from the model's output tensor 1</returns>
private float[,,] FastspeechInference(ref int[] inputIDs)
{
    // Resize input 0 to the current sequence length; inputs 1 and 2 are
    // single-element tensors (fed from speakerID / speedRatio via PrepareInput).
    _fastspeechInterpreter.ResizeInputTensor(0, new int[2] { 1, inputIDs.Length });
    _fastspeechInterpreter.ResizeInputTensor(1, new int[1] { 1 });
    _fastspeechInterpreter.ResizeInputTensor(2, new int[1] { 1 });
    _fastspeechInterpreter.AllocateTensors();

    Array[] inputData = PrepareInput(ref inputIDs, ref speakerID, ref speedRatio);
    for (int d = 0; d < inputData.Length; d++)
    {
        _fastspeechInterpreter.SetInputTensorData(d, inputData[d]);
    }

    _fastspeechInterpreter.Invoke();

    // Output tensor 1 holds the spectrogram; copy it into a managed 3-D array.
    int[] outputShape = _fastspeechInterpreter.GetOutputTensorInfo(1).shape;
    float[,,] outputData = new float[outputShape[0], outputShape[1], outputShape[2]];
    _fastspeechInterpreter.GetOutputTensorData(1, outputData);
    return (outputData);
}
/// <summary>
/// Inferencing fastspeech tflite model by taking in text and converting it
/// into a spectrogram.
/// </summary>
/// <param name="text">input text, converted to token IDs via TextToSequence</param>
/// <returns>spectrogram copied from the model's output tensor 1</returns>
private float[,,] FastspeechInference(ref string text)
{
    // Resize the input tensors to fit the size of inputIDs.
    int[] inputIDs = TextToSequence(text);
    _fastspeechInterpreter.ResizeInputTensor(0, new int[2] { 1, inputIDs.Length });
    _fastspeechInterpreter.ResizeInputTensor(1, new int[1] { 1 });
    _fastspeechInterpreter.ResizeInputTensor(2, new int[1] { 1 });

    // Allocate tensors and set input data.
    _fastspeechInterpreter.AllocateTensors();
    Array[] inputData = PrepareInput(ref inputIDs, ref speakerID, ref speedRatio);
    // FIX: iterate over the actual number of prepared inputs instead of the
    // hard-coded "3" (keeps this method consistent with its sibling overload).
    for (int d = 0; d < inputData.Length; d++)
    {
        _fastspeechInterpreter.SetInputTensorData(d, inputData[d]);
    }

    // Run the interpreter.
    _fastspeechInterpreter.Invoke();

    // Obtain the output from the model (output tensor 1 = spectrogram).
    int[] outputShape = _fastspeechInterpreter.GetOutputTensorInfo(1).shape;
    float[,,] outputData = new float[outputShape[0], outputShape[1], outputShape[2]];
    _fastspeechInterpreter.GetOutputTensorData(1, outputData);
    return (outputData);
}
/// <summary>
/// Per-frame driver: (re)sizes the model and output buffer whenever the input
/// length changes, runs inference, and reports timing plus values on screen.
/// </summary>
void Update()
{
    if (inputs == null)
    {
        return;
    }

    bool needsRealloc = outputs == null || outputs.Length != inputs.Length;
    if (needsRealloc)
    {
        // Keep the model's input tensor and the output buffer in lockstep
        // with the current input length.
        interpreter.ResizeInputTensor(0, new int[] { inputs.Length });
        interpreter.AllocateTensors();
        outputs = new float[inputs.Length];
    }

    float tStart = Time.realtimeSinceStartup;
    interpreter.SetInputTensorData(0, inputs);
    interpreter.Invoke();
    interpreter.GetOutputTensorData(0, outputs);
    float elapsedSeconds = Time.realtimeSinceStartup - tStart;

    inferenceText.text = string.Format(
        "Inference took {0:0.0000} ms\nInput(s): {1}\nOutput(s): {2}",
        elapsedSeconds * 1000.0,
        ArrayToString(inputs),
        ArrayToString(outputs));
}
/// <summary>
/// (Re)loads the TFLite model and label file fetched by the download manager
/// and prepares the interpreter plus its first input/output tensors.
/// </summary>
private void ImportGraph()
{
    if (_interpreter != null)
    {
        _interpreter.Dispose();
        // BUG FIX: the old code disposed the interpreter but left the field
        // (and the tensors obtained from it) set, so the "== null" checks
        // below never rebuilt them and the disposed interpreter was reused.
        _interpreter = null;
        _inputTensor = null;
        _outputTensor = null;
    }

#if UNITY_EDITOR || UNITY_IOS || UNITY_ANDROID || UNITY_STANDALONE
    UnityEngine.Debug.Log("Reading model definition");
#endif

    // Files[0] = model, Files[1] = labels (one per line).
    String modelFileName = _downloadManager.Files[0].LocalFile;
    String labelFileName = _downloadManager.Files[1].LocalFile;
    System.Diagnostics.Debug.Assert(File.Exists(modelFileName), String.Format("File {0} doesn't exist", modelFileName));
    System.Diagnostics.Debug.Assert(File.Exists(labelFileName), String.Format("File {0} doesn't exist", labelFileName));
    if (!File.Exists(modelFileName) || !File.Exists(labelFileName))
    {
        return;
    }

    if (_labels == null)
    {
        _labels = File.ReadAllLines(labelFileName);
    }

    if (_model == null)
    {
        _model = new FlatBufferModel(modelFileName);
        if (!_model.CheckModelIdentifier())
        {
            throw new Exception("Model identifier check failed");
        }
    }

    if (_interpreter == null)
    {
        _interpreter = new Interpreter(_model);
        Status allocateTensorStatus = _interpreter.AllocateTensors();
        if (allocateTensorStatus == Status.Error)
        {
            throw new Exception("Failed to allocate tensor");
        }
    }

    // Cache the first input and output tensors for later feeding/reading.
    if (_inputTensor == null)
    {
        int[] input = _interpreter.InputIndices;
        _inputTensor = _interpreter.GetTensor(input[0]);
    }
    if (_outputTensor == null)
    {
        int[] output = _interpreter.OutputIndices;
        _outputTensor = _interpreter.GetTensor(output[0]);
    }
}
/// <summary>
/// Builds a human-readable report of a TFLite model's input and output
/// tensors (index, name, type and dimensions). On failure the exception text
/// is appended to whatever was produced so far and returned.
/// </summary>
/// <param name="fileName">path of the .tflite model file</param>
/// <returns>multi-line description of the model</returns>
private static String GetModelInfo(String fileName)
{
    StringBuilder report = new StringBuilder();
    try
    {
        report.Append(String.Format("File Name:{0}{1}", fileName, Environment.NewLine));
        using (FlatBufferModel fbm = new FlatBufferModel(fileName))
        {
            if (!fbm.CheckModelIdentifier())
            {
                throw new Exception("Model identifier check failed");
            }
            using (Interpreter interpreter = new Interpreter(fbm))
            {
                if (interpreter.AllocateTensors() == Status.Error)
                {
                    throw new Exception("Failed to allocate tensor");
                }

                // One report line per input tensor.
                int[] inputIndices = interpreter.InputIndices;
                for (int idx = 0; idx < inputIndices.Length; idx++)
                {
                    Tensor t = interpreter.GetTensor(inputIndices[idx]);
                    report.Append(String.Format("Input {0} ({1}): {2}{3}{4}", idx, t.Name, t.Type, IntArrayToString(t.Dims), Environment.NewLine));
                }

                // One report line per output tensor.
                int[] outputIndices = interpreter.OutputIndices;
                for (int idx = 0; idx < outputIndices.Length; idx++)
                {
                    Tensor t = interpreter.GetTensor(outputIndices[idx]);
                    report.Append(String.Format("Output {0} ({1}): {2}{3}{4}", idx, t.Name, t.Type, IntArrayToString(t.Dims), Environment.NewLine));
                }
            }
        }
        return (report.ToString());
    }
    catch (Exception ex)
    {
        report.Append(String.Format("Exception processing file {0}: {1}{2} ", fileName, ex.ToString(), Environment.NewLine));
        return (report.ToString());
    }
}
/// <summary>
/// Creates the interpreter for a 28x28 single-channel model (2 CPU threads,
/// no GPU delegate) and allocates the matching GPU-side input buffer.
/// </summary>
void Start()
{
    var options = new Interpreter.Options()
    {
        threads = 2,
        gpuDelegate = null,
    };
    interpreter = new Interpreter(FileUtil.LoadFile(fileName), options);

    // Fixed input shape: one 28x28 grayscale image.
    interpreter.ResizeInputTensor(0, new int[] { 1, 28, 28, 1 });
    interpreter.AllocateTensors();

    inputBuffer = new ComputeBuffer(28 * 28, sizeof(float));
}
/// <summary>
/// Loads the TFLite model from a stream, builds the interpreter (optionally
/// multi-threaded) and caches the input tensor plus all output tensors.
/// </summary>
/// <param name="modelData">stream containing the .tflite flatbuffer</param>
/// <param name="useNumThreads">when true, use one thread per processor</param>
/// <returns>false if the model identifier check or tensor allocation fails</returns>
public bool Initialize(Stream modelData, bool useNumThreads)
{
    using (var builder = new TextRecognizer.Builder(MainActivity.context))
    {
        txtRecognizer = builder.Build();
    }

    // Buffer the whole stream in memory; FlatBufferModel needs a byte array.
    using (var buffer = new MemoryStream())
    {
        modelData.CopyTo(buffer);
        model = new FlatBufferModel(buffer.ToArray());
    }
    if (!model.CheckModelIdentifier())
    {
        return (false);
    }

    var resolver = new BuildinOpResolver();
    interpreter = new Interpreter(model, resolver);
    if (useNumThreads)
    {
        interpreter.SetNumThreads(Environment.ProcessorCount);
    }
    if (interpreter.AllocateTensors() == Status.Error)
    {
        return (false);
    }

    var inputIndices = interpreter.GetInput();
    inputTensor = interpreter.GetTensor(inputIndices[0]);

    // NOTE(review): this assumes the output tensors occupy consecutive
    // indices starting at the first output index — confirm for this model.
    var outputIndices = interpreter.GetOutput();
    var firstOutput = outputIndices[0];
    outputTensors = new Tensor[outputIndices.Length];
    for (var i = 0; i < outputIndices.Length; i++)
    {
        outputTensors[i] = interpreter.GetTensor(firstOutput + i);
    }
    return (true);
}
/// <summary>
/// Runs the interpreter on the current inputs each frame and applies the
/// predicted offsets (relative to vmean) to the tracked components:
/// outputs[0..2] drive the root position, then 4 consecutive values per
/// component form a rotation quaternion.
/// </summary>
void LateUpdate()
{
    if (inputs == null)
    {
        return;
    }
    if (outputs == null)
    {
        // First run: size the model input to the flattened input vector and
        // the output to 4 rotation values per component plus 3 position values.
        interpreter.ResizeInputTensor(0, new int[] { inputs.Length });
        interpreter.AllocateTensors();
        outputs = new float[componentList.Count * 4 + 3];
    }
    if (interpreter != null)
    {
        float startTimeSeconds = Time.realtimeSinceStartup;
        interpreter.SetInputTensorData(0, inputs);
        interpreter.Invoke();
        interpreter.GetOutputTensorData(0, outputs);
        float inferenceTimeSeconds = Time.realtimeSinceStartup - startTimeSeconds;
        inferenceText.text = string.Format(
            "Inference took {0:0.0000} ms\nInput(s): {1}",
            inferenceTimeSeconds * 1000.0,
            ArrayToString(inputs)
            );
        for (int i = 0; i < componentList.Count; i++)
        {
            if (i == 0)
            {
                // The first three outputs are a position offset added to the
                // mean position stored in vmean[0..2].
                componentList[0].transform.position = new Vector3(
                    vmean[0] + outputs[0],
                    vmean[1] + outputs[1],
                    vmean[2] + outputs[2]
                    );
            }
            // Component i reads 4 consecutive values at offset 3 + i*4,
            // each added to the matching vmean entry.
            // NOTE(review): the resulting quaternion is not normalized here —
            // presumably the model output is close enough to unit length; confirm.
            componentList[i].transform.rotation = new Quaternion(
                vmean[i * 4 + 0 + 3] + outputs[i * 4 + 0 + 3],
                vmean[i * 4 + 1 + 3] + outputs[i * 4 + 1 + 3],
                vmean[i * 4 + 2 + 3] + outputs[i * 4 + 2 + 3],
                vmean[i * 4 + 3 + 3] + outputs[i * 4 + 3 + 3]
                );
        }
    }
}
/// <summary>
/// Sets up a 4-thread CPU interpreter (NNAPI disabled) for a 512x512 RGB
/// model, allocates its tensors and the GPU-side input buffer, and caches the
/// slider object from the scene.
/// </summary>
void Start()
{
    var options = new InterpreterOptions()
    {
        threads = 4,
        useNNAPI = false,
    };
    interpreter = new Interpreter(FileUtil.LoadFile(fileName), options);

    // Fixed input shape: one 512x512 image with 3 channels.
    interpreter.ResizeInputTensor(0, new int[] { 1, 512, 512, 3 });
    interpreter.AllocateTensors();

    inputBuffer = new ComputeBuffer(512 * 512 * 3, sizeof(float));
    Slider = GameObject.Find("Slider");
}
/// <summary>
/// Feeds the given pixel data into the detection interpreter, runs it, and
/// logs the detected boxes, classes and scores.
/// </summary>
/// <param name="colors">pixel values copied into the first input tensor</param>
/// <param name="interpreter">interpreter with the detection model loaded</param>
private void InvokeInterpreter(int[] colors, Interpreter interpreter)
{
    if (useNumThreads)
    {
        interpreter.SetNumThreads(Environment.ProcessorCount);
    }
    if (interpreter.AllocateTensors() == Status.Error)
    {
        throw new Exception("Failed to allocate tensor");
    }

    var inputIndices = interpreter.GetInput();
    using (var inputTensor = interpreter.GetTensor(inputIndices[0]))
    {
        CopyColorsToTensor(inputTensor.DataPointer, colors);
        var watchInvoke = Stopwatch.StartNew();
        interpreter.Invoke();
        watchInvoke.Stop();
        Console.WriteLine($"InterpreterInvoke: {watchInvoke.ElapsedMilliseconds}ms");
    }

    // NOTE(review): unlike the input tensor above, the output tensors are
    // never disposed — confirm whether the Tensor wrapper holds native
    // resources that need explicit disposal.
    var outputIndices = interpreter.GetOutput();
    var firstOutput = outputIndices[0];
    var outputTensors = new Tensor[outputIndices.Length];
    for (var i = 0; i < outputIndices.Length; i++)
    {
        outputTensors[i] = interpreter.GetTensor(firstOutput + i);
    }

    // SSD-style post-processing outputs: boxes, classes, scores, count.
    var detection_boxes_out = outputTensors[0].GetData() as float[];
    var detection_classes_out = outputTensors[1].GetData() as float[];
    var detection_scores_out = outputTensors[2].GetData() as float[];
    var num_detections_out = outputTensors[3].GetData() as float[];
    var numDetections = num_detections_out[0];

    LogDetectionResults(detection_classes_out, detection_scores_out, detection_boxes_out, (int)numDetections);
}
/// <summary>
/// Converts a spectrogram to an audio waveform with the melgan TFLite model.
/// </summary>
/// <param name="spectogram">input spectrogram tensor</param>
/// <returns>3-D float array copied from the model's output tensor 0</returns>
private float[,,] MelganInference(ref float[,,] spectogram)
{
    // The model input must match the spectrogram's exact 3-D shape.
    int[] inputShape = new int[3]
    {
        spectogram.GetLength(0),
        spectogram.GetLength(1),
        spectogram.GetLength(2)
    };
    _melganInterpreter.ResizeInputTensor(0, inputShape);
    _melganInterpreter.AllocateTensors();

    _melganInterpreter.SetInputTensorData(0, spectogram);
    _melganInterpreter.Invoke();

    // Copy the model output into a managed 3-D array.
    int[] shape = _melganInterpreter.GetOutputTensorInfo(0).shape;
    float[,,] audio = new float[shape[0], shape[1], shape[2]];
    _melganInterpreter.GetOutputTensorData(0, audio);
    return (audio);
}
/// <summary>
/// Runs the deepspeech TFLite model on a raw audio stream and returns the
/// character buffer copied from output tensor 0.
/// </summary>
/// <param name="inputStream">audio samples fed to the model</param>
/// <returns>decoded characters from the model output</returns>
private char[] DeepspeechInference(ref float[] inputStream)
{
    // Fit input 0 to the number of samples, then allocate.
    _deepspeechInterpreter.ResizeInputTensor(0, new int[1] { inputStream.Length });
    _deepspeechInterpreter.AllocateTensors();

    // PrepareInput may yield several input tensors; feed them in order.
    Array[] feeds = PrepareInput(ref inputStream);
    for (int i = 0; i < feeds.Length; i++)
    {
        _deepspeechInterpreter.SetInputTensorData(i, feeds[i]);
    }

    _deepspeechInterpreter.Invoke();

    int[] shape = _deepspeechInterpreter.GetOutputTensorInfo(0).shape;
    char[] decoded = new char[shape[0]];
    _deepspeechInterpreter.GetOutputTensorData(0, decoded);
    return (decoded);
}
/// <summary>
/// Loads a TFLite model from disk, validates it, and builds the interpreter
/// plus its first input/output tensors.
/// </summary>
/// <param name="modelFilename">path to the .tflite model file</param>
/// <exception cref="ArgumentNullException">when modelFilename is null</exception>
/// <exception cref="Exception">when the file is missing, the identifier check
/// fails, or tensor allocation fails</exception>
public MLModel(string modelFilename)
{
    if (modelFilename == null)
    {
        throw new ArgumentNullException(nameof(modelFilename));
    }
    if (!File.Exists(modelFilename))
    {
        throw new Exception(modelFilename + " does not exist.");
    }

    // FIX: this local was previously named "_model", which made it look like
    // a field assignment when it is actually a local. NOTE(review): the model
    // is never stored or disposed — presumably the interpreter keeps the
    // native model alive; confirm, or promote this to a disposable field.
    FlatBufferModel model = new FlatBufferModel(modelFilename);
    if (!model.CheckModelIdentifier())
    {
        throw new Exception("Model identifier check failed");
    }

    _interpreter = new Interpreter(model);
    Status allocateTensorStatus = _interpreter.AllocateTensors();
    if (allocateTensorStatus == Status.Error)
    {
        throw new Exception("Failed to allocate tensor");
    }

    // Cache the first input and output tensors (only if not already set).
    if (_inputTensor == null)
    {
        int[] input = _interpreter.InputIndices;
        _inputTensor = _interpreter.GetTensor(input[0]);
    }
    if (_outputTensor == null)
    {
        int[] output = _interpreter.OutputIndices;
        _outputTensor = _interpreter.GetTensor(output[0]);
    }
}
/// <summary>
/// Initializes the drawing demo: creates a 2-thread CPU interpreter for a
/// 28x28 single-channel model and allocates the matching compute buffer.
/// (Dead commented-out drag-tracking experiments were removed.)
/// </summary>
public void Start()
{
    shouldClearText = true;

    var options = new Interpreter.Options()
    {
        threads = 2,
        gpuDelegate = null,
    };
    interpreter = new Interpreter(FileUtil.LoadFile(fileName), options);

    // Fixed input shape: one 28x28 grayscale image.
    interpreter.ResizeInputTensor(0, new int[] { 1, 28, 28, 1 });
    interpreter.AllocateTensors();

    inputBuffer = new ComputeBuffer(28 * 28, sizeof(float));
}
/// <summary>
/// Each frame: keeps the model input and output buffer sized to the current
/// input length, runs inference, and logs the input/output values.
/// </summary>
void Update()
{
    if (inputs == null)
    {
        return;
    }

    bool sizeChanged = outputs == null || outputs.Length != inputs.Length;
    if (sizeChanged)
    {
        interpreter.ResizeInputTensor(0, new int[] { inputs.Length });
        interpreter.AllocateTensors();
        outputs = new float[inputs.Length];
    }

    interpreter.SetInputTensorData(0, inputs);
    interpreter.Invoke();
    interpreter.GetOutputTensorData(0, outputs);

    Debug.LogFormat("Input: {0}, Output: {1}", ArrayToString(inputs), ArrayToString(outputs));
}
/// <summary>
/// Inferencing melgan tflite model: turns a spectrogram into audio samples.
/// </summary>
/// <param name="spectogram">input spectrogram tensor</param>
/// <returns>3-D float array copied from the model's first output</returns>
private float[,,] MelganInference(ref float[,,] spectogram)
{
    int dim0 = spectogram.GetLength(0);
    int dim1 = spectogram.GetLength(1);
    int dim2 = spectogram.GetLength(2);

    // Match input 0 to the spectrogram shape, then allocate and feed it.
    _melganInterpreter.ResizeInputTensor(0, new int[3] { dim0, dim1, dim2 });
    _melganInterpreter.AllocateTensors();
    _melganInterpreter.SetInputTensorData(0, spectogram);

    // Run the interpreter.
    _melganInterpreter.Invoke();

    // Copy the model output into a managed 3-D array and return it.
    int[] outShape = _melganInterpreter.GetOutputTensorInfo(0).shape;
    float[,,] result = new float[outShape[0], outShape[1], outShape[2]];
    _melganInterpreter.GetOutputTensorData(0, result);
    return (result);
}
/// <summary>
/// Download-completion callback: lazily loads the labels and the TFLite model
/// from the downloaded files and builds the interpreter (each step only runs
/// while the corresponding field is still null).
/// </summary>
private void onDownloadCompleted(object sender, System.ComponentModel.AsyncCompletedEventArgs e)
{
    // Files[0] = model, Files[1] = labels (one per line).
    String localFileName = _downloadManager.Files[0].LocalFile;
    if (_labels == null)
    {
        _labels = File.ReadAllLines(_downloadManager.Files[1].LocalFile);
    }

    System.Diagnostics.Debug.Assert(File.Exists(localFileName), "File doesn't exist");
    // FIX: removed the unused "FileInfo file" local.

    if (_model == null)
    {
        _model = new FlatBufferModel(localFileName);
        if (!_model.CheckModelIdentifier())
        {
            // FIX: corrected "indentifier" typo in the message.
            throw new Exception("Model identifier check failed");
        }
    }
    if (_resolver == null)
    {
        _resolver = new BuildinOpResolver();
    }
    if (_interpreter == null)
    {
        _interpreter = new Interpreter(_model, _resolver);
        Status allocateTensorStatus = _interpreter.AllocateTensors();
        if (allocateTensorStatus == Status.Error)
        {
            throw new Exception("Failed to allocate tensor");
        }
    }
}
/// <summary>
/// Lazily loads labels, model and interpreter from the downloaded files.
/// On Android the NNAPI delegate is attached and 4 threads are used; an
/// optional extra delegate can also be applied before tensor allocation.
/// </summary>
/// <param name="optDelegate">optional delegate applied to the graph before
/// tensors are allocated; ignored when null</param>
private void ImportGraph(IDelegate optDelegate = null)
{
#if UNITY_EDITOR || UNITY_IOS || UNITY_ANDROID || UNITY_STANDALONE
    UnityEngine.Debug.Log("Reading model definition");
#endif
    if (_labels == null)
    {
        // Files[1] is the label file (one label per line).
        String labelFileName = _downloadManager.Files[1].LocalFile;
        System.Diagnostics.Debug.Assert(File.Exists(labelFileName), String.Format("File {0} doesn't exist", labelFileName));
        if (!File.Exists(labelFileName))
        {
            throw new Exception("Label file does not exist!");
        }
        _labels = File.ReadAllLines(labelFileName);
    }
    if (_model == null)
    {
        // Files[0] is the .tflite flatbuffer model.
        String modelFileName = _downloadManager.Files[0].LocalFile;
        System.Diagnostics.Debug.Assert(File.Exists(modelFileName), String.Format("File {0} doesn't exist", modelFileName));
        if (!File.Exists(modelFileName))
        {
            throw new Exception("Model file does not exist!");
        }
        _model = new FlatBufferModel(modelFileName);
        if (!_model.CheckModelIdentifier())
        {
            throw new Exception("Model identifier check failed");
        }
    }
    if (_interpreter == null)
    {
        _interpreter = new Interpreter(_model);

        // Detect Android either at compile time or, in other builds, by
        // probing for the Mono.Android assembly at runtime.
        bool isAndroid = false;
#if UNITY_ANDROID && (!UNITY_EDITOR)
        isAndroid = true;
#else
        System.Reflection.Assembly monoAndroidAssembly = Emgu.TF.Util.Toolbox.FindAssembly("Mono.Android.dll");
        if (monoAndroidAssembly != null)
        {
            isAndroid = true;
        }
#endif
        if (isAndroid)
        {
            // GPU delegate was tried previously and left disabled.
            //_interpreter.ModifyGraphWithDelegate(TfLiteInvoke.DefaultGpuDelegateV2);
            _interpreter.ModifyGraphWithDelegate(TfLiteInvoke.DefaultNnApiDelegate);
            //_interpreter.UseNNAPI(false);
            _interpreter.SetNumThreads(4);
        }
        //_interpreter.Build(_model);
        // Apply the caller-supplied delegate (if any) before allocation.
        if (optDelegate != null)
        {
            _interpreter.ModifyGraphWithDelegate(optDelegate);
        }
        Status allocateTensorStatus = _interpreter.AllocateTensors();
        if (allocateTensorStatus == Status.Error)
        {
            throw new Exception("Failed to allocate tensor");
        }
    }
    // Cache the first input tensor and all output tensors for later use.
    if (_inputTensor == null)
    {
        _inputTensor = _interpreter.Inputs[0];
    }
    if (_outputTensors == null)
    {
        _outputTensors = _interpreter.Outputs;
    }
}
/// <summary>
/// Download-completion callback: lazily loads labels/model/interpreter, runs
/// the first image through the network, and reports the best-scoring label
/// with its probability and inference time.
/// </summary>
private void onDownloadCompleted(object sender, System.ComponentModel.AsyncCompletedEventArgs e)
{
    // Files[0] = model, Files[1] = labels (one per line).
    String localFileName = _downloadManager.Files[0].LocalFile;
    if (_labels == null)
    {
        _labels = File.ReadAllLines(_downloadManager.Files[1].LocalFile);
    }
    System.Diagnostics.Debug.Assert(File.Exists(localFileName), "File doesn't exist");
    // FIX: removed the unused "FileInfo file" local.

    if (_model == null)
    {
        _model = new FlatBufferModel(localFileName);
        if (!_model.CheckModelIdentifier())
        {
            // FIX: corrected "indentifier" typo in the message.
            throw new Exception("Model identifier check failed");
        }
    }
    if (_resolver == null)
    {
        _resolver = new BuildinOpResolver();
    }
    if (_interpreter == null)
    {
        _interpreter = new Interpreter(_model, _resolver);
        Status allocateTensorStatus = _interpreter.AllocateTensors();
        if (allocateTensorStatus == Status.Error)
        {
            throw new Exception("Failed to allocate tensor");
        }
    }

    int[] input = _interpreter.GetInput();
    int[] output = _interpreter.GetOutput();
    Tensor inputTensor = _interpreter.GetTensor(input[0]);
    Tensor outputTensor = _interpreter.GetTensor(output[0]);

    // 224x224 input, normalized with offset 128 and scale 1/128.
    NativeImageIO.ReadImageFileToTensor(_image[0], inputTensor.DataPointer, 224, 224, 128.0f, 1.0f / 128.0f);

    Stopwatch watch = Stopwatch.StartNew();
    _interpreter.Invoke();
    watch.Stop();

    // Arg-max over the class probabilities to pick the best label.
    float[] probability = outputTensor.GetData() as float[];
    String resStr = String.Empty;
    if (probability != null)
    {
        float maxVal = 0;
        int maxIdx = 0;
        for (int i = 0; i < probability.Length; i++)
        {
            if (probability[i] > maxVal)
            {
                maxVal = probability[i];
                maxIdx = i;
            }
        }
        resStr = String.Format("Object is {0} with {1}% probability. Recognition completed in {2} milliseconds.", _labels[maxIdx], maxVal * 100, watch.ElapsedMilliseconds);
    }

    SetImage(_image[0]);
    SetMessage(resStr);
}
/// <summary>
/// Lazily loads the label file, the TFLite model and the interpreter, then
/// caches the first input and output tensors.
/// </summary>
private void ImportGraph()
{
#if UNITY_EDITOR || UNITY_IOS || UNITY_ANDROID || UNITY_STANDALONE
    UnityEngine.Debug.Log("Reading model definition");
#endif
    // Files[1] is the label file (one label per line).
    if (_labels == null)
    {
        String labelFileName = _downloadManager.Files[1].LocalFile;
        if (!File.Exists(labelFileName))
        {
            throw new Exception(String.Format("File {0} doesn't exist", labelFileName));
        }
        _labels = File.ReadAllLines(labelFileName);
    }

    // Files[0] is the .tflite flatbuffer model.
    if (_model == null)
    {
        String modelFileName = _downloadManager.Files[0].LocalFile;
        if (!File.Exists(modelFileName))
        {
            throw new Exception(String.Format("File {0} doesn't exist", modelFileName));
        }
        _model = new FlatBufferModel(modelFileName);
        if (!_model.CheckModelIdentifier())
        {
            throw new Exception("Model identifier check failed");
        }
    }

    if (_interpreter == null)
    {
        _interpreter = new Interpreter(_model);
        // (A previous commented-out experiment enabling NNAPI was removed.)
        if (_interpreter.AllocateTensors() == Status.Error)
        {
            throw new Exception("Failed to allocate tensor");
        }
    }

    // Cache the first input and output tensors for later feeding/reading.
    if (_inputTensor == null)
    {
        int[] inputIndices = _interpreter.InputIndices;
        _inputTensor = _interpreter.GetTensor(inputIndices[0]);
    }
    if (_outputTensor == null)
    {
        int[] outputIndices = _interpreter.OutputIndices;
        _outputTensor = _interpreter.GetTensor(outputIndices[0]);
    }
}
/// <summary>
/// Initializes the node to process classifications with a TFLite model.
/// </summary>
/// <param name="path">The path to the model file</param>
/// <param name="inputName">The name of the input to use</param>
/// <param name="outputName">The name of the output to use</param>
/// <param name="labels">The labels used for each of the classification
/// steps.</param>
/// <param name="numColumns">The number of columns in the grid of
/// classifications.</param>
/// <param name="numRows">The number of rows in the grid of classifications.</param>
public Classifier_TFLite(string path, string inputName, string outputName, IReadOnlyList<string> labels, int numColumns = 1, int numRows = 1)
{
    // Load the flatbuffer model definition for the TFlite model.
    flatBufferModel = new FlatBufferModel(path);
    if (!flatBufferModel.CheckModelIdentifier())
    {
        throw new Exception("Model identifier check failed");
    }

    // Set up the TFLite interpreter to allow using the model.
    interpreter = new Interpreter(flatBufferModel);
    var status = interpreter.AllocateTensors();
    if (status == Status.Error)
    {
        throw new Exception("Failed to allocate tensor");
    }

    // Find the input tensor by name.
    // NOTE(review): if no tensor matches inputName, this silently falls back
    // to the LAST input tensor (pre-existing behavior, kept for compatibility).
    foreach (var tensor in interpreter.Inputs)
    {
        InputTensor = tensor;
        if (inputName == tensor.Name)
        {
            break;
        }
    }

    // Find the output tensor by name, with the same last-tensor fallback.
    foreach (var tensor in interpreter.Outputs)
    {
        OutputTensor = tensor;
        if (outputName == tensor.Name)
        {
            break;
        }
    }

    // Get the size from input dimensions.
    // 3 dims: [height, width, channels]; 4 dims: [batch, height, width, channels].
    var dim = InputTensor.Dims;
    var dimOfs = 0;
    if (dim.Length > 3)
    {
        dimOfs = 1;
    }
    var height = dim[dimOfs + 0];
    var width = dim[dimOfs + 1];
    inputSize = new Size(width, height);

    // Only float32 and uint8 models are supported.
    inputType = InputTensor.Type;
    if (inputType != DataType.Float32 && inputType != DataType.UInt8)
    {
        // FIX: removed the redundant String.Format wrapper around an already
        // interpolated string (it would throw on literal braces in the text).
        throw new Exception($"Data Type of {inputType} is not supported.");
    }

    this.labels = labels;

    // A 1x1 grid means plain classification; anything else is localization.
    isLocalization = numColumns * numRows != 1;
    if (!isLocalization)
    {
        // Index array later sorted to rank classifications.
        indices = new int[labels.Count];
        for (var idx = 0; idx < indices.Length; idx++)
        {
            indices[idx] = idx;
        }
    }

    // Allocate the grid of regions that are classified.
    this.numColumns = numColumns;
    this.numRows = numRows;
    outputGrid = new Classification[isLocalization ? numRows : (labels.Count < 5 ? numRows : 5), numColumns];
}