/// <summary>
/// Creates a TensorFlow Lite interpreter from an in-memory model buffer.
/// </summary>
/// <param name="modelData">Raw flatbuffer model bytes; pinned for the native library's lifetime.</param>
/// <param name="options">Interpreter options; may be null, in which case defaults are used.</param>
/// <exception cref="Exception">Thrown when the native model or interpreter cannot be created.</exception>
public Interpreter(byte[] modelData, InterpreterOptions options)
{
    // Pin the managed buffer so the native library can hold a stable pointer to it.
    modelDataHandle = GCHandle.Alloc(modelData, GCHandleType.Pinned);
    IntPtr modelDataPtr = modelDataHandle.AddrOfPinnedObject();
    model = TfLiteModelCreate(modelDataPtr, modelData.Length);
    if (model == IntPtr.Zero)
    {
        // Release the pinned buffer on failure so we don't leak the GCHandle.
        modelDataHandle.Free();
        throw new Exception("Failed to create TensorFlowLite Model");
    }
    this.options = options ?? new InterpreterOptions();
    // BUG FIX: the original passed the raw `options` parameter here, which throws
    // NullReferenceException when callers pass null — defeating the coalescing
    // into this.options on the line above. Use the coalesced field instead.
    interpreter = TfLiteInterpreterCreate(model, this.options.nativePtr);
    if (interpreter == IntPtr.Zero)
    {
        modelDataHandle.Free();
        throw new Exception("Failed to create TensorFlowLite Interpreter");
    }
}
/// <summary>
/// Loads the SmartReply model from disk and prepares it for CPU-only inference.
/// </summary>
/// <param name="modelPath">Path to the TFLite model file.</param>
/// <param name="responses">Candidate response strings the model scores against.</param>
public SmartReply(string modelPath, String[] responses)
{
    this.responses = responses;
    // GPU delegate is intentionally not used for this model; run on 2 CPU threads.
    var interpreterOptions = new InterpreterOptions()
    {
        threads = 2,
    };
    interpreter = new Interpreter(FileUtil.LoadFile(modelPath), interpreterOptions);
    interpreter.LogIOInfo();
    // NOTE(review): AllocateTensors() was deliberately left commented out in the
    // original — presumably allocation happens elsewhere; confirm before enabling.
    // interpreter.AllocateTensors();
}
/// <summary>
/// Loads the BERT Q&amp;A model and allocates fixed-size input/output buffers.
/// </summary>
/// <param name="modelPath">Path to the TFLite model file.</param>
/// <param name="vocabText">Raw vocabulary text used to build the token table.</param>
public Bert(string modelPath, string vocabText)
{
    // CPU-only inference on 2 threads.
    var interpreterOptions = new InterpreterOptions()
    {
        threads = 2,
    };
    interpreter = new Interpreter(FileUtil.LoadFile(modelPath), interpreterOptions);
    interpreter.LogIOInfo();

    // Fixed-length sequence buffers: three int inputs (ids / mask / segment ids —
    // TODO confirm ordering against the model's IO info) and two float outputs.
    // NOTE(review): MAX_SEQ_LENTH is misspelled (LENGTH) but declared outside this
    // constructor, so it cannot be renamed here.
    inputs0 = new int[MAX_SEQ_LENTH];
    inputs1 = new int[MAX_SEQ_LENTH];
    inputs2 = new int[MAX_SEQ_LENTH];
    outputs0 = new float[MAX_SEQ_LENTH];
    outputs1 = new float[MAX_SEQ_LENTH];

    interpreter.AllocateTensors();
    vocabularyTable = LoadVocabularies(vocabText);
}
/// <summary>
/// Loads the text-classification model and sizes IO buffers from the model's
/// reported tensor shapes.
/// </summary>
/// <param name="modelPath">Path to the TFLite model file.</param>
/// <param name="vocabularyText">Raw vocabulary text used to build the word index.</param>
public TextClassification(string modelPath, string vocabularyText)
{
    vocabulary = BuildVocabulary(vocabularyText);

    // GPU delegate intentionally not used; run on 2 CPU threads.
    var interpreterOptions = new InterpreterOptions()
    {
        threads = 2,
    };
    interpreter = new Interpreter(FileUtil.LoadFile(modelPath), interpreterOptions);

    // Size the flat IO buffers from dimension 1 of the model's first input/output.
    var inputInfo = interpreter.GetInputTensorInfo(0);
    var outputInfo = interpreter.GetOutputTensorInfo(0);
    inputs = new float[inputInfo.shape[1]];
    outputs = new float[outputInfo.shape[1]];

    // NOTE(review): resizing to the tensor's own reported shape looks redundant,
    // but is kept as-is — it may be required before AllocateTensors; confirm.
    interpreter.ResizeInputTensor(0, inputInfo.shape);
    interpreter.AllocateTensors();
}