private void CreateSingleShot(string modelAbsPath,
    TensorsInfo inTensorInfo, TensorsInfo outTensorInfo,
    NNFWType FWType, HWType HWType, bool IsDynamicMode)
{
    NNStreamerError ret = NNStreamerError.None;
    IntPtr input_info = IntPtr.Zero;
    IntPtr output_info = IntPtr.Zero;

    /* Check model path */
    if (string.IsNullOrEmpty(modelAbsPath))
    {
        ret = NNStreamerError.InvalidParameter;
    }
    NNStreamer.CheckException(ret, "model path is invalid: " + modelAbsPath);

    /* Set Dynamic Mode */
    _dynamicMode = IsDynamicMode;

    if (inTensorInfo != null)
    {
        input_info = inTensorInfo.GetTensorsInfoHandle();
        _inInfo = inTensorInfo;
    }

    if (outTensorInfo != null)
    {
        output_info = outTensorInfo.GetTensorsInfoHandle();
        _outInfo = outTensorInfo;
    }

    ret = Interop.SingleShot.OpenSingle(out _handle, modelAbsPath, input_info, output_info, FWType, HWType);
    NNStreamer.CheckException(ret, "fail to open the single inference engine");
}
/// <summary> /// Loads the neural network model and configures runtime environment with Neural Network Framework and HW information /// </summary> /// <param name="modelAbsPath">Absolute path to the neural network model file.</param> /// <param name="inTensorsInfo">Input TensorsInfo object</param> /// <param name="outTensorsInfo">Output TensorsInfo object for inference result</param> /// <param name="fwType">Types of Neural Network Framework</param> /// <param name="hwType">Types of hardware resources to be used for NNFWs</param> /// <param name="isDynamicMode">Support Dynamic Mode</param> /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception> /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception> /// <since_tizen> 8 </since_tizen> public SingleShot(string modelAbsPath, TensorsInfo inTensorsInfo, TensorsInfo outTensorsInfo, NNFWType fwType, HWType hwType, bool isDynamicMode) { NNStreamer.CheckNNStreamerSupport(); if (inTensorsInfo == null || outTensorsInfo == null) { throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null"); } CreateSingleShot(modelAbsPath, inTensorsInfo, outTensorsInfo, fwType, hwType, isDynamicMode); }
/// <summary> /// Loads the neural network model and configures runtime environment without TensorsInfo /// </summary> /// <param name="modelAbsPath">Absolute path to the neural network model file.</param> /// <param name="fwType">Types of Neural Network Framework (Default:NNFWType.Any)</param> /// <param name="hwType">Types of hardware resources to be used for NNFWs (Default: HWType.Any)</param> /// <param name="isDynamicMode">Support Dynamic Mode (Default: false)</param> /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception> /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception> /// <since_tizen> 8 </since_tizen> public SingleShot(string modelAbsPath, NNFWType fwType = NNFWType.Any, HWType hwType = HWType.Any, bool isDynamicMode = false) { NNStreamer.CheckNNStreamerSupport(); CreateSingleShot(modelAbsPath, null, null, fwType, hwType, isDynamicMode); }
internal static extern NNStreamerError CheckNNFWAvailability(NNFWType nnfw, HWType hw, out bool available);
internal static extern NNStreamerError OpenSingle(out IntPtr single_handle, string model_path, IntPtr input_info, IntPtr output_info, NNFWType nn_type, HWType hw_type);
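// Note (assumption): in the full Interop source these extern declarations carry [DllImport]
// attributes binding them to the native single-shot ML C API, roughly as sketched below.
// The library constant name and entry points are assumptions based on the NNStreamer C API
// (ml_check_nnfw_availability, ml_single_open) and may differ from the actual Tizen interop file.
//
// [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_check_nnfw_availability")]
// internal static extern NNStreamerError CheckNNFWAvailability(NNFWType nnfw, HWType hw, out bool available);
//
// [DllImport(Libraries.Nnstreamer, EntryPoint = "ml_single_open")]
// internal static extern NNStreamerError OpenSingle(out IntPtr single_handle, string model_path, IntPtr input_info, IntPtr output_info, NNFWType nn_type, HWType hw_type);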