/// <summary>
/// Sets the tensor data at the given index.
/// </summary>
/// <param name="index">The index of the tensor.</param>
/// <param name="buffer">Raw tensor data to be set.</param>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public void SetTensorData(int index, byte[] buffer)
{
    NNStreamerError ret = NNStreamerError.None;

    ret = Interop.Util.SetTensorData(_handle, index, buffer, buffer.Length);
    NNStreamer.CheckException(ret, "unable to set the buffer of TensorsData: " + index.ToString());
}
/// <summary>
/// Creates a TensorsInfo instance.
/// </summary>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public TensorsInfo()
{
    NNStreamer.CheckNNStreamerSupport();

    Log.Info(NNStreamer.TAG, "TensorsInfo is created");
    _infoList = new List<TensorInfo>();
}
private void CreateSingleShot(string modelAbsPath, TensorsInfo inTensorInfo, TensorsInfo outTensorInfo,
    NNFWType FWType, HWType HWType, bool IsDynamicMode)
{
    NNStreamerError ret = NNStreamerError.None;
    IntPtr input_info = IntPtr.Zero;
    IntPtr output_info = IntPtr.Zero;

    /* Check model path */
    if (string.IsNullOrEmpty(modelAbsPath))
    {
        ret = NNStreamerError.InvalidParameter;
    }
    NNStreamer.CheckException(ret, "model path is invalid: " + modelAbsPath);

    /* Set Dynamic Mode */
    _dynamicMode = IsDynamicMode;

    if (inTensorInfo != null)
    {
        input_info = inTensorInfo.GetTensorsInfoHandle();
        _inInfo = inTensorInfo;
    }

    if (outTensorInfo != null)
    {
        output_info = outTensorInfo.GetTensorsInfoHandle();
        _outInfo = outTensorInfo;
    }

    ret = Interop.SingleShot.OpenSingle(out _handle, modelAbsPath, input_info, output_info, FWType, HWType);
    NNStreamer.CheckException(ret, "failed to open the single inference engine");
}
/// <summary>
/// Calculates the byte size of the tensor data.
/// </summary>
/// <param name="idx">The index of the tensor information in the list.</param>
/// <returns>The byte size of the tensor.</returns>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 8 </since_tizen>
public int GetTensorSize(int idx)
{
    NNStreamer.CheckNNStreamerSupport();

    CheckIndexBoundary(idx);
    return _infoList[idx].Size;
}
/// <summary>
/// Sets the property value for the given model.
/// <para>A model/framework may support changing the model information, such as tensor dimension and data layout, after opening the model.</para>
/// <para>If you try to change an unavailable property, or the model does not allow changing that information, this will raise an exception.</para>
/// <para>For details about the properties, see the 'tensor_filter' plugin definition in <a href="https://github.com/nnstreamer/nnstreamer">NNStreamer</a>.</para>
/// </summary>
/// <param name="name">The property name.</param>
/// <param name="value">The property value.</param>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported, or the given property is not available.</exception>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <since_tizen> 8 </since_tizen>
public void SetValue(string name, string value)
{
    NNStreamerError ret = NNStreamerError.None;

    NNStreamer.CheckNNStreamerSupport();

    /* Check the arguments */
    if (string.IsNullOrEmpty(name))
    {
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "The property name is invalid");
    }

    if (string.IsNullOrEmpty(value))
    {
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "The property value is invalid");
    }

    ret = Interop.SingleShot.SetValue(_handle, name, value);
    if (ret != NNStreamerError.None)
    {
        if (ret == NNStreamerError.NotSupported)
        {
            NNStreamer.CheckException(ret, "Failed to set the property, the property name is not available.");
        }
        else
        {
            NNStreamer.CheckException(ret, "Failed to set the property, the property value is invalid.");
        }
    }
}
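// Example (sketch): changing a model property after the model is opened. 'single' is assumed
// to be an already-opened SingleShot instance, and "input" is a tensor_filter property name
// whose availability depends on the underlying framework, so this call may raise
// NotSupportedException for models that do not allow it.
single.SetValue("input", "3:224:224:1");
string inputDim = single.GetValue("input");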
/// <summary>
/// Creates a new custom-filter with the given input and output tensors information.
/// </summary>
/// <param name="name">The name of the custom-filter.</param>
/// <param name="inInfo">The input tensors information.</param>
/// <param name="outInfo">The output tensors information.</param>
/// <param name="filter">Delegate to be called while processing the pipeline.</param>
/// <returns>CustomFilter instance.</returns>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <since_tizen> 8 </since_tizen>
public static CustomFilter Create(string name, TensorsInfo inInfo, TensorsInfo outInfo, Func<TensorsData, TensorsData> filter)
{
    NNStreamer.CheckNNStreamerSupport();

    return new CustomFilter(name, inInfo, outInfo, filter);
}
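// Example (sketch): a pass-through custom filter that forwards its input unchanged.
// 'inInfo' and 'outInfo' are assumed to be identical TensorsInfo instances built as shown
// elsewhere in this file; a real filter would typically produce a separate output TensorsData.
CustomFilter passThrough = CustomFilter.Create("pass-through", inInfo, outInfo,
    (TensorsData input) => input);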
/// <summary>
/// Adds a tensor information to the TensorsInfo instance. Note that we support up to 16 tensors in TensorsInfo.
/// </summary>
/// <param name="name">Name of the tensor.</param>
/// <param name="type">Data element type of the tensor.</param>
/// <param name="dimension">Dimension of the tensor. Note that we support up to rank 4.</param>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="IndexOutOfRangeException">Thrown when the number of tensors already exceeds the size limit (i.e. Tensor.SizeLimit).</exception>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public void AddTensorInfo(string name, TensorType type, int[] dimension)
{
    NNStreamer.CheckNNStreamerSupport();

    int idx = _infoList.Count;
    if (idx >= Tensor.SizeLimit)
    {
        throw new IndexOutOfRangeException("Max size of the tensors is " + Tensor.SizeLimit);
    }
    _infoList.Add(new TensorInfo(name, type, dimension));

    if (_handle != IntPtr.Zero)
    {
        NNStreamerError ret = NNStreamerError.None;

        ret = Interop.Util.SetTensorsCount(_handle, _infoList.Count);
        NNStreamer.CheckException(ret, "unable to set the number of tensors");

        ret = Interop.Util.SetTensorType(_handle, idx, type);
        NNStreamer.CheckException(ret, "failed to set TensorsInfo type");

        ret = Interop.Util.SetTensorDimension(_handle, idx, dimension);
        NNStreamer.CheckException(ret, "failed to set TensorsInfo dimension");
    }
}
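// Example (sketch): describing a model input as a single UInt8 tensor.
// The tensor name and the dimension values are illustrative; use the dimensions expected
// by your model (up to rank 4 per tensor, up to 16 tensors per TensorsInfo).
TensorsInfo inInfo = new TensorsInfo();
inInfo.AddTensorInfo("input", TensorType.UInt8, new int[] { 3, 224, 224, 1 });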
/// <summary>
/// Gets the tensor dimension with the given index.
/// </summary>
/// <param name="idx">The index of the tensor.</param>
/// <returns>The tensor dimension.</returns>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public int[] GetDimension(int idx)
{
    NNStreamer.CheckNNStreamerSupport();

    CheckIndexBoundary(idx);
    return _infoList[idx].Dimension;
}

/// <summary>
/// Gets the tensor name with the given index.
/// </summary>
/// <param name="idx">The index of the tensor.</param>
/// <returns>The tensor name.</returns>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public string GetTensorName(int idx)
{
    NNStreamer.CheckNNStreamerSupport();

    CheckIndexBoundary(idx);
    return _infoList[idx].Name;
}

/// <summary>
/// Gets the tensor type with the given index.
/// </summary>
/// <param name="idx">The index of the tensor.</param>
/// <returns>The tensor type.</returns>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public TensorType GetTensorType(int idx)
{
    NNStreamer.CheckNNStreamerSupport();

    CheckIndexBoundary(idx);
    return _infoList[idx].Type;
}
/// <summary>
/// Gets the property value for the given model.
/// </summary>
/// <param name="name">The property name.</param>
/// <returns>The property value.</returns>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported, or the given property is not available.</exception>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <since_tizen> 8 </since_tizen>
public string GetValue(string name)
{
    NNStreamerError ret = NNStreamerError.None;
    IntPtr val = IntPtr.Zero;

    NNStreamer.CheckNNStreamerSupport();

    /* Check the argument */
    if (string.IsNullOrEmpty(name))
    {
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "The property name is invalid");
    }

    ret = Interop.SingleShot.GetValue(_handle, name, out val);
    if (ret != NNStreamerError.None)
    {
        if (ret == NNStreamerError.NotSupported)
        {
            NNStreamer.CheckException(ret, "Failed to get the property, the property name is not available.");
        }
        else
        {
            NNStreamer.CheckException(ret, "Failed to get the property, the property value is invalid.");
        }
    }

    return Interop.Util.IntPtrToString(val);
}
/// <summary>
/// Makes a TensorsInfo object from a native handle.
/// </summary>
/// <param name="handle">Handle of the TensorsInfo object.</param>
/// <returns>TensorsInfo object.</returns>
internal static TensorsInfo ConvertTensorsInfoFromHandle(IntPtr handle)
{
    TensorsInfo retInfo = null;
    NNStreamerError ret = NNStreamerError.None;
    int count;

    ret = Interop.Util.GetTensorsCount(handle, out count);
    NNStreamer.CheckException(ret, "Failed to get the count of tensors");

    retInfo = new TensorsInfo();

    for (int i = 0; i < count; ++i)
    {
        string name;
        TensorType type;
        uint[] dim = new uint[Tensor.RankLimit];

        ret = Interop.Util.GetTensorName(handle, i, out name);
        NNStreamer.CheckException(ret, "Failed to get the tensor's name");

        ret = Interop.Util.GetTensorType(handle, i, out type);
        NNStreamer.CheckException(ret, "Failed to get the tensor's type");

        ret = Interop.Util.GetTensorDimension(handle, i, dim);
        NNStreamer.CheckException(ret, "Failed to get the tensor's dimension");

        /* Reinterpret the unsigned dimension array as int[] for AddTensorInfo. */
        retInfo.AddTensorInfo(name, type, (int[])(object)dim);
    }
    return retInfo;
}
/// <summary>
/// Gets the normal node instance with the given node name.
/// </summary>
/// <param name="name">The name of the normal node.</param>
/// <returns>The normal node instance.</returns>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <since_tizen> 8 </since_tizen>
public Node GetNormal(string name)
{
    NNStreamer.CheckNNStreamerSupport();

    /* Check the parameter */
    if (string.IsNullOrEmpty(name))
    {
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Node name is invalid");
    }

    Node node;
    if (_nodeList.ContainsKey(name))
    {
        if (_nodeList[name].Type != NodeType.Normal)
        {
            throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, name + " is not a normal node");
        }
        node = (Node)_nodeList[name];
    }
    else
    {
        node = new Node(name, this);
        _nodeList.Add(name, node);
    }
    return node;
}

/// <summary>
/// Gets the sink node instance with the given node name.
/// </summary>
/// <param name="name">The name of the sink node.</param>
/// <returns>The sink node instance.</returns>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <since_tizen> 8 </since_tizen>
public SinkNode GetSink(string name)
{
    NNStreamer.CheckNNStreamerSupport();

    /* Check the argument */
    if (string.IsNullOrEmpty(name))
    {
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Node name is invalid");
    }

    SinkNode node;
    if (_nodeList.ContainsKey(name))
    {
        if (_nodeList[name].Type != NodeType.Sink)
        {
            throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, name + " is not a sink node");
        }
        node = (SinkNode)_nodeList[name];
    }
    else
    {
        node = new SinkNode(name, this);
        _nodeList.Add(name, node);
    }
    return node;
}
/// <summary>
/// Sets the string value of the node's property in NNStreamer pipelines.
/// </summary>
/// <param name="propertyName">The property name.</param>
/// <param name="value">The string value of the given property.</param>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <since_tizen> 8 </since_tizen>
public void SetValue(string propertyName, string value)
{
    CheckSetParam(propertyName, value);

    NNStreamerError ret = Interop.Pipeline.SetPropertyString(Handle, propertyName, value);
    NNStreamer.CheckException(ret, string.Format("Failed to set {0} property.", propertyName));
}
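// Example (sketch): updating an element property at runtime. The pipeline description
// and the "src"/"location" names are illustrative, and Pipeline(string description) is
// assumed to be available alongside the GetNormal method shown above.
Pipeline pipeline = new Pipeline("filesrc name=src location=input.raw ! fakesink name=sink");
Node src = pipeline.GetNormal("src");
src.SetValue("location", "other_input.raw");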
/// <summary>
/// Gets the tensor data at the given index.
/// </summary>
/// <param name="index">The index of the tensor.</param>
/// <returns>Raw tensor data.</returns>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public byte[] GetTensorData(int index)
{
    NNStreamer.CheckNNStreamerSupport();

    CheckIndex(index);
    return (byte[])_dataList[index];
}
/// <summary>
/// Stops the pipeline, asynchronously. (The state will be changed to PipelineState.Paused.)
/// </summary>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <exception cref="InvalidOperationException">Thrown when failed to stop the pipeline.</exception>
/// <since_tizen> 8 </since_tizen>
public void Stop()
{
    NNStreamer.CheckNNStreamerSupport();

    NNStreamerError ret = Interop.Pipeline.Stop(_handle);
    NNStreamer.CheckException(ret, "Failed to stop the pipeline because of an internal error");
}
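// Example (sketch): typical pipeline lifecycle around Stop(). Start() and Dispose() are
// assumed counterparts of the Stop() method shown above; both state changes are asynchronous.
pipeline.Start();   // moves the state toward PipelineState.Playing
// ... push or pull data while the pipeline is running ...
pipeline.Stop();    // falls back to PipelineState.Paused
pipeline.Dispose();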
/// <summary>
/// Gets the value of the node's property in NNStreamer pipelines.
/// </summary>
/// <typeparam name="T">The value type of the given property.</typeparam>
/// <param name="propertyName">The property name.</param>
/// <returns>The value of the given property.</returns>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <since_tizen> 8 </since_tizen>
public T GetValue<T>(string propertyName)
{
    NNStreamerError ret;

    CheckGetParam(propertyName);

    if (typeof(bool).IsAssignableFrom(typeof(T)))
    {
        ret = Interop.Pipeline.GetPropertyBool(Handle, propertyName, out int value);
        NNStreamer.CheckException(ret, string.Format("Failed to get {0} property.", propertyName));

        return (T)Convert.ChangeType(value != 0, typeof(T));
    }
    else if (typeof(string).IsAssignableFrom(typeof(T)))
    {
        ret = Interop.Pipeline.GetPropertyString(Handle, propertyName, out string value);
        NNStreamer.CheckException(ret, string.Format("Failed to get {0} property.", propertyName));

        return (T)Convert.ChangeType(value, typeof(T));
    }
    else if (typeof(int).IsAssignableFrom(typeof(T)))
    {
        ret = Interop.Pipeline.GetPropertyInt32(Handle, propertyName, out int value);
        NNStreamer.CheckException(ret, string.Format("Failed to get {0} property.", propertyName));

        return (T)Convert.ChangeType(value, typeof(T));
    }
    else if (typeof(long).IsAssignableFrom(typeof(T)))
    {
        ret = Interop.Pipeline.GetPropertyInt64(Handle, propertyName, out long value);
        NNStreamer.CheckException(ret, string.Format("Failed to get {0} property.", propertyName));

        return (T)Convert.ChangeType(value, typeof(T));
    }
    else if (typeof(uint).IsAssignableFrom(typeof(T)))
    {
        ret = Interop.Pipeline.GetPropertyUInt32(Handle, propertyName, out uint value);
        NNStreamer.CheckException(ret, string.Format("Failed to get {0} property.", propertyName));

        return (T)Convert.ChangeType(value, typeof(T));
    }
    else if (typeof(ulong).IsAssignableFrom(typeof(T)))
    {
        ret = Interop.Pipeline.GetPropertyUInt64(Handle, propertyName, out ulong value);
        NNStreamer.CheckException(ret, string.Format("Failed to get {0} property.", propertyName));

        return (T)Convert.ChangeType(value, typeof(T));
    }
    else if (typeof(double).IsAssignableFrom(typeof(T)))
    {
        ret = Interop.Pipeline.GetPropertyDouble(Handle, propertyName, out double value);
        NNStreamer.CheckException(ret, string.Format("Failed to get {0} property.", propertyName));

        return (T)Convert.ChangeType(value, typeof(T));
    }

    throw new ArgumentException("The input data type is not valid.");
}
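// Example (sketch): typed property reads with GetValue<T>. The "sync" (bool) and
// "location" (string) properties of the elements named in the earlier pipeline sketch
// are used for illustration only.
bool sync = pipeline.GetNormal("sink").GetValue<bool>("sync");
string location = src.GetValue<string>("location");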
private void CheckGetParam(string propertyName)
{
    NNStreamer.CheckNNStreamerSupport();

    if (string.IsNullOrEmpty(propertyName))
    {
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Property name is invalid");
    }
}
/// <summary>
/// Sets the tensor data at the given index.
/// </summary>
/// <param name="index">The index of the tensor.</param>
/// <param name="buffer">Raw tensor data to be set.</param>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <exception cref="ArgumentException">Thrown when the data is not valid.</exception>
/// <since_tizen> 6 </since_tizen>
public void SetTensorData(int index, byte[] buffer)
{
    NNStreamer.CheckNNStreamerSupport();

    CheckIndex(index);
    CheckDataBuffer(index, buffer);

    _dataList[index] = buffer;
}
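// Example (sketch): filling and reading back a tensor buffer. 'inData' is assumed to be a
// TensorsData instance allocated for the input TensorsInfo (the allocation API is not shown
// here); the buffer length must match the byte size of the corresponding tensor.
byte[] buffer = new byte[inInfo.GetTensorSize(0)];
inData.SetTensorData(0, buffer);
byte[] readBack = inData.GetTensorData(0);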
internal SwitchNode(string name, Pipeline pipe) : base(NodeType.Switch, name, pipe)
{
    IntPtr handle = IntPtr.Zero;
    NNStreamerError ret = Interop.Pipeline.GetSwitchHandle(pipe.GetHandle(), name, out _type, out handle);
    NNStreamer.CheckException(ret, "Failed to get the switch node handle: " + name);

    Handle = handle;
}

internal Node(string name, Pipeline pipe) : base(NodeType.Normal, name, pipe)
{
    IntPtr handle = IntPtr.Zero;
    NNStreamerError ret = Interop.Pipeline.GetElementHandle(pipe.GetHandle(), name, out handle);
    NNStreamer.CheckException(ret, "Failed to get the pipeline node handle: " + name);

    Handle = handle;
}
/// <summary>
/// Gets the boolean value of the node's property in NNStreamer pipelines.
/// </summary>
/// <param name="propertyName">The property name.</param>
/// <param name="retValue">On return, a boolean value.</param>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <since_tizen> 8 </since_tizen>
public void GetValue(string propertyName, out bool retValue)
{
    CheckGetParam(propertyName);

    NNStreamerError ret = Interop.Pipeline.GetPropertyBool(Handle, propertyName, out int value);
    NNStreamer.CheckException(ret, string.Format("Failed to get {0} property.", propertyName));

    retValue = value != 0;
}

/// <summary>
/// Gets the floating-point value of the node's property in NNStreamer pipelines.
/// </summary>
/// <param name="propertyName">The property name.</param>
/// <param name="retValue">On return, a floating-point value.</param>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <since_tizen> 8 </since_tizen>
public void GetValue(string propertyName, out double retValue)
{
    CheckGetParam(propertyName);

    NNStreamerError ret = Interop.Pipeline.GetPropertyDouble(Handle, propertyName, out double value);
    NNStreamer.CheckException(ret, string.Format("Failed to get {0} property.", propertyName));

    retValue = value;
}
/// <summary>
/// Loads the neural network model and configures the runtime environment.
/// </summary>
/// <param name="modelAbsPath">Absolute path to the neural network model file.</param>
/// <param name="inTensorsInfo">Input TensorsInfo object.</param>
/// <param name="outTensorsInfo">Output TensorsInfo object for the inference result.</param>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public SingleShot(string modelAbsPath, TensorsInfo inTensorsInfo, TensorsInfo outTensorsInfo)
{
    NNStreamer.CheckNNStreamerSupport();

    if (inTensorsInfo == null || outTensorsInfo == null)
    {
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");
    }

    CreateSingleShot(modelAbsPath, inTensorsInfo, outTensorsInfo, NNFWType.Any, HWType.Any, false);
}
/// <summary>
/// Controls the valve. Set the flag to true to open (let the flow pass), or false to close (stop the flow).
/// </summary>
/// <param name="open">The flag to control the flow.</param>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <exception cref="InvalidOperationException">Thrown when the node is invalid.</exception>
/// <since_tizen> 8 </since_tizen>
public void Control(bool open)
{
    if (!Valid)
    {
        NNStreamer.CheckException(NNStreamerError.InvalidOperation, "Current node is invalid: " + Name);
    }

    NNStreamerError ret = Interop.Pipeline.OpenValve(Handle, open);
    NNStreamer.CheckException(ret, "Failed to set valve status: " + Name);
}
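// Example (sketch): toggling a valve element in a running pipeline. GetValve(name) is
// assumed to exist on Pipeline, analogous to GetNormal/GetSink above, and "valve1" is an
// illustrative element name taken from the pipeline description.
ValveNode valve = pipeline.GetValve("valve1");
valve.Control(false);  // close: stop the data flow
valve.Control(true);   // open: let buffers pass again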
private void Unregister()
{
    if (Handle != IntPtr.Zero)
    {
        /* Unregister the data callback from the sink node */
        NNStreamerError ret = Interop.Pipeline.UnregisterSinkCallback(Handle);
        NNStreamer.CheckException(ret, "Failed to unregister sink node callback: " + Name);

        Handle = IntPtr.Zero;
    }
}
/// <summary>
/// Loads the neural network model and configures the runtime environment with the given neural network framework and hardware information.
/// </summary>
/// <param name="modelAbsPath">Absolute path to the neural network model file.</param>
/// <param name="inTensorsInfo">Input TensorsInfo object.</param>
/// <param name="outTensorsInfo">Output TensorsInfo object for the inference result.</param>
/// <param name="fwType">Type of the neural network framework.</param>
/// <param name="hwType">Type of the hardware resource to be used for the NNFW.</param>
/// <param name="isDynamicMode">Whether the dynamic mode is supported.</param>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 8 </since_tizen>
public SingleShot(string modelAbsPath, TensorsInfo inTensorsInfo, TensorsInfo outTensorsInfo,
    NNFWType fwType, HWType hwType, bool isDynamicMode)
{
    NNStreamer.CheckNNStreamerSupport();

    if (inTensorsInfo == null || outTensorsInfo == null)
    {
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");
    }

    CreateSingleShot(modelAbsPath, inTensorsInfo, outTensorsInfo, fwType, hwType, isDynamicMode);
}
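// Example (sketch): opening a model with an explicit framework and hardware preference.
// The model path is illustrative, and NNFWType.TensorflowLite / HWType.CPU are assumed
// enum member names; NNFWType.Any / HWType.Any (as used above) let the platform decide.
SingleShot single = new SingleShot("/usr/share/models/mobilenet_v1.tflite",
    inInfo, outInfo, NNFWType.TensorflowLite, HWType.CPU, false);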
internal void PrepareInvoke()
{
    NNStreamerError ret = NNStreamerError.None;
    int count = _dataList.Count;

    for (int i = 0; i < count; ++i)
    {
        byte[] data = (byte[])_dataList[i];
        ret = Interop.Util.SetTensorData(_handle, i, data, data.Length);
        NNStreamer.CheckException(ret, "unable to set the buffer of TensorsData: " + i.ToString());
    }
}
/// <summary>
/// Invokes the model with the given input data.
/// </summary>
/// <param name="inTensorsData">The input data to be inferred.</param>
/// <returns>TensorsData instance which contains the inferred result.</returns>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <exception cref="IOException">Thrown when failed to push an input data into the source element.</exception>
/// <exception cref="TimeoutException">Thrown when failed to get the result from the sink element.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public TensorsData Invoke(TensorsData inTensorsData)
{
    TensorsData out_data;
    IntPtr out_ptr;
    NNStreamerError ret = NNStreamerError.None;

    ret = Interop.SingleShot.InvokeSingle(_handle, inTensorsData.Handle, out out_ptr);
    NNStreamer.CheckException(ret, "failed to invoke the single inference engine");

    out_data = TensorsData.CreateFromNativeHandle(out_ptr);
    return out_data;
}
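// Example (sketch): end-to-end single-shot inference. 'inInfo'/'outInfo' describe the model's
// input and output tensors, 'inData' is a TensorsData filled as shown earlier, and the model
// path is illustrative. SingleShot is assumed to be disposable when no longer needed.
using (SingleShot model = new SingleShot("/usr/share/models/mobilenet_v1.tflite", inInfo, outInfo))
{
    TensorsData result = model.Invoke(inData);
    byte[] output = result.GetTensorData(0);
    // interpret 'output' according to the model's output tensor type and dimension
}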