/// <summary>
/// Invokes the model with the given input data.
/// </summary>
/// <param name="inTensorsData">The input data to be inferred.</param>
/// <returns>TensorsData instance which contains the inferred result.</returns>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <exception cref="TimeoutException">Thrown when failed to get the result from sink element.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public TensorsData Invoke(TensorsData inTensorsData)
{
    TensorsData out_data = null;
    IntPtr outDataPtr = IntPtr.Zero;
    NNStreamerError ret = NNStreamerError.None;

    NNStreamer.CheckNNStreamerSupport();

    if (inTensorsData == null)
    {
        string msg = "TensorsData is null";
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
    }

    if (_dynamicMode)
    {
        TensorsInfo inInfo = inTensorsData.TensorsInfo;
        if (inInfo == null)
        {
            throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");
        }

        /* Apply all data */
        inTensorsData.PrepareInvoke();

        IntPtr outInfoPtr = IntPtr.Zero;
        ret = Interop.SingleShot.InvokeSingleDynamic(_handle, inTensorsData.GetHandle(),
            inInfo.GetTensorsInfoHandle(), out outDataPtr, out outInfoPtr);
        NNStreamer.CheckException(ret, "fail to invoke the single dynamic inference");

        /* In dynamic mode the output info comes back from the native layer. */
        out_data = TensorsData.CreateFromNativeHandle(outDataPtr, outInfoPtr, true);
    }
    else
    {
        TensorsInfo data_inInfo = inTensorsData.TensorsInfo;

        /* BUGFIX: guard against a null TensorsInfo before calling Equals().
         * Previously a null info in static mode raised NullReferenceException
         * instead of the documented ArgumentException; the dynamic branch
         * already performs this check. */
        if (data_inInfo == null)
        {
            throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");
        }

        /* Static mode: the input layout must match the model's fixed input info. */
        if (!data_inInfo.Equals(_inInfo))
        {
            string msg = "The TensorsInfo of Input TensorsData is different from that of SingleShot object";
            throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
        }

        /* Apply all data */
        inTensorsData.PrepareInvoke();

        ret = Interop.SingleShot.InvokeSingle(_handle, inTensorsData.GetHandle(), out outDataPtr);
        NNStreamer.CheckException(ret, "fail to invoke the single inference");

        /* Static mode reuses the (validated) input info for the output handle. */
        out_data = TensorsData.CreateFromNativeHandle(outDataPtr, data_inInfo.GetTensorsInfoHandle(), true);
    }

    return out_data;
}
/// <summary>
/// Inputs tensor data to source node.
/// </summary>
/// <param name="data">The tensors data</param>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <exception cref="InvalidOperationException">Thrown when the node is invalid, or failed to input tensors data.</exception>
/// <since_tizen> 8 </since_tizen>
public void Input(TensorsData data)
{
    /* Reject a null payload up front. */
    if (data == null)
    {
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Given data is invalid");
    }

    /* A node that is no longer attached to a running pipeline cannot accept input. */
    if (!Valid)
    {
        NNStreamer.CheckException(NNStreamerError.InvalidOperation, "Current node is invalid: " + Name);
    }

    /* Commit any pending buffer writes before handing the data to the native layer. */
    data.PrepareInvoke();

    NNStreamerError result =
        Interop.Pipeline.InputSrcData(Handle, data.GetHandle(), PipelineBufferPolicy.NotFreed);
    NNStreamer.CheckException(result, "Failed to input tensors data to source node: " + Name);
}