/// <summary>
/// Invokes the model with the given input data.
/// </summary>
/// <param name="inTensorsData">The input data to be inferred.</param>
/// <returns>TensorsData instance which contains the inferred result.</returns>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <exception cref="IOException">Thrown when failed to push an input data into source element.</exception>
/// <exception cref="TimeoutException">Thrown when failed to get the result from sink element.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public TensorsData Invoke(TensorsData inTensorsData)
{
    TensorsData out_data;
    IntPtr out_ptr = IntPtr.Zero;
    NNStreamerError ret = NNStreamerError.None;

    /* Validate before touching the native layer: a null argument would
     * otherwise surface as a NullReferenceException on .Handle instead of
     * the documented ArgumentException. This matches the check performed
     * by the other Invoke variants in this module. */
    if (inTensorsData == null)
    {
        string msg = "TensorsData is null";
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
    }

    ret = Interop.SingleShot.InvokeSingle(_handle, inTensorsData.Handle, out out_ptr);
    NNStreamer.CheckException(ret, "fail to invoke the single inference engine");

    /* Wrap the native result handle; ownership transfers to the managed object. */
    out_data = TensorsData.CreateFromNativeHandle(out_ptr);
    return (out_data);
}
/// <summary>
/// Creates a sink node attached to the given pipeline and registers its
/// native data callback. The callback forwards each incoming native buffer
/// to managed subscribers via the DataReceived event, but only while the
/// node is still valid.
/// </summary>
internal SinkNode(string name, Pipeline pipe) : base(NodeType.Sink, name, pipe)
{
    _dataCallback = (dataHandle, infoHandle, _) =>
    {
        /* Drop buffers that arrive after the node has been invalidated. */
        if (!Valid)
            return;

        TensorsData received = TensorsData.CreateFromNativeHandle(dataHandle, infoHandle, true, false);
        _dataReceived?.Invoke(this, new DataReceivedEventArgs(received));
    };

    Register();
}
/// <summary>
/// Invokes the model with the given input data.
/// </summary>
/// <param name="inTensorsData">The input data to be inferred.</param>
/// <returns>TensorsData instance which contains the inferred result.</returns>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <exception cref="TimeoutException">Thrown when failed to get the result from sink element.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public TensorsData Invoke(TensorsData inTensorsData)
{
    IntPtr resultPtr = IntPtr.Zero;
    NNStreamerError status;

    NNStreamer.CheckNNStreamerSupport();

    if (inTensorsData == null)
    {
        string msg = "TensorsData is null";
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
    }

    if (_dynamicMode)
    {
        /* Dynamic mode: the output tensor layout is produced by the native
         * engine per invocation, so it is returned alongside the data. */
        TensorsInfo inputInfo = inTensorsData.TensorsInfo;
        if (inputInfo == null)
        {
            throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");
        }

        /* Apply all data */
        inTensorsData.PrepareInvoke();

        IntPtr resultInfoPtr = IntPtr.Zero;
        status = Interop.SingleShot.InvokeSingleDynamic(_handle, inTensorsData.GetHandle(),
            inputInfo.GetTensorsInfoHandle(), out resultPtr, out resultInfoPtr);
        NNStreamer.CheckException(status, "fail to invoke the single dynamic inference");

        return TensorsData.CreateFromNativeHandle(resultPtr, resultInfoPtr, true);
    }

    /* Static mode: the input layout must match the one the SingleShot
     * instance was opened with. */
    TensorsInfo staticInfo = inTensorsData.TensorsInfo;
    if (!staticInfo.Equals(_inInfo))
    {
        string msg = "The TensorsInfo of Input TensorsData is different from that of SingleShot object";
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
    }

    /* Apply all data */
    inTensorsData.PrepareInvoke();

    status = Interop.SingleShot.InvokeSingle(_handle, inTensorsData.GetHandle(), out resultPtr);
    NNStreamer.CheckException(status, "fail to invoke the single inference");

    return TensorsData.CreateFromNativeHandle(resultPtr, staticInfo.GetTensorsInfoHandle(), true);
}
/// <summary>
/// Invokes the model with the given input data.
/// </summary>
/// <param name="inTensorsData">The input data to be inferred.</param>
/// <returns>TensorsData instance which contains the inferred result.</returns>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <exception cref="IOException">Thrown when failed to push an input data into source element.</exception>
/// <exception cref="TimeoutException">Thrown when failed to get the result from sink element.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public TensorsData Invoke(TensorsData inTensorsData)
{
    IntPtr resultPtr = IntPtr.Zero;

    /* Reject null input up front rather than letting the native call fail
     * on a null handle. */
    if (inTensorsData == null)
    {
        string msg = "TensorsData is null";
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
    }

    NNStreamerError status = Interop.SingleShot.InvokeSingle(_handle, inTensorsData.Handle, out resultPtr);
    NNStreamer.CheckException(status, "fail to invoke the single inference engine");

    /* Wrap the returned native handle in a managed TensorsData. */
    return TensorsData.CreateFromNativeHandle(resultPtr);
}
/// <summary>
/// Creates a TensorsData instance based on informations of TensorsInfo
/// </summary>
/// <returns>TensorsData instance</returns>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="ArgumentException">Thrown when the method failed due to TensorsInfo's information is invalid.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public TensorsData GetTensorsData()
{
    IntPtr dataHandle;

    /* Lazily materialize the native info handle if it was never created. */
    if (_handle == IntPtr.Zero)
    {
        Log.Info(NNStreamer.TAG, "_handle is IntPtr.Zero\n" + " GetTensorsInfoHandle() is called");
        GetTensorsInfoHandle();
    }

    NNStreamerError status = Interop.Util.CreateTensorsData(_handle, out dataHandle);
    NNStreamer.CheckException(status, "unable to create the tensorsData object");
    Log.Info(NNStreamer.TAG, "success to CreateTensorsData()\n");

    return TensorsData.CreateFromNativeHandle(dataHandle);
}
/// <summary>
/// Creates a custom-filter wrapper and registers it with the native
/// pipeline under the given name. The supplied managed delegate is invoked
/// for every buffer flowing through the filter.
/// </summary>
/// <param name="name">Unique name used to register the custom filter.</param>
/// <param name="inInfo">Layout of the tensors entering the filter.</param>
/// <param name="outInfo">Layout of the tensors produced by the filter.</param>
/// <param name="filter">Managed transform applied to each input buffer.</param>
private CustomFilter(string name, TensorsInfo inInfo, TensorsInfo outInfo, Func<TensorsData, TensorsData> filter)
{
    /* Parameter check */
    if (name == null)
    {
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Given name is null");
    }

    if (inInfo == null || outInfo == null)
    {
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Given TensorsInfo is null");
    }

    if (filter == null)
    {
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Given Callback interface is null");
    }

    /* Assign the managed members BEFORE registering with the native layer:
     * the native side may invoke _nativeCallback as soon as registration
     * completes, and the callback dereferences _filter. Assigning after
     * RegisterCustomFilter (as before) left a window where an early
     * callback would hit a null _filter. */
    _InInfo = inInfo;
    _OutInfo = outInfo;
    _filter = filter;
    Name = name;

    _nativeCallback = (in_data_handle, out_data_handle, _) =>
    {
        /* Wrap input without taking ownership of the info handle, run the
         * user transform, and hand its native handle back to the caller. */
        TensorsData inData = TensorsData.CreateFromNativeHandle(in_data_handle, IntPtr.Zero, true, false);
        TensorsData outData = _filter(inData);
        out_data_handle = outData.GetHandle();
    };

    IntPtr handle = IntPtr.Zero;

    /* Create custom filter callback */
    NNStreamerError ret = Interop.Pipeline.RegisterCustomFilter(name,
        inInfo.GetTensorsInfoHandle(), outInfo.GetTensorsInfoHandle(), _nativeCallback, IntPtr.Zero, out handle);
    NNStreamer.CheckException(ret, "Failed to create custom filter function: " + name);

    /* The native handle only exists once registration succeeds. */
    Handle = handle;
}
/// <summary>
/// Creates a TensorsData instance based on informations of TensorsInfo
/// </summary>
/// <returns>TensorsData instance</returns>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="ArgumentException">Thrown when the method failed due to TensorsInfo's information is invalid.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public TensorsData GetTensorsData()
{
    IntPtr dataHandle = IntPtr.Zero;

    NNStreamer.CheckNNStreamerSupport();

    /* Lazily materialize the native info handle if it was never created. */
    if (_handle == IntPtr.Zero)
    {
        Log.Info(NNStreamer.TAG, "_handle is IntPtr.Zero\n" + " GetTensorsInfoHandle() is called");
        GetTensorsInfoHandle();
    }

    NNStreamerError status = Interop.Util.CreateTensorsData(_handle, out dataHandle);
    NNStreamer.CheckException(status, "Failed to create the TensorsData object");

    /* The returned TensorsData borrows this object's info handle and does
     * not take ownership of it. */
    return TensorsData.CreateFromNativeHandle(dataHandle, _handle, false);
}