/// <summary>
/// Sets a tensor data to given index.
/// </summary>
/// <param name="index">The index of the tensor.</param>
/// <param name="buffer">Raw tensor data to be set.</param>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public void SetTensorData(int index, byte[] buffer)
{
    NNStreamerError ret = NNStreamerError.None;

    /* The documented NotSupportedException requires an explicit feature check,
     * matching the other public methods of this class. */
    NNStreamer.CheckNNStreamerSupport();

    /* Guard against null: buffer.Length below would otherwise raise
     * NullReferenceException instead of the documented ArgumentException. */
    if (buffer == null)
    {
        string msg = "buffer is null";
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
    }

    ret = Interop.Util.SetTensorData(_handle, index, buffer, buffer.Length);
    NNStreamer.CheckException(ret, "unable to set the buffer of TensorsData: " + index.ToString());
}
/// <summary>
/// Make TensorsInfo object from Native handle
/// </summary>
/// <param name="handle">Handle of TensorsInfo object</param>
/// <returns>TensorsInfo object</returns>
internal static TensorsInfo ConvertTensorsInfoFromHandle(IntPtr handle)
{
    TensorsInfo retInfo = null;
    NNStreamerError ret = NNStreamerError.None;

    // Ask the native layer how many tensors the handle describes.
    int count;
    ret = Interop.Util.GetTensorsCount(handle, out count);
    NNStreamer.CheckException(ret, "Fail to get Tensors' count");

    retInfo = new TensorsInfo();

    // Copy each tensor's name, element type, and dimensions into the managed object.
    for (int i = 0; i < count; ++i)
    {
        string name;
        TensorType type;
        // Native API fills a fixed-rank dimension array (Tensor.RankLimit entries).
        uint[] dim = new uint[Tensor.RankLimit];

        ret = Interop.Util.GetTensorName(handle, i, out name);
        NNStreamer.CheckException(ret, "Fail to get Tensor's name");

        ret = Interop.Util.GetTensorType(handle, i, out type);
        NNStreamer.CheckException(ret, "Fail to get Tensor's type");

        ret = Interop.Util.GetTensorDimension(handle, i, dim);
        NNStreamer.CheckException(ret, "Fail to get Tensor's dimension");

        // (int[])(object)dim reinterprets the uint[] as int[] without copying;
        // the CLR permits this runtime cast between same-width primitive arrays.
        retInfo.AddTensorInfo(name, type, (int[])(object)dim);
    }
    return(retInfo);
}
/// <summary>
/// Releases any unmanaged resources used by this object. Can also dispose any other disposable objects.
/// </summary>
/// <param name="disposing">If true, disposes any disposable objects. If false, does not dispose disposable objects.</param>
protected virtual void Dispose(bool disposing)
{
    if (_disposed)
    {
        return;
    }

    if (disposing)
    {
        // release managed objects
        _infoList.Clear();
    }

    // release unmanaged objects
    if (_handle != IntPtr.Zero)
    {
        NNStreamerError ret = Interop.Util.DestroyTensorsInfo(_handle);
        if (ret != NNStreamerError.None)
        {
            // Dispose must not throw; log and continue.
            Log.Error(NNStreamer.TAG, "failed to destroy TensorsInfo object");
        }
        /* Reset the handle so a repeated Dispose cannot double-free it.
         * This matches the other Dispose implementations in this module. */
        _handle = IntPtr.Zero;
    }
    _disposed = true;
}
/// <summary>
/// Opens the native single-shot inference engine for the given model file,
/// optionally fixing the input/output tensor layouts and the dynamic mode.
/// </summary>
/// <param name="modelAbsPath">Absolute path of the model file; must be non-empty.</param>
/// <param name="inTensorInfo">Input tensors information, or null to let the engine decide.</param>
/// <param name="outTensorInfo">Output tensors information, or null to let the engine decide.</param>
/// <param name="FWType">Neural network framework to use.</param>
/// <param name="HWType">Hardware target to run on.</param>
/// <param name="IsDynamicMode">Whether the model may change tensor information per invocation.</param>
private void CreateSingleShot(string modelAbsPath, TensorsInfo inTensorInfo, TensorsInfo outTensorInfo, NNFWType FWType, HWType HWType, bool IsDynamicMode)
{
    NNStreamerError err = NNStreamerError.None;
    IntPtr inHandle = IntPtr.Zero;
    IntPtr outHandle = IntPtr.Zero;

    /* Reject an empty or null model path before touching native code. */
    if (string.IsNullOrEmpty(modelAbsPath))
    {
        err = NNStreamerError.InvalidParameter;
    }
    NNStreamer.CheckException(err, "model path is invalid: " + modelAbsPath);

    /* Remember whether per-invoke tensor info changes are allowed. */
    _dynamicMode = IsDynamicMode;

    if (inTensorInfo != null)
    {
        _inInfo = inTensorInfo;
        inHandle = inTensorInfo.GetTensorsInfoHandle();
    }

    if (outTensorInfo != null)
    {
        _outInfo = outTensorInfo;
        outHandle = outTensorInfo.GetTensorsInfoHandle();
    }

    err = Interop.SingleShot.OpenSingle(out _handle, modelAbsPath, inHandle, outHandle, FWType, HWType);
    NNStreamer.CheckException(err, "fail to open the single inference engine");
}
/// <summary>
/// Releases any unmanaged resources used by this object including opened handle.
/// </summary>
/// <param name="disposing">If true, disposes any disposable objects. If false, does not dispose disposable objects.</param>
/// <since_tizen> 8 </since_tizen>
protected virtual void Dispose(bool disposing)
{
    /* Dispose is idempotent: subsequent calls are no-ops. */
    if (_disposed)
    {
        return;
    }

    if (disposing)
    {
        // release managed objects
    }

    // release unmanaged objects: detach the custom-filter registration.
    if (Handle != IntPtr.Zero)
    {
        NNStreamerError err = Interop.Pipeline.UnregisterCustomFilter(Handle);
        if (err != NNStreamerError.None)
        {
            // Never throw from Dispose; just record the failure.
            Log.Error(NNStreamer.TAG, "failed to destroy CustomFilter object");
        }
        Handle = IntPtr.Zero;
    }

    _disposed = true;
}
/// <summary>
/// Releases any unmanaged resources used by this object including opened handle.
/// </summary>
/// <param name="disposing">If true, disposes any disposable objects. If false, does not dispose disposable objects.</param>
/// <since_tizen> 8 </since_tizen>
protected virtual void Dispose(bool disposing)
{
    if (_disposed)
    {
        return;
    }

    if (disposing)
    {
        // release managed object
    }

    // release unmanaged objects
    if (_handle != IntPtr.Zero)
    {
        /* Destroying the pipeline releases every node handle internally,
         * so the cached node wrappers are invalidated first. */
        foreach (NodeInfo n in _nodeList.Values)
        {
            n.Valid = false;
        }
        _nodeList.Clear();

        NNStreamerError err = Interop.Pipeline.Destroy(_handle);
        if (err != NNStreamerError.None)
        {
            // Dispose must not throw; log the failure and move on.
            Log.Error(NNStreamer.TAG, "Failed to destroy the pipeline handle");
        }
        _handle = IntPtr.Zero;
    }

    _disposed = true;
}
/// <summary>
/// Releases any unmanaged resources used by this object. Can also dispose any other disposable objects.
/// </summary>
/// <param name="disposing">If true, disposes any disposable objects. If false, does not dispose disposable objects.</param>
protected virtual void Dispose(bool disposing)
{
    /* Subsequent calls after the first are no-ops. */
    if (_disposed)
    {
        return;
    }

    if (disposing)
    {
        // release managed object
    }

    // release unmanaged objects: close the native inference engine handle.
    if (_handle != IntPtr.Zero)
    {
        NNStreamerError err = Interop.SingleShot.CloseSingle(_handle);
        if (err != NNStreamerError.None)
        {
            // Never throw from Dispose; just log.
            Log.Error(NNStreamer.TAG, "failed to close inference engine");
        }
        _handle = IntPtr.Zero;
    }

    _disposed = true;
}
/// <summary>
/// Add a Tensor information to the TensorsInfo instance. Note that we support up to 16 tensors in TensorsInfo.
/// </summary>
/// <param name="name">Name of Tensor.</param>
/// <param name="type">Data element type of Tensor.</param>
/// <param name="dimension">Dimension of Tensor. Note that we support up to 4th ranks.</param>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="IndexOutOfRangeException">Thrown when the number of Tensor already exceeds the size limits (i.e. Tensor.SizeLimit)</exception>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public void AddTensorInfo(string name, TensorType type, int[] dimension)
{
    NNStreamer.CheckNNStreamerSupport();

    int idx = _infoList.Count;
    if (idx >= Tensor.SizeLimit)
    {
        throw new IndexOutOfRangeException("Max size of the tensors is " + Tensor.SizeLimit);
    }
    _infoList.Add(new TensorInfo(name, type, dimension));

    /* If the native handle already exists, mirror the new tensor into it. */
    if (_handle != IntPtr.Zero)
    {
        NNStreamerError ret = NNStreamerError.None;

        ret = Interop.Util.SetTensorsCount(_handle, _infoList.Count);
        NNStreamer.CheckException(ret, "unable to set the number of tensors");

        /* The tensor name was previously not propagated to the native handle;
         * set it here (when provided) so the native and managed views agree. */
        if (name != null)
        {
            ret = Interop.Util.SetTensorName(_handle, idx, name);
            NNStreamer.CheckException(ret, "fail to set TensorsInfo name");
        }

        ret = Interop.Util.SetTensorType(_handle, idx, type);
        NNStreamer.CheckException(ret, "fail to set TensorsInfo type");

        ret = Interop.Util.SetTensorDimension(_handle, idx, dimension);
        NNStreamer.CheckException(ret, "fail to set TensorsInfo dimension");
    }
}
/// <summary> Sets the property value for the given model.
/// <para>A model/framework may support changing the model information, such as tensor dimension and data layout, after opening the model.</para>
/// <para>If tries to change unavailable property or the model does not allow changing the information, this will raise an exception.</para>
/// <para>For the details about the properties, see 'tensor_filter' plugin definition in <a href="https://github.com/nnstreamer/nnstreamer">NNStreamer</a>.</para>
/// </summary>
/// <param name="name">The property name</param>
/// <param name="value">The property value</param>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported, or given property is not available.</exception>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <since_tizen> 8 </since_tizen>
public void SetValue(string name, string value)
{
    NNStreamerError ret = NNStreamerError.None;

    NNStreamer.CheckNNStreamerSupport();

    /* Check the argument */
    if (string.IsNullOrEmpty(name))
    {
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "The property name is invalid");
    }

    if (string.IsNullOrEmpty(value))
    {
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "The property value is invalid");
    }

    ret = Interop.SingleShot.SetValue(_handle, name, value);
    if (ret != NNStreamerError.None)
    {
        /* Distinguish an unavailable property name from an invalid value.
         * (Fixed the duplicated "to to" typo in both messages.) */
        if (ret == NNStreamerError.NotSupported)
        {
            NNStreamer.CheckException(ret, "Failed to set the property, the property name is not available.");
        }
        else
        {
            NNStreamer.CheckException(ret, "Failed to set the property, the property value is invalid.");
        }
    }
}
/// <summary>
/// Gets the property value for the given model.
/// </summary>
/// <param name="name">The property name</param>
/// <returns>The property value</returns>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported, or given property is not available.</exception>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <since_tizen> 8 </since_tizen>
public string GetValue(string name)
{
    NNStreamerError ret = NNStreamerError.None;
    IntPtr val = IntPtr.Zero;

    NNStreamer.CheckNNStreamerSupport();

    /* Check the argument */
    if (string.IsNullOrEmpty(name))
    {
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "The property name is invalid");
    }

    ret = Interop.SingleShot.GetValue(_handle, name, out val);
    if (ret != NNStreamerError.None)
    {
        /* Distinguish an unavailable property name from an invalid value.
         * (Fixed the duplicated "to to" typo in both messages.) */
        if (ret == NNStreamerError.NotSupported)
        {
            NNStreamer.CheckException(ret, "Failed to get the property, the property name is not available.");
        }
        else
        {
            NNStreamer.CheckException(ret, "Failed to get the property, the property value is invalid.");
        }
    }

    /* The native layer returns an unmanaged string; marshal it to a managed one. */
    return(Interop.Util.IntPtrToString(val));
}
/// <summary>
/// Maps an NNStreamer native error code to the corresponding managed exception.
/// </summary>
/// <param name="err">The native error code to translate.</param>
/// <param name="msg">The message carried by the created exception.</param>
/// <returns>An exception instance appropriate for the given error code.</returns>
internal static Exception CreateException(NNStreamerError err, string msg)
{
    switch (err)
    {
        case NNStreamerError.InvalidParameter:
            return new ArgumentException(msg);
        case NNStreamerError.StreamsPipe:
        case NNStreamerError.TryAgain:
            /* Pipe failures and transient conditions surface as I/O problems. */
            return new IOException(msg);
        case NNStreamerError.TimedOut:
            return new TimeoutException(msg);
        case NNStreamerError.QuotaExceeded:
            return new IndexOutOfRangeException(msg);
        case NNStreamerError.NotSupported:
        default:
            /* Unknown codes are reported as NotSupported, same as the explicit case. */
            return new NotSupportedException(msg);
    }
}
/// <summary>
/// Sets the string of node's property in NNStreamer pipelines.
/// </summary>
/// <param name="propertyName">The property name.</param>
/// <param name="value">The string of given property.</param>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <since_tizen> 8 </since_tizen>
public void SetValue(string propertyName, string value)
{
    /* Validate the name/value pair before crossing into native code. */
    CheckSetParam(propertyName, value);

    NNStreamerError err = Interop.Pipeline.SetPropertyString(Handle, propertyName, value);
    NNStreamer.CheckException(err, string.Format("Failed to set {0} property.", propertyName));
}
private static int _alreadyChecked = -1; /* -1: not yet, 0: Not Support, 1: Support */

/// <summary>
/// Logs and throws the mapped managed exception when a native call reported an error.
/// No-op when the error code is None.
/// </summary>
/// <param name="error">The native error code returned by an interop call.</param>
/// <param name="msg">Context message to log and embed in the thrown exception.</param>
internal static void CheckException(NNStreamerError error, string msg)
{
    // Success: nothing to report.
    if (error == NNStreamerError.None)
    {
        return;
    }

    Log.Error(NNStreamer.TAG, msg + ": " + error.ToString());
    throw NNStreamerExceptionFactory.CreateException(error, msg);
}
/// <summary>
/// Stops the pipeline, asynchronously. (The state would be changed to PipelineState.Paused)
/// </summary>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <exception cref="InvalidOperationException">Thrown when failed to stop the pipeline.</exception>
/// <since_tizen> 8 </since_tizen>
public void Stop()
{
    /* Verify the machine-learning feature before calling into native code. */
    NNStreamer.CheckNNStreamerSupport();

    NNStreamerError err = Interop.Pipeline.Stop(_handle);
    NNStreamer.CheckException(err, "Failed to stop the pipeline because of internal error");
}
/// <summary>
/// Gets the boolean of node's property in NNStreamer pipelines.
/// </summary>
/// <param name="propertyName">The property name.</param>
/// <param name="retValue">On return, a boolean value.</param>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <since_tizen> 8 </since_tizen>
public void GetValue(string propertyName, out bool retValue)
{
    CheckGetParam(propertyName);

    /* The native API reports booleans as integers (0 = false, non-zero = true). */
    NNStreamerError err = Interop.Pipeline.GetPropertyBool(Handle, propertyName, out int rawValue);
    NNStreamer.CheckException(err, string.Format("Failed to get {0} property.", propertyName));

    retValue = rawValue != 0;
}
/// <summary>
/// Creates a normal-node wrapper by resolving the named element handle from the pipeline.
/// </summary>
/// <param name="name">The element name inside the pipeline description.</param>
/// <param name="pipe">The owning pipeline instance.</param>
internal Node(string name, Pipeline pipe) : base(NodeType.Normal, name, pipe)
{
    IntPtr elementHandle = IntPtr.Zero;

    NNStreamerError err = Interop.Pipeline.GetElementHandle(pipe.GetHandle(), name, out elementHandle);
    NNStreamer.CheckException(err, "Failed to get the pipeline node handle: " + name);

    Handle = elementHandle;
}
/// <summary>
/// Creates a switch-node wrapper by resolving the named switch handle (and its type) from the pipeline.
/// </summary>
/// <param name="name">The switch element name inside the pipeline description.</param>
/// <param name="pipe">The owning pipeline instance.</param>
internal SwitchNode(string name, Pipeline pipe) : base(NodeType.Switch, name, pipe)
{
    IntPtr switchHandle = IntPtr.Zero;

    /* The native call also reports the switch type, stored into _type. */
    NNStreamerError err = Interop.Pipeline.GetSwitchHandle(pipe.GetHandle(), name, out _type, out switchHandle);
    NNStreamer.CheckException(err, "Failed to get the switch node handle: " + name);

    Handle = switchHandle;
}
/// <summary>
/// Gets the floating-point value of node's property in NNStreamer pipelines.
/// </summary>
/// <param name="propertyName">The property name.</param>
/// <param name="retValue">On return, a floating-point value.</param>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <since_tizen> 8 </since_tizen>
public void GetValue(string propertyName, out double retValue)
{
    CheckGetParam(propertyName);

    NNStreamerError err = Interop.Pipeline.GetPropertyDouble(Handle, propertyName, out double rawValue);
    NNStreamer.CheckException(err, string.Format("Failed to get {0} property.", propertyName));

    retValue = rawValue;
}
/// <summary>
/// Controls the valve. Set the flag true to open (let the flow pass), false to close (stop the flow).
/// </summary>
/// <param name="open">The flag to control the flow</param>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <exception cref="InvalidOperationException">Thrown when the node is invalid.</exception>
/// <since_tizen> 8 </since_tizen>
public void Control(bool open)
{
    /* Nodes are invalidated when their pipeline is destroyed; reject such use. */
    if (!Valid)
    {
        NNStreamer.CheckException(NNStreamerError.InvalidOperation, "Current node is invalid: " + Name);
    }

    NNStreamerError err = Interop.Pipeline.OpenValve(Handle, open);
    NNStreamer.CheckException(err, "Failed to set valve status: " + Name);
}
/// <summary>
/// Detaches the data callback from the sink node, if one is currently registered.
/// Clears the handle afterwards so the operation is idempotent.
/// </summary>
private void Unregister()
{
    // Nothing registered: nothing to do.
    if (Handle == IntPtr.Zero)
    {
        return;
    }

    /* Unregister the data callback from sink node */
    NNStreamerError err = Interop.Pipeline.UnregisterSinkCallback(Handle);
    NNStreamer.CheckException(err, "Failed to unregister sink node callback: " + Name);

    Handle = IntPtr.Zero;
}
/// <summary>
/// Sets the tensor dimension with given index and dimension.
/// </summary>
/// <param name="idx">The index of the tensor to be updated.</param>
/// <param name="dimension">The tensor dimension to be set.</param>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="IndexOutOfRangeException">Thrown when the index is greater than the number of Tensor.</exception>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public void SetDimension(int idx, int[] dimension)
{
    /* The documented NotSupportedException requires this feature check;
     * added for consistency with SetTensorName. */
    NNStreamer.CheckNNStreamerSupport();

    CheckIndexBoundary(idx);
    _infoList[idx].SetDimension(dimension);

    /* Mirror the change into the native handle when it already exists. */
    if (_handle != IntPtr.Zero)
    {
        NNStreamerError ret = NNStreamerError.None;
        ret = Interop.Util.SetTensorDimension(_handle, idx, dimension);
        NNStreamer.CheckException(ret, "unable to set the dimension of tensor: " + idx.ToString());
    }
}
/// <summary>
/// Pushes every buffered tensor payload into the native TensorsData handle
/// so the data is visible to a subsequent invoke call.
/// </summary>
internal void PrepareInvoke()
{
    NNStreamerError err = NNStreamerError.None;
    int idx = 0;

    foreach (object item in _dataList)
    {
        byte[] raw = (byte[])item;
        err = Interop.Util.SetTensorData(_handle, idx, raw, raw.Length);
        NNStreamer.CheckException(err, "unable to set the buffer of TensorsData: " + idx.ToString());
        idx++;
    }
}
/// <summary>
/// Invokes the model with the given input data.
/// </summary>
/// <param name="inTensorsData">The input data to be inferred.</param>
/// <returns>TensorsData instance which contains the inferred result.</returns>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <exception cref="IOException">Thrown when failed to push an input data into source element.</exception>
/// <exception cref="TimeoutException">Thrown when failed to get the result from sink element.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public TensorsData Invoke(TensorsData inTensorsData)
{
    TensorsData out_data;
    IntPtr out_ptr;
    NNStreamerError ret = NNStreamerError.None;

    /* The documented NotSupportedException requires this feature check. */
    NNStreamer.CheckNNStreamerSupport();

    /* Guard against null input: accessing inTensorsData.Handle below would
     * otherwise raise NullReferenceException instead of the documented
     * ArgumentException. */
    if (inTensorsData == null)
    {
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsData is null");
    }

    ret = Interop.SingleShot.InvokeSingle(_handle, inTensorsData.Handle, out out_ptr);
    NNStreamer.CheckException(ret, "fail to invoke the single inference engine");

    out_data = TensorsData.CreateFromNativeHandle(out_ptr);
    return(out_data);
}
/// <summary>
/// Sets the tensor type with given index and its type.
/// </summary>
/// <param name="idx">The index of the tensor to be updated.</param>
/// <param name="type">The tensor type to be set.</param>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="IndexOutOfRangeException">Thrown when the index is greater than the number of Tensor.</exception>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public void SetTensorType(int idx, TensorType type)
{
    /* The documented NotSupportedException requires this feature check;
     * added for consistency with SetTensorName. */
    NNStreamer.CheckNNStreamerSupport();

    CheckIndexBoundary(idx);
    _infoList[idx].Type = type;

    /* Mirror the change into the native handle when it already exists. */
    if (_handle != IntPtr.Zero)
    {
        NNStreamerError ret = NNStreamerError.None;
        ret = Interop.Util.SetTensorType(_handle, idx, type);
        NNStreamer.CheckException(ret, "unable to set the type of tensor: " + idx.ToString());
    }
}
/// <summary>
/// Attaches the data callback to the sink node unless one is already registered.
/// On success the returned native handle is stored for later unregistration.
/// </summary>
private void Register()
{
    // Already registered: nothing to do.
    if (Handle != IntPtr.Zero)
    {
        return;
    }

    IntPtr sinkHandle = IntPtr.Zero;

    /* Register new data callback to sink node */
    NNStreamerError err = Interop.Pipeline.RegisterSinkCallback(Pipe.GetHandle(), Name, _dataCallback, IntPtr.Zero, out sinkHandle);
    NNStreamer.CheckException(err, "Failed to register sink node callback: " + Name);

    Handle = sinkHandle;
}
/// <summary>
/// Sets the maximum amount of time to wait for an output, in milliseconds.
/// </summary>
/// <param name="ms">The time to wait for an output (milliseconds)</param>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <since_tizen> 8 </since_tizen>
public void SetTimeout(int ms)
{
    NNStreamer.CheckNNStreamerSupport();

    /* A timeout must be a positive duration. */
    if (ms <= 0)
    {
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Invalid timeout: " + ms.ToString());
    }

    NNStreamerError err = Interop.SingleShot.SetTimeout(_handle, ms);
    NNStreamer.CheckException(err, "fail to set the timeout!");
}
/// <summary>
/// Invokes the model with the given input data.
/// </summary>
/// <param name="inTensorsData">The input data to be inferred.</param>
/// <returns>TensorsData instance which contains the inferred result.</returns>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <exception cref="TimeoutException">Thrown when failed to get the result from sink element.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public TensorsData Invoke(TensorsData inTensorsData)
{
    TensorsData out_data = null;
    IntPtr outDataPtr = IntPtr.Zero;
    NNStreamerError ret = NNStreamerError.None;

    NNStreamer.CheckNNStreamerSupport();

    if (inTensorsData == null)
    {
        string msg = "TensorsData is null";
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
    }

    // Dynamic mode: the model may change tensor information per invocation,
    // so the input's TensorsInfo is passed along and a fresh output info
    // handle is received from the native call.
    if (_dynamicMode)
    {
        TensorsInfo inInfo = inTensorsData.TensorsInfo;
        if (inInfo == null)
        {
            throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");
        }

        /* Apply all data */
        // Push the buffered payloads into the native handle before invoking.
        inTensorsData.PrepareInvoke();

        IntPtr outInfoPtr = IntPtr.Zero;
        ret = Interop.SingleShot.InvokeSingleDynamic(_handle, inTensorsData.GetHandle(), inInfo.GetTensorsInfoHandle(), out outDataPtr, out outInfoPtr);
        NNStreamer.CheckException(ret, "fail to invoke the single dynamic inference");

        out_data = TensorsData.CreateFromNativeHandle(outDataPtr, outInfoPtr, true);
    }
    // Static mode: the input layout must match exactly what the engine was
    // opened with, so it is validated against the cached _inInfo first.
    else
    {
        TensorsInfo data_inInfo = inTensorsData.TensorsInfo;

        if (!data_inInfo.Equals(_inInfo))
        {
            string msg = "The TensorsInfo of Input TensorsData is different from that of SingleShot object";
            throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
        }

        /* Apply all data */
        // Push the buffered payloads into the native handle before invoking.
        inTensorsData.PrepareInvoke();

        ret = Interop.SingleShot.InvokeSingle(_handle, inTensorsData.GetHandle(), out outDataPtr);
        NNStreamer.CheckException(ret, "fail to invoke the single inference");

        // NOTE(review): the output wrapper reuses the input's info handle here —
        // presumably output layout equals input layout in static mode; confirm.
        out_data = TensorsData.CreateFromNativeHandle(outDataPtr, data_inInfo.GetTensorsInfoHandle(), true);
    }
    return(out_data);
}
/// <summary>
/// Gets a tensor data to given index.
/// </summary>
/// <param name="index">The index of the tensor.</param>
/// <returns>Raw tensor data</returns>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public byte[] GetTensorData(int index)
{
    byte[] retBuffer;
    IntPtr raw_data;
    int size;
    NNStreamerError ret = NNStreamerError.None;

    /* The documented NotSupportedException requires this feature check;
     * added for consistency with SetTensorData. */
    NNStreamer.CheckNNStreamerSupport();

    ret = Interop.Util.GetTensorData(_handle, index, out raw_data, out size);
    NNStreamer.CheckException(ret, "unable to get the buffer of TensorsData: " + index.ToString());

    /* Copy the unmanaged buffer into a managed byte array. */
    retBuffer = Interop.Util.IntPtrToByteArray(raw_data, size);
    return(retBuffer);
}
/// <summary>
/// Sets the tensor name with given index.
/// </summary>
/// <param name="idx">The index of the tensor to be updated.</param>
/// <param name="name">The tensor name to be set.</param>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public void SetTensorName(int idx, string name)
{
    NNStreamer.CheckNNStreamerSupport();
    CheckIndexBoundary(idx);

    /* Update the managed copy first. */
    _infoList[idx].Name = name;

    /* Then mirror the change into the native handle if it already exists. */
    if (_handle == IntPtr.Zero)
    {
        return;
    }

    NNStreamerError err = Interop.Util.SetTensorName(_handle, idx, name);
    NNStreamer.CheckException(err, "unable to set the name of tensor: " + idx.ToString());
}
/// <summary>
/// Sets a tensor data to given index.
/// </summary>
/// <param name="index">The index of the tensor.</param>
/// <param name="buffer">Raw tensor data to be set.</param>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
/// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
/// <since_tizen> 6 </since_tizen>
public void SetTensorData(int index, byte[] buffer)
{
    NNStreamer.CheckNNStreamerSupport();

    /* A null payload is rejected up front; the native call needs its length. */
    if (buffer == null)
    {
        string msg = "buffer is null";
        throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
    }

    NNStreamerError err = Interop.Util.SetTensorData(_handle, index, buffer, buffer.Length);
    NNStreamer.CheckException(err, "unable to set the buffer of TensorsData: " + index.ToString());
}