Example 1
        /// <summary>
        /// Releases any unmanaged resources used by this object. Can also dispose any other disposable objects.
        /// </summary>
        /// <param name="disposing">If true, disposes any disposable objects. If false, does not dispose disposable objects.</param>
        protected virtual void Dispose(bool disposing)
        {
            if (_disposed)
            {
                return;
            }

            if (disposing)
            {
                // release managed objects (TensorsInfo may be null)
                _tensorsInfo?.Dispose();
                _tensorsInfo = null;
            }

            // release unmanaged objects
            if (_handle != IntPtr.Zero)
            {
                NNStreamerError ret = Interop.Util.DestroyTensorsData(_handle);
                if (ret != NNStreamerError.None)
                {
                    Log.Error(NNStreamer.TAG, "failed to destroy TensorsData object");
                }
                _handle = IntPtr.Zero;
            }
            _disposed = true;
        }
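
For reference, this protected Dispose(bool) is the virtual half of the standard .NET dispose pattern. A minimal sketch of the public members that usually accompany it, assuming the class implements IDisposable (an illustrative reconstruction, not verbatim library source):

        /// <summary>
        /// Destructor of the TensorsData instance.
        /// </summary>
        ~TensorsData()
        {
            Dispose(false);
        }

        /// <summary>
        /// Releases all resources used by this object.
        /// </summary>
        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }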
Example 2
        private void CreateSingleShot(string modelAbsPath,
                                      TensorsInfo inTensorInfo, TensorsInfo outTensorInfo,
                                      NNFWType fwType, HWType hwType, bool isDynamicMode)
        {
            NNStreamerError ret         = NNStreamerError.None;
            IntPtr          input_info  = IntPtr.Zero;
            IntPtr          output_info = IntPtr.Zero;

            /* Check model path */
            if (string.IsNullOrEmpty(modelAbsPath))
            {
                ret = NNStreamerError.InvalidParameter;
            }
            NNStreamer.CheckException(ret, "model path is invalid: " + modelAbsPath);

            /* Set Dynamic Mode */
            _dynamicMode = isDynamicMode;

            if (inTensorInfo != null)
            {
                input_info = inTensorInfo.GetTensorsInfoHandle();
                _inInfo    = inTensorInfo;
            }

            if (outTensorInfo != null)
            {
                output_info = outTensorInfo.GetTensorsInfoHandle();
                _outInfo    = outTensorInfo;
            }

            ret = Interop.SingleShot.OpenSingle(out _handle, modelAbsPath, input_info, output_info, fwType, hwType);
            NNStreamer.CheckException(ret, "failed to open the single inference engine");
        }
Example 3
        /// <summary>
        /// Creates a new custom filter with input and output tensors information.
        /// </summary>
        /// <param name="name">The name of the custom filter</param>
        /// <param name="inInfo">The input tensors information</param>
        /// <param name="outInfo">The output tensors information</param>
        /// <param name="filter">Delegate to be called while processing the pipeline</param>
        /// <returns>CustomFilter instance</returns>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <since_tizen> 8 </since_tizen>
        public static CustomFilter Create(string name,
                                          TensorsInfo inInfo, TensorsInfo outInfo, Func<TensorsData, TensorsData> filter)
        {
            NNStreamer.CheckNNStreamerSupport();

            return new CustomFilter(name, inInfo, outInfo, filter);
        }
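
A hedged usage sketch for Create: registering a pass-through custom filter. The filter name and tensor shape are placeholders, and AddTensorInfo(TensorType, int[]) is assumed to be the method that populates a TensorsInfo in this API:

        // Illustrative only: a custom filter that hands its input through unchanged.
        TensorsInfo info = new TensorsInfo();
        info.AddTensorInfo(TensorType.Float32, new int[] { 1, 10 });  // placeholder shape

        CustomFilter passThrough = CustomFilter.Create("pass_through", info, info,
                                                       (inData) => inData);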
Example 4
        /// <summary>
        /// Loads the neural network model and configures the runtime environment.
        /// </summary>
        /// <param name="modelAbsPath">Absolute path to the neural network model file.</param>
        /// <param name="inTensorsInfo">Input TensorsInfo object</param>
        /// <param name="outTensorsInfo">Output TensorsInfo object for inference result</param>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
        public SingleShot(string modelAbsPath, TensorsInfo inTensorsInfo, TensorsInfo outTensorsInfo)
        {
            NNStreamer.CheckNNStreamerSupport();

            if (inTensorsInfo == null || outTensorsInfo == null)
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");
            }

            CreateSingleShot(modelAbsPath, inTensorsInfo, outTensorsInfo, NNFWType.Any, HWType.Any, false);
        }
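
A hedged usage sketch for this constructor. The model path and tensor shapes are placeholders; AddTensorInfo(TensorType, int[]) is assumed as above:

        // Illustrative only: open a model with fixed input/output shapes.
        TensorsInfo inInfo = new TensorsInfo();
        inInfo.AddTensorInfo(TensorType.UInt8, new int[] { 3, 224, 224, 1 });  // placeholder

        TensorsInfo outInfo = new TensorsInfo();
        outInfo.AddTensorInfo(TensorType.UInt8, new int[] { 1001, 1 });  // placeholder

        SingleShot single = new SingleShot("/path/to/model.tflite", inInfo, outInfo);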
Example 5
        internal static TensorsData CreateFromNativeHandle(IntPtr dataHandle, IntPtr infoHandle, bool isFetch = false, bool hasOwnership = true)
        {
            TensorsInfo info = null;

            if (infoHandle != IntPtr.Zero)
            {
                info = TensorsInfo.ConvertTensorsInfoFromHandle(infoHandle);
            }

            return new TensorsData(dataHandle, info, isFetch, hasOwnership);
        }
Example 6
        /// <summary>
        /// Loads the neural network model and configures the runtime environment with the given neural network framework and hardware information.
        /// </summary>
        /// <param name="modelAbsPath">Absolute path to the neural network model file.</param>
        /// <param name="inTensorsInfo">Input TensorsInfo object</param>
        /// <param name="outTensorsInfo">Output TensorsInfo object for inference result</param>
        /// <param name="fwType">Types of Neural Network Framework</param>
        /// <param name="hwType">Types of hardware resources to be used for NNFWs</param>
        /// <param name="isDynamicMode">Support Dynamic Mode</param>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 8 </since_tizen>
        public SingleShot(string modelAbsPath,
                          TensorsInfo inTensorsInfo, TensorsInfo outTensorsInfo, NNFWType fwType, HWType hwType, bool isDynamicMode)
        {
            NNStreamer.CheckNNStreamerSupport();

            if (inTensorsInfo == null || outTensorsInfo == null)
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");
            }

            CreateSingleShot(modelAbsPath, inTensorsInfo, outTensorsInfo, fwType, hwType, isDynamicMode);
        }
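
A hedged sketch of the extended constructor, reusing the inInfo/outInfo placeholders from the sketch after Example 4. NNFWType.Any and HWType.Any are the only enum members visible in this section, so they stand in here and let the runtime choose:

        // Illustrative only: explicit framework/hardware hints, dynamic mode off.
        SingleShot single = new SingleShot("/path/to/model.tflite", inInfo, outInfo,
                                           NNFWType.Any, HWType.Any, false);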
Example 7
        /// <summary>
        /// Loads the neural network model and configures the runtime environment.
        /// </summary>
        /// <param name="modelAbsPath">Absolute path to the neural network model file.</param>
        /// <param name="inTensorsInfo">Input TensorsInfo object</param>
        /// <param name="outTensorsInfo">Output TensorsInfo object for inference result</param>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="IOException">Thrown when constructing the pipeline is failed.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
        public SingleShot(string modelAbsPath, TensorsInfo inTensorsInfo, TensorsInfo outTensorsInfo)
        {
            NNStreamer.CheckNNStreamerSupport();

            if (inTensorsInfo == null || outTensorsInfo == null)
            {
                string msg = "TensorsInfo is null";
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
            }

            CreateSingleShot(modelAbsPath, inTensorsInfo, outTensorsInfo);
        }
Example 8
        /// <summary>
        /// Allocates a new TensorsData instance with the given tensors information.
        /// </summary>
        /// <param name="info">TensorsInfo object which has Tensor information</param>
        /// <returns>The allocated TensorsData instance</returns>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 8 </since_tizen>
        public static TensorsData Allocate(TensorsInfo info)
        {
            NNStreamer.CheckNNStreamerSupport();

            if (info == null)
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");
            }

            TensorsData retData = info.GetTensorsData();

            return retData;
        }
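
A hedged sketch of filling an allocated buffer, reusing the inInfo placeholder from the constructor sketch above. SetTensorData(int, byte[]) is assumed to be the matching setter; it is not shown in this section:

        // Illustrative only: allocate buffers sized by inInfo, then fill tensor 0.
        TensorsData input = TensorsData.Allocate(inInfo);

        byte[] frame = new byte[3 * 224 * 224];  // e.g. one 224x224 RGB frame
        input.SetTensorData(0, frame);           // assumed setter, not shown above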
Example 9
        /// <summary>
        /// Invokes the model with the given input data.
        /// </summary>
        /// <param name="inTensorsData">The input data to be inferred.</param>
        /// <returns>TensorsData instance which contains the inferred result.</returns>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="TimeoutException">Thrown when failed to get the result from sink element.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
        public TensorsData Invoke(TensorsData inTensorsData)
        {
            TensorsData     out_data   = null;
            IntPtr          outDataPtr = IntPtr.Zero;
            NNStreamerError ret        = NNStreamerError.None;

            NNStreamer.CheckNNStreamerSupport();

            if (inTensorsData == null)
            {
                string msg = "TensorsData is null";
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
            }

            if (_dynamicMode)
            {
                TensorsInfo inInfo = inTensorsData.TensorsInfo;
                if (inInfo == null)
                {
                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");
                }

                /* Apply all data */
                inTensorsData.PrepareInvoke();

                IntPtr outInfoPtr = IntPtr.Zero;
                ret = Interop.SingleShot.InvokeSingleDynamic(_handle, inTensorsData.GetHandle(), inInfo.GetTensorsInfoHandle(), out outDataPtr, out outInfoPtr);
                NNStreamer.CheckException(ret, "fail to invoke the single dynamic inference");

                out_data = TensorsData.CreateFromNativeHandle(outDataPtr, outInfoPtr, true);
            }
            else
            {
                TensorsInfo data_inInfo = inTensorsData.TensorsInfo;

                if (!data_inInfo.Equals(_inInfo))
                {
                    string msg = "The TensorsInfo of Input TensorsData is different from that of SingleShot object";
                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
                }

                /* Apply all data */
                inTensorsData.PrepareInvoke();

                ret = Interop.SingleShot.InvokeSingle(_handle, inTensorsData.GetHandle(), out outDataPtr);
                NNStreamer.CheckException(ret, "fail to invoke the single inference");

                out_data = TensorsData.CreateFromNativeHandle(outDataPtr, data_inInfo.GetTensorsInfoHandle(), true);
            }
            return out_data;
        }
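
Putting the pieces together, a hedged end-to-end sketch reusing single and input from the sketches above. GetTensorData(int) is assumed to be the getter counterpart of the setter, and both SingleShot and TensorsData are assumed to implement IDisposable (Example 1 shows the TensorsData dispose pattern):

        // Illustrative only: run one inference and read back the raw result bytes.
        TensorsData output = single.Invoke(input);
        byte[] result = output.GetTensorData(0);  // assumed getter, not shown above

        output.Dispose();
        input.Dispose();
        single.Dispose();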
Example 10
        internal static TensorsData CreateFromNativeHandle(IntPtr dataHandle, IntPtr infoHandle, bool isFetch)
        {
            TensorsData retTensorsData = null;

            if (infoHandle == IntPtr.Zero)
            {
                retTensorsData = new TensorsData(dataHandle, null, isFetch);
            }
            else
            {
                TensorsInfo info = TensorsInfo.ConvertTensorsInfoFromHandle(infoHandle);
                retTensorsData = new TensorsData(dataHandle, info, isFetch);
            }

            return retTensorsData;
        }
Example 11
        /// <summary>
        /// Creates a TensorsData instance from a handle provided by TensorsInfo.
        /// </summary>
        /// <param name="handle">The handle of tensors data.</param>
        /// <param name="info">The handle of tensors info.</param>
        /// <param name="isFetch">The boolean value for fetching the data (Default: false)</param>
        /// <param name="hasOwnership">The boolean value for automatic disposal (Default: true)</param>
        /// <since_tizen> 6 </since_tizen>
        private TensorsData(IntPtr handle, TensorsInfo info, bool isFetch = false, bool hasOwnership = true)
        {
            NNStreamer.CheckNNStreamerSupport();
            NNStreamerError ret = NNStreamerError.None;

            /* Set internal object */
            _handle = handle;
            /* Because developers can change the TensorsInfo object, it should be stored as a deep-copied instance. */
            _tensorsInfo = info?.Clone();  // info may be null when created from a native handle without info

            /* Set count */
            int count = 0;

            ret = Interop.Util.GetTensorsCount(_handle, out count);
            NNStreamer.CheckException(ret, "unable to get the count of TensorsData");

            _dataList = new ArrayList(count);

            if (isFetch)
            {
                for (int i = 0; i < count; ++i)
                {
                    IntPtr raw_data;
                    byte[] bufData = null;
                    int    size;

                    ret = Interop.Util.GetTensorData(_handle, i, out raw_data, out size);
                    NNStreamer.CheckException(ret, "unable to get the buffer of TensorsData: " + i.ToString());

                    bufData = Interop.Util.IntPtrToByteArray(raw_data, size);
                    _dataList.Add(bufData);
                }
            }
            else
            {
                for (int i = 0; i < count; ++i)
                {
                    int    size    = info.GetTensorSize(i);
                    byte[] bufData = new byte[size];

                    _dataList.Add(bufData);
                }
            }

            /* If it created as DataReceivedEventArgs, do not dispose. */
            _disposed = !hasOwnership;
        }
Example 12
        private void CreateSingleShot(string modelAbsPath, TensorsInfo inTensorInfo, TensorsInfo outTensorInfo)
        {
            NNStreamerError ret = NNStreamerError.None;
            IntPtr          input_info;
            IntPtr          output_info;

            /* Check model path */
            if (string.IsNullOrEmpty(modelAbsPath))
            {
                ret = NNStreamerError.InvalidParameter;
            }
            NNStreamer.CheckException(ret, "model path is invalid: " + modelAbsPath);

            input_info  = inTensorInfo.GetTensorsInfoHandle();
            output_info = outTensorInfo.GetTensorsInfoHandle();

            ret = Interop.SingleShot.OpenSingle(out _handle, modelAbsPath, input_info, output_info, NNFWType.Any, HWType.Any);
            NNStreamer.CheckException(ret, "fail to open the single inference engine");
        }
Example 13
        /// <summary>
        /// Creates a TensorsData instance from a handle provided by TensorsInfo.
        /// </summary>
        /// <param name="handle">The handle of tensors data.</param>
        /// <param name="info">The handle of tensors info. (Default: null)</param>
        /// <param name="isFetch">The boolean value for fetching the data (Default: false)</param>
        /// <since_tizen> 6 </since_tizen>
        private TensorsData(IntPtr handle, TensorsInfo info, bool isFetch)
        {
            NNStreamer.CheckNNStreamerSupport();
            NNStreamerError ret = NNStreamerError.None;

            /* Set internal object */
            _handle      = handle;
            _tensorsInfo = info;

            /* Set count */
            int count = 0;

            ret = Interop.Util.GetTensorsCount(_handle, out count);
            NNStreamer.CheckException(ret, "unable to get the count of TensorsData");

            _dataList = new ArrayList(count);

            if (isFetch)
            {
                for (int i = 0; i < count; ++i)
                {
                    IntPtr raw_data;
                    byte[] bufData = null;
                    int    size;

                    ret = Interop.Util.GetTensorData(_handle, i, out raw_data, out size);
                    NNStreamer.CheckException(ret, "unable to get the buffer of TensorsData: " + i.ToString());

                    bufData = Interop.Util.IntPtrToByteArray(raw_data, size);
                    _dataList.Add(bufData);
                }
            }
            else
            {
                for (int i = 0; i < count; ++i)
                {
                    int    size    = info.GetTensorSize(i);
                    byte[] bufData = new byte[size];

                    _dataList.Add(bufData);
                }
            }
        }
Example 14
        private CustomFilter(string name, TensorsInfo inInfo, TensorsInfo outInfo, Func<TensorsData, TensorsData> filter)
        {
            /* Parameter check */
            if (name == null)
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Given name is null");
            }

            if (inInfo == null || outInfo == null)
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Given TensorsInfo is null");
            }

            if (filter == null)
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Given Callback interface is null");
            }

            _nativeCallback = (in_data_handle, out_data_handle, _) =>
            {
                TensorsData inData  = TensorsData.CreateFromNativeHandle(in_data_handle, IntPtr.Zero, true, false);
                TensorsData outData = filter(inData);  /* use the captured delegate; _filter is not yet assigned when the callback is registered */
                out_data_handle = outData.GetHandle();
            };

            IntPtr handle = IntPtr.Zero;

            /* Create custom filter callback */
            NNStreamerError ret = Interop.Pipeline.RegisterCustomFilter(name,
                                                                        inInfo.GetTensorsInfoHandle(), outInfo.GetTensorsInfoHandle(), _nativeCallback, IntPtr.Zero, out handle);

            NNStreamer.CheckException(ret, "Failed to create custom filter function: " + name);

            /* Set internal member */
            _InInfo  = inInfo;
            _OutInfo = outInfo;
            _filter  = filter;

            Name   = name;
            Handle = handle;
        }
Example 15
        /// <summary>
        /// Loads the neural network model and configures the runtime environment.
        /// </summary>
        /// <param name="modelAbsPath">Absolute path to the neural network model file.</param>
        /// <param name="inTensorsInfo">Input TensorsInfo object</param>
        /// <param name="outTensorsInfo">Output TensorsInfo object for inference result</param>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="IOException">Thrown when constructing the pipeline fails.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
        public SingleShot(string modelAbsPath, TensorsInfo inTensorsInfo, TensorsInfo outTensorsInfo)
        {
            CreateSingleShot(modelAbsPath, inTensorsInfo, outTensorsInfo);
        }