Пример #1
0
        /// <summary>
        /// Sets the maximum amount of time to wait for an output, in milliseconds.
        /// </summary>
        /// <param name="ms">The time to wait for an output (milliseconds); must be positive.</param>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <since_tizen> 8 </since_tizen>
        public void SetTimeout(int ms)
        {
            NNStreamer.CheckNNStreamerSupport();

            /* A zero or negative timeout is rejected before reaching native code. */
            if (ms <= 0)
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Invalid timeout: " + ms.ToString());
            }

            NNStreamerError ret = Interop.SingleShot.SetTimeout(_handle, ms);
            NNStreamer.CheckException(ret, "fail to set the timeout!");
        }
Пример #2
0
        /// <summary>
        /// Allocates a new TensorsData instance with the given tensors information.
        /// </summary>
        /// <param name="info">TensorsInfo object which has Tensor information</param>
        /// <returns>The TensorsInfo instance</returns>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 8 </since_tizen>
        public static TensorsData Allocate(TensorsInfo info)
        {
            NNStreamer.CheckNNStreamerSupport();

            /* Allocation is entirely delegated to the info object; only null is rejected here. */
            if (info == null)
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");
            }

            return info.GetTensorsData();
        }
Пример #3
0
        /// <summary>
        /// Invokes the model with the given input data.
        /// </summary>
        /// <param name="inTensorsData">The input data to be inferred.</param>
        /// <returns>TensorsData instance which contains the inferred result.</returns>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="TimeoutException">Thrown when failed to get the result from sink element.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
        public TensorsData Invoke(TensorsData inTensorsData)
        {
            TensorsData     out_data   = null;
            IntPtr          outDataPtr = IntPtr.Zero;
            NNStreamerError ret        = NNStreamerError.None;

            NNStreamer.CheckNNStreamerSupport();

            if (inTensorsData == null)
            {
                string msg = "TensorsData is null";
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
            }

            /* Both modes need the input TensorsInfo. Checking it once here also fixes the
             * static-mode path, which previously dereferenced it via Equals() without a
             * null check and would crash with NullReferenceException instead of throwing
             * the documented ArgumentException. */
            TensorsInfo inInfo = inTensorsData.TensorsInfo;
            if (inInfo == null)
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");
            }

            if (_dynamicMode)
            {
                /* Apply all data */
                inTensorsData.PrepareInvoke();

                IntPtr outInfoPtr = IntPtr.Zero;
                ret = Interop.SingleShot.InvokeSingleDynamic(_handle, inTensorsData.GetHandle(), inInfo.GetTensorsInfoHandle(), out outDataPtr, out outInfoPtr);
                NNStreamer.CheckException(ret, "fail to invoke the single dynamic inference");

                out_data = TensorsData.CreateFromNativeHandle(outDataPtr, outInfoPtr, true);
            }
            else
            {
                /* Static mode: input layout must match what this SingleShot was created with. */
                if (!inInfo.Equals(_inInfo))
                {
                    string msg = "The TensorsInfo of Input TensorsData is different from that of SingleShot object";
                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
                }

                /* Apply all data */
                inTensorsData.PrepareInvoke();

                ret = Interop.SingleShot.InvokeSingle(_handle, inTensorsData.GetHandle(), out outDataPtr);
                NNStreamer.CheckException(ret, "fail to invoke the single inference");

                out_data = TensorsData.CreateFromNativeHandle(outDataPtr, inInfo.GetTensorsInfoHandle(), true);
            }
            return(out_data);
        }
Пример #4
0
        /// <summary>
        /// Sets the tensor dimension with given index and dimension.
        /// </summary>
        /// <param name="idx">The index of the tensor to be updated.</param>
        /// <param name="dimension">The tensor dimension to be set.</param>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
        public void SetDimension(int idx, int[] dimension)
        {
            NNStreamer.CheckNNStreamerSupport();
            CheckIndexBoundary(idx);

            /* Update the managed copy first, then mirror it into the native handle if one exists. */
            _infoList[idx].SetDimension(dimension);

            if (_handle != IntPtr.Zero)
            {
                NNStreamerError ret = Interop.Util.SetTensorDimension(_handle, idx, dimension);
                NNStreamer.CheckException(ret, "unable to set the dimension of tensor: " + idx.ToString());
            }
        }
Пример #5
0
        /// <summary>
        /// Sets the tensor type with given index and its type.
        /// </summary>
        /// <param name="idx">The index of the tensor to be updated.</param>
        /// <param name="type">The tensor type to be set.</param>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
        public void SetTensorType(int idx, TensorType type)
        {
            NNStreamer.CheckNNStreamerSupport();
            CheckIndexBoundary(idx);

            /* Update the managed copy first, then mirror it into the native handle if one exists. */
            _infoList[idx].Type = type;

            if (_handle != IntPtr.Zero)
            {
                NNStreamerError ret = Interop.Util.SetTensorType(_handle, idx, type);
                NNStreamer.CheckException(ret, "unable to set the type of tensor: " + idx.ToString());
            }
        }
Пример #6
0
            /// <summary>
            /// Validates a property name/value pair before it is applied, throwing an
            /// ArgumentException when either is missing or empty.
            /// </summary>
            /// <param name="propertyName">Name of the property to set; must be non-null and non-empty.</param>
            /// <param name="value">Value to set; must be non-null.</param>
            private void CheckSetParam(string propertyName, object value)
            {
                NNStreamer.CheckNNStreamerSupport();

                bool nameInvalid = string.IsNullOrEmpty(propertyName);
                if (nameInvalid)
                {
                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Property name is invalid");
                }

                if (value == null)
                {
                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Value is invalid");
                }
            }
Пример #7
0
        /// <summary>
        /// Sets a tensor data to given index.
        /// </summary>
        /// <param name="index">The index of the tensor.</param>
        /// <param name="buffer">Raw tensor data to be set.</param>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
        public void SetTensorData(int index, byte[] buffer)
        {
            NNStreamer.CheckNNStreamerSupport();

            /* Index validity is checked by the native layer; only null is rejected here. */
            if (buffer == null)
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "buffer is null");
            }

            NNStreamerError ret = Interop.Util.SetTensorData(_handle, index, buffer, buffer.Length);
            NNStreamer.CheckException(ret, "unable to set the buffer of TensorsData: " + index.ToString());
        }
Пример #8
0
        /// <summary>
        /// Gets a tensor data to given index.
        /// </summary>
        /// <param name="index">The index of the tensor.</param>
        /// <returns>Raw tensor data</returns>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
        public byte[] GetTensorData(int index)
        {
            NNStreamer.CheckNNStreamerSupport();

            IntPtr rawData;
            int sizeInBytes;

            NNStreamerError ret = Interop.Util.GetTensorData(_handle, index, out rawData, out sizeInBytes);
            NNStreamer.CheckException(ret, "unable to get the buffer of TensorsData: " + index.ToString());

            /* Copy the native buffer into a managed byte array before returning it. */
            return Interop.Util.IntPtrToByteArray(rawData, sizeInBytes);
        }
Пример #9
0
        /// <summary>
        /// Creates a TensorsData instance with handle which is given by TensorsInfo.
        /// </summary>
        /// <param name="handle">The handle of tensors data.</param>
        /// <param name="info">The handle of tensors info.</param>
        /// <param name="isFetch">The boolean value for fetching the data (Default: false)</param>
        /// <param name="hasOwnership">The boolean value for automatic disposal (Default: true)</param>
        /// <since_tizen> 6 </since_tizen>
        private TensorsData(IntPtr handle, TensorsInfo info, bool isFetch = false, bool hasOwnership = true)
        {
            NNStreamer.CheckNNStreamerSupport();

            _handle = handle;
            /* Because developers can change the TensorsInfo object, it should be stored as a deep-copied instance. */
            _tensorsInfo = info.Clone();

            int count = 0;
            NNStreamerError ret = Interop.Util.GetTensorsCount(_handle, out count);
            NNStreamer.CheckException(ret, "unable to get the count of TensorsData");

            _dataList = new ArrayList(count);

            for (int i = 0; i < count; ++i)
            {
                byte[] bufData;

                if (isFetch)
                {
                    /* Fetch mode: copy the current native buffer contents for each tensor. */
                    IntPtr rawData;
                    int size;

                    ret = Interop.Util.GetTensorData(_handle, i, out rawData, out size);
                    NNStreamer.CheckException(ret, "unable to get the buffer of TensorsData: " + i.ToString());

                    bufData = Interop.Util.IntPtrToByteArray(rawData, size);
                }
                else
                {
                    /* Otherwise allocate an empty managed buffer of the declared tensor size. */
                    bufData = new byte[info.GetTensorSize(i)];
                }

                _dataList.Add(bufData);
            }

            /* If it created as DataReceivedEventArgs, do not dispose. */
            _disposed = !hasOwnership;
        }
Пример #10
0
        /// <summary>
        /// Creates a TensorsData instance with handle which is given by TensorsInfo.
        /// Unlike the ownership-aware overload, this one stores the given TensorsInfo
        /// reference directly (no deep copy) and never marks the instance as disposed.
        /// </summary>
        /// <param name="handle">The handle of tensors data.</param>
        /// <param name="info">The tensors info describing the data; stored as-is, not cloned.
        /// Must be non-null when <paramref name="isFetch"/> is false, since the tensor sizes are read from it.</param>
        /// <param name="isFetch">If true, each tensor buffer is copied out of the native handle;
        /// if false, empty managed buffers of the declared tensor sizes are allocated.</param>
        /// <since_tizen> 6 </since_tizen>
        private TensorsData(IntPtr handle, TensorsInfo info, bool isFetch)
        {
            NNStreamer.CheckNNStreamerSupport();
            NNStreamerError ret = NNStreamerError.None;

            /* Set internal object */
            _handle      = handle;
            /* NOTE(review): stored without Clone(), so later mutation of the caller's
             * TensorsInfo is visible here — intentional? The 4-arg overload deep-copies. */
            _tensorsInfo = info;

            /* Set count */
            int count = 0;

            ret = Interop.Util.GetTensorsCount(_handle, out count);
            NNStreamer.CheckException(ret, "unable to get the count of TensorsData");

            _dataList = new ArrayList(count);

            if (isFetch)
            {
                /* Copy the current contents of each native tensor buffer into managed arrays. */
                for (int i = 0; i < count; ++i)
                {
                    IntPtr raw_data;
                    byte[] bufData = null;
                    int    size;

                    ret = Interop.Util.GetTensorData(_handle, i, out raw_data, out size);
                    NNStreamer.CheckException(ret, "unable to get the buffer of TensorsData: " + i.ToString());

                    bufData = Interop.Util.IntPtrToByteArray(raw_data, size);
                    _dataList.Add(bufData);
                }
            }
            else
            {
                /* Allocate zero-filled managed buffers sized from the tensors info. */
                for (int i = 0; i < count; ++i)
                {
                    int    size    = info.GetTensorSize(i);
                    byte[] bufData = new byte[size];

                    _dataList.Add(bufData);
                }
            }
        }
Пример #11
0
        /// <summary>
        /// Creates a TensorsData instance based on informations of TensorsInfo
        /// </summary>
        /// <returns>TensorsData instance</returns>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="ArgumentException">Thrown when the method failed due to TensorsInfo's information is invalid.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
        public TensorsData GetTensorsData()
        {
            NNStreamer.CheckNNStreamerSupport();

            /* Lazily build the native info handle when it has not been created yet. */
            if (_handle == IntPtr.Zero)
            {
                Log.Info(NNStreamer.TAG, "_handle is IntPtr.Zero\n" + "  GetTensorsInfoHandle() is called");
                GetTensorsInfoHandle();
            }

            IntPtr tensorsDataHandle = IntPtr.Zero;
            NNStreamerError ret = Interop.Util.CreateTensorsData(_handle, out tensorsDataHandle);
            NNStreamer.CheckException(ret, "Failed to create the TensorsData object");

            return TensorsData.CreateFromNativeHandle(tensorsDataHandle, _handle, false);
        }
Пример #12
0
        /// <summary>
        /// Creates a new Pipeline instance with the given pipeline description
        /// </summary>
        /// <remarks>http://tizen.org/privilege/mediastorage is needed if pipeline description is relevant to media storage.</remarks>
        /// <remarks>http://tizen.org/privilege/externalstorage is needed if pipeline description is relevant to external storage.</remarks>
        /// <remarks>http://tizen.org/privilege/camera is needed if pipeline description accesses the camera device.</remarks>
        /// <remarks>http://tizen.org/privilege/recorder is needed if pipeline description accesses the recorder device.</remarks>
        /// <param name="description">The pipeline description. Refer to GStreamer manual or NNStreamer documentation for examples and the grammar.</param>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="UnauthorizedAccessException">Thrown when the application does not have the required privilege.</exception>
        /// <exception cref="InvalidOperationException">Thrown when the method failed due to the wrong pipeline description or internal error.</exception>
        /// <since_tizen> 8 </since_tizen>
        public Pipeline(string description)
        {
            NNStreamer.CheckNNStreamerSupport();

            if (string.IsNullOrEmpty(description))
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "The pipeline description is invalid");
            }

            /* The callback is kept in a field so the delegate stays reachable after construction. */
            _stateChangedCallback = (state, _) => StateChanged?.Invoke(this, new StateChangedEventArgs(state));

            NNStreamerError ret = Interop.Pipeline.Construct(description, _stateChangedCallback, IntPtr.Zero, out _handle);
            NNStreamer.CheckException(ret, "Failed to create Pipeline instance");

            /* Init node list */
            _nodeList = new Dictionary<string, NodeInfo>();
        }
Пример #13
0
        /// <summary>
        /// Add a Tensor information to the TensorsInfo instance. Note that we support up to 16 tensors in TensorsInfo.
        /// </summary>
        /// <param name="name">Name of Tensor.</param>
        /// <param name="type">Data element type of Tensor.</param>
        /// <param name="dimension">Dimension of Tensor. Note that we support up to 4th ranks.</param>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="IndexOutOfRangeException">Thrown when the number of Tensor already exceeds the size limits (i.e. Tensor.SizeLimit)</exception>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
        public void AddTensorInfo(string name, TensorType type, int[] dimension)
        {
            NNStreamer.CheckNNStreamerSupport();

            /* The new tensor is appended, so its index is the current count. */
            int newIndex = _infoList.Count;
            if (newIndex >= Tensor.SizeLimit)
            {
                throw new IndexOutOfRangeException("Max size of the tensors is " + Tensor.SizeLimit);
            }

            _infoList.Add(new TensorInfo(name, type, dimension));

            /* Keep the native handle (when already created) in sync with the managed list. */
            if (_handle != IntPtr.Zero)
            {
                NNStreamerError ret = Interop.Util.SetTensorsCount(_handle, _infoList.Count);
                NNStreamer.CheckException(ret, "Failed to set the number of tensors");

                UpdateInfoHandle(_handle, newIndex, name, type, dimension);
            }
        }
Пример #14
0
        /// <summary>
        /// Loads the neural network model and configures runtime environment without TensorsInfo
        /// </summary>
        /// <param name="modelAbsPath">Absolute path to the neural network model file.</param>
        /// <param name="fwType">Types of Neural Network Framework (Default:NNFWType.Any)</param>
        /// <param name="hwType">Types of hardware resources to be used for NNFWs (Default: HWType.Any)</param>
        /// <param name="isDynamicMode">Support Dynamic Mode (Default: false)</param>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 8 </since_tizen>
        public SingleShot(string modelAbsPath, NNFWType fwType = NNFWType.Any, HWType hwType = HWType.Any, bool isDynamicMode = false)
        {
            NNStreamer.CheckNNStreamerSupport();

            /* Input/output TensorsInfo are intentionally passed as null in this overload. */
            CreateSingleShot(modelAbsPath, null, null, fwType, hwType, isDynamicMode);
        }
Пример #15
0
        /// <summary>
        /// Add a Tensor information to the TensorsInfo instance. Note that we support up to 16 tensors in TensorsInfo.
        /// </summary>
        /// <param name="type">Data element type of Tensor.</param>
        /// <param name="dimension">Dimension of Tensor. Note that we support up to 4th ranks.</param>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="IndexOutOfRangeException">Thrown when the number of Tensor already exceeds the size limits (i.e. Tensor.SizeLimit)</exception>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
        public void AddTensorInfo(TensorType type, int[] dimension)
        {
            NNStreamer.CheckNNStreamerSupport();

            /* Convenience overload: forwards to the named variant with a null tensor name. */
            AddTensorInfo(null, type, dimension);
        }