Example 1
        /// <summary> Sets the property value for the given model.
        /// <para>A model/framework may support changing the model information, such as tensor dimension and data layout, after opening the model.</para>
        /// <para>If an unavailable property is given, or the model does not allow changing the information, this will raise an exception.</para>
        /// <para>For the details about the properties, see 'tensor_filter' plugin definition in <a href="https://github.com/nnstreamer/nnstreamer">NNStreamer</a>.</para>
        /// </summary>
        /// <param name="name">The property name</param>
        /// <param name="value">The property value</param>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported, or given property is not available.</exception>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <since_tizen> 8 </since_tizen>
        public void SetValue(string name, string value)
        {
            NNStreamerError ret = NNStreamerError.None;

            NNStreamer.CheckNNStreamerSupport();

            /* Check the argument */
            if (string.IsNullOrEmpty(name))
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "The property name is invalid");
            }

            if (string.IsNullOrEmpty(value))
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "The property value is invalid");
            }

            ret = Interop.SingleShot.SetValue(_handle, name, value);
            if (ret != NNStreamerError.None)
            {
                if (ret == NNStreamerError.NotSupported)
                {
                    NNStreamer.CheckException(ret, "Failed to to set the property, the property name is not available.");
                }
                else
                {
                    NNStreamer.CheckException(ret, "Failed to to set the property, the property value is invalid.");
                }
            }
        }
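
A minimal usage sketch (not from the source above): it assumes an already-constructed SingleShot instance named `single`, and the property key "input" with a colon-separated dimension string is only illustrative; the keys actually accepted depend on the tensor_filter plugin backing the model.

        // Hypothetical usage: ask the framework to change the input dimension
        // after the model has been opened. Both the key and the value format
        // are plugin-dependent (see the NNStreamer tensor_filter documentation).
        try
        {
            single.SetValue("input", "3:224:224:1");
        }
        catch (NotSupportedException)
        {
            // The model/framework does not allow changing this property.
        }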
Example 2
        /// <summary>
        /// Gets the normal node instance with the given node name.
        /// </summary>
        /// <param name="name">The name of the normal node.</param>
        /// <returns>The normal node instance</returns>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <since_tizen> 8 </since_tizen>
        public Node GetNormal(string name)
        {
            NNStreamer.CheckNNStreamerSupport();

            /* Check the parameter */
            if (string.IsNullOrEmpty(name))
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Node name is invalid");
            }

            Node node;

            if (_nodeList.ContainsKey(name))
            {
                if (_nodeList[name].Type != NodeType.Normal)
                {
                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, name + " is not a normal node");
                }

                node = (Node)_nodeList[name];
            }
            else
            {
                node = new Node(name, this);
                _nodeList.Add(name, node);
            }
            return(node);
        }
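
A short sketch of how GetNormal might be called, assuming a constructed Pipeline (`pipeline`) whose description contains an element named "filter1"; both names are illustrative. Because of the caching above, repeated lookups with the same name return the same instance.

        var node  = pipeline.GetNormal("filter1");
        var again = pipeline.GetNormal("filter1");
        // The same cached Node instance is returned for the same name.
        bool sameInstance = ReferenceEquals(node, again);   // true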
Example 3
        /// <summary>
        /// Gets the property value for the given model.
        /// </summary>
        /// <param name="name">The property name</param>
        /// <returns>The property value</returns>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported, or given property is not available.</exception>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <since_tizen> 8 </since_tizen>
        public string GetValue(string name)
        {
            NNStreamerError ret = NNStreamerError.None;
            IntPtr          val = IntPtr.Zero;

            NNStreamer.CheckNNStreamerSupport();

            /* Check the argument */
            if (string.IsNullOrEmpty(name))
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "The property name is invalid");
            }

            ret = Interop.SingleShot.GetValue(_handle, name, out val);
            if (ret != NNStreamerError.None)
            {
                if (ret == NNStreamerError.NotSupported)
                {
                    NNStreamer.CheckException(ret, "Failed to to get the property, the property name is not available.");
                }
                else
                {
                    NNStreamer.CheckException(ret, "Failed to to get the property, the property value is invalid.");
                }
            }

            return(Interop.Util.IntPtrToString(val));
        }
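
A matching sketch for GetValue, under the same assumptions as the SetValue sketch above (an existing SingleShot `single`; the key "input" is illustrative).

        // Read back a plugin-dependent property; NotSupportedException is raised
        // if the key is not available for this model/framework.
        string inputDim = single.GetValue("input");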
Example 4
        private void CheckDataBuffer(int index, byte[] data)
        {
            if (data == null)
            {
                string msg = "data is not valid";
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
            }

            if (index >= Tensor.SizeLimit)
            {
                string msg = "Max size of the tensors is " + Tensor.SizeLimit;
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.QuotaExceeded, msg);
            }

            if (_tensorsInfo != null)
            {
                if (index >= _tensorsInfo.Count)
                {
                    string msg = "Current information has " + _tensorsInfo.Count + " tensors";
                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.QuotaExceeded, msg);
                }

                int size = _tensorsInfo.GetTensorSize(index);
                if (data.Length != size)
                {
                    string msg = "Invalid buffer size, required size is " + size.ToString();
                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
                }
            }
        }
Example 5
        /// <summary>
        /// Gets the sink node instance with the given node name.
        /// </summary>
        /// <param name="name">The name of the sink node</param>
        /// <returns>The sink node instance</returns>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <since_tizen> 8 </since_tizen>
        public SinkNode GetSink(string name)
        {
            NNStreamer.CheckNNStreamerSupport();

            /* Check the argument */
            if (string.IsNullOrEmpty(name))
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Node name is invalid");
            }

            SinkNode node;

            if (_nodeList.ContainsKey(name))
            {
                if (_nodeList[name].Type != NodeType.Sink)
                {
                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, name + " is not a sink node");
                }

                node = (SinkNode)_nodeList[name];
            }
            else
            {
                node = new SinkNode(name, this);
                _nodeList.Add(name, node);
            }

            return(node);
        }
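
A short sketch of how a sink node retrieved this way might be consumed. It assumes a constructed Pipeline (`pipeline`) whose description contains a tensor_sink element named "sinkx", and that SinkNode raises a DataReceived event whose arguments expose the delivered TensorsData; the element name and the event shape are assumptions, not taken from the code above.

        var sink = pipeline.GetSink("sinkx");
        sink.DataReceived += (s, e) =>
        {
            // Assumed event args: e.Data carries the TensorsData from tensor_sink.
            byte[] firstTensor = e.Data.GetTensorData(0);
        };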
Example 6
        private static int _alreadyChecked = -1;    /* -1: not checked yet, 0: not supported, 1: supported */

        internal static void CheckException(NNStreamerError error, string msg)
        {
            if (error != NNStreamerError.None)
            {
                Log.Error(NNStreamer.TAG, msg + ": " + error.ToString());
                throw NNStreamerExceptionFactory.CreateException(error, msg);
            }
        }
Example 7
 private void CheckIndex(int index)
 {
     if (index < 0 || index >= _dataList.Count)
     {
         string msg = "Invalid index [" + index + "] of the tensors";
         throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
     }
 }
Example 8
 private void CheckIndexBoundary(int idx)
 {
     if (idx < 0 || idx >= _infoList.Count)
     {
         string msg = "Invalid index [" + idx + "] of the tensors";
         throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
     }
 }
Example 9
            private void CheckGetParam(string propertyName)
            {
                NNStreamer.CheckNNStreamerSupport();

                if (string.IsNullOrEmpty(propertyName))
                {
                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Property name is invalid");
                }
            }
Example 10
        /// <summary>
        /// Loads the neural network model and configures the runtime environment.
        /// </summary>
        /// <param name="modelAbsPath">Absolute path to the neural network model file.</param>
        /// <param name="inTensorsInfo">Input TensorsInfo object</param>
        /// <param name="outTensorsInfo">Output TensorsInfo object for inference result</param>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
        public SingleShot(string modelAbsPath, TensorsInfo inTensorsInfo, TensorsInfo outTensorsInfo)
        {
            NNStreamer.CheckNNStreamerSupport();

            if (inTensorsInfo == null || outTensorsInfo == null)
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");
            }

            CreateSingleShot(modelAbsPath, inTensorsInfo, outTensorsInfo, NNFWType.Any, HWType.Any, false);
        }
Example 11
        /// <summary>
        /// Loads the neural network model and configures the runtime environment.
        /// </summary>
        /// <param name="modelAbsPath">Absolute path to the neural network model file.</param>
        /// <param name="inTensorsInfo">Input TensorsInfo object</param>
        /// <param name="outTensorsInfo">Output TensorsInfo object for inference result</param>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="IOException">Thrown when constructing the pipeline is failed.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
        public SingleShot(string modelAbsPath, TensorsInfo inTensorsInfo, TensorsInfo outTensorsInfo)
        {
            NNStreamer.CheckNNStreamerSupport();

            if (inTensorsInfo == null || outTensorsInfo == null)
            {
                string msg = "TensorsInfo is null";
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
            }

            CreateSingleShot(modelAbsPath, inTensorsInfo, outTensorsInfo);
        }
Example 12
        /// <summary>
        /// Loads the neural network model and configures the runtime environment with the given neural network framework and hardware information.
        /// </summary>
        /// <param name="modelAbsPath">Absolute path to the neural network model file.</param>
        /// <param name="inTensorsInfo">Input TensorsInfo object</param>
        /// <param name="outTensorsInfo">Output TensorsInfo object for inference result</param>
        /// <param name="fwType">Types of Neural Network Framework</param>
        /// <param name="hwType">Types of hardware resources to be used for NNFWs</param>
        /// <param name="isDynamicMode">Support Dynamic Mode</param>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 8 </since_tizen>
        public SingleShot(string modelAbsPath,
                          TensorsInfo inTensorsInfo, TensorsInfo outTensorsInfo, NNFWType fwType, HWType hwType, bool isDynamicMode)
        {
            NNStreamer.CheckNNStreamerSupport();

            if (inTensorsInfo == null || outTensorsInfo == null)
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");
            }

            CreateSingleShot(modelAbsPath, inTensorsInfo, outTensorsInfo, fwType, hwType, isDynamicMode);
        }
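
A construction sketch combining the constructors above. The model path and tensor shapes are illustrative, and it assumes TensorsInfo exposes AddTensorInfo(TensorType, int[]) as in the Tizen API; NNFWType.TensorflowLite and HWType.CPU are the values used elsewhere in this module.

        var inInfo = new TensorsInfo();
        inInfo.AddTensorInfo(TensorType.UInt8, new int[] { 3, 224, 224, 1 });

        var outInfo = new TensorsInfo();
        outInfo.AddTensorInfo(TensorType.UInt8, new int[] { 1001, 1, 1, 1 });

        // Pin the framework and hardware explicitly; dynamic mode disabled.
        var single = new SingleShot("/usr/share/model.tflite",
                                    inInfo, outInfo, NNFWType.TensorflowLite, HWType.CPU, false);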
Example 13
        /// <summary>
        /// Sets the maximum amount of time to wait for an output, in milliseconds.
        /// </summary>
        /// <param name="ms">The time to wait for an output (milliseconds)</param>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <since_tizen> 8 </since_tizen>
        public void SetTimeout(int ms)
        {
            NNStreamer.CheckNNStreamerSupport();
            NNStreamerError ret = NNStreamerError.None;

            if (ms <= 0)
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Invalid timeout: " + ms.ToString());
            }

            ret = Interop.SingleShot.SetTimeout(_handle, ms);
            NNStreamer.CheckException(ret, "fail to set the timeout!");
        }
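
A one-line sketch, assuming the `single` instance from the constructor sketch above: bound each inference to 500 ms, so a slow Invoke is expected to surface a TimeoutException instead of blocking indefinitely.

        // Wait at most 500 ms for an inference result; values <= 0 are rejected.
        single.SetTimeout(500);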
Example 14
        /// <summary>
        /// Allocates a new TensorsData instance with the given tensors information.
        /// </summary>
        /// <param name="info">TensorsInfo object which has Tensor information</param>
        /// <returns>The TensorsData instance</returns>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 8 </since_tizen>
        public static TensorsData Allocate(TensorsInfo info)
        {
            NNStreamer.CheckNNStreamerSupport();

            if (info == null)
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");
            }

            TensorsData retData = info.GetTensorsData();

            return(retData);
        }
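
A sketch for Allocate, reusing the illustrative `inInfo` from the constructor sketch above: the returned TensorsData holds one buffer per tensor described by the TensorsInfo.

        TensorsData inData = TensorsData.Allocate(inInfo);
        // Fill tensor 0 with a zeroed buffer of the matching size (3 * 224 * 224 UInt8 elements).
        inData.SetTensorData(0, new byte[3 * 224 * 224 * 1]);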
Example 15
        /// <summary>
        /// Invokes the model with the given input data.
        /// </summary>
        /// <param name="inTensorsData">The input data to be inferred.</param>
        /// <returns>TensorsData instance which contains the inferred result.</returns>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="TimeoutException">Thrown when failed to get the result from sink element.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
        public TensorsData Invoke(TensorsData inTensorsData)
        {
            TensorsData     out_data   = null;
            IntPtr          outDataPtr = IntPtr.Zero;
            NNStreamerError ret        = NNStreamerError.None;

            NNStreamer.CheckNNStreamerSupport();

            if (inTensorsData == null)
            {
                string msg = "TensorsData is null";
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
            }

            if (_dynamicMode)
            {
                TensorsInfo inInfo = inTensorsData.TensorsInfo;
                if (inInfo == null)
                {
                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");
                }

                /* Apply all data */
                inTensorsData.PrepareInvoke();

                IntPtr outInfoPtr = IntPtr.Zero;
                ret = Interop.SingleShot.InvokeSingleDynamic(_handle, inTensorsData.GetHandle(), inInfo.GetTensorsInfoHandle(), out outDataPtr, out outInfoPtr);
                NNStreamer.CheckException(ret, "fail to invoke the single dynamic inference");

                out_data = TensorsData.CreateFromNativeHandle(outDataPtr, outInfoPtr, true);
            }
            else
            {
                TensorsInfo data_inInfo = inTensorsData.TensorsInfo;

                if (!data_inInfo.Equals(_inInfo))
                {
                    string msg = "The TensorsInfo of Input TensorsData is different from that of SingleShot object";
                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
                }

                /* Apply all data */
                inTensorsData.PrepareInvoke();

                ret = Interop.SingleShot.InvokeSingle(_handle, inTensorsData.GetHandle(), out outDataPtr);
                NNStreamer.CheckException(ret, "fail to invoke the single inference");

                out_data = TensorsData.CreateFromNativeHandle(outDataPtr, data_inInfo.GetTensorsInfoHandle(), true);
            }
            return(out_data);
        }
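
An end-to-end sketch tying the pieces together, under the same illustrative assumptions as above (the `single` and `inInfo` objects from the constructor sketch, plus a GetTensorData(int) accessor on TensorsData for reading a buffer back).

        // Allocate an input buffer that matches the model's input TensorsInfo,
        // run one inference, and read back the raw bytes of the first output.
        TensorsData input = TensorsData.Allocate(inInfo);
        input.SetTensorData(0, new byte[3 * 224 * 224 * 1]);

        TensorsData output = single.Invoke(input);
        byte[] scores = output.GetTensorData(0);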
Example 16
        /// <summary>
        /// Sets the tensor data at the given index.
        /// </summary>
        /// <param name="index">The index of the tensor.</param>
        /// <param name="buffer">Raw tensor data to be set.</param>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
        public void SetTensorData(int index, byte[] buffer)
        {
            NNStreamerError ret = NNStreamerError.None;

            NNStreamer.CheckNNStreamerSupport();

            if (buffer == null)
            {
                string msg = "buffer is null";
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
            }

            ret = Interop.Util.SetTensorData(_handle, index, buffer, buffer.Length);
            NNStreamer.CheckException(ret, "unable to set the buffer of TensorsData: " + index.ToString());
        }
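
A self-contained sketch for SetTensorData; the 10-element UInt8 tensor is illustrative, and AddTensorInfo(TensorType, int[]) is assumed as in the Tizen API.

        var info = new TensorsInfo();
        info.AddTensorInfo(TensorType.UInt8, new int[] { 10, 1, 1, 1 });

        TensorsData data = TensorsData.Allocate(info);
        // The buffer length must match the size reported by the TensorsInfo,
        // otherwise the native call rejects it with an ArgumentException.
        data.SetTensorData(0, new byte[10]);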
Example 17
            /// <summary>
            /// Selects input/output pad.
            /// </summary>
            /// <param name="padName">The pad name to be activated.</param>
            /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
            /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
            /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
            /// <exception cref="InvalidOperationException">Thrown when the node is invalid.</exception>
            /// <since_tizen> 8 </since_tizen>
            public void Select(string padName)
            {
                if (string.IsNullOrEmpty(padName))
                {
                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Pad name is invalid");
                }

                if (!Valid)
                {
                    NNStreamer.CheckException(NNStreamerError.InvalidOperation, "Current node is invalid: " + Name);
                }

                NNStreamerError ret = Interop.Pipeline.SelectSwitchPad(Handle, padName);

                NNStreamer.CheckException(ret, "Failed to select pad: " + padName);
            }
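
A sketch for Select. It assumes the pipeline description contains an input-selector element named "ins", that Pipeline offers a GetSwitch lookup analogous to GetSink/GetNormal above, and that "sink_1" is a valid pad of that selector; all three are illustrative assumptions.

        var sw = pipeline.GetSwitch("ins");
        // Route the stream coming from the selector's second sink pad.
        sw.Select("sink_1");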
Example 18
        /// <summary>
        /// Invokes the model with the given input data.
        /// </summary>
        /// <param name="inTensorsData">The input data to be inferred.</param>
        /// <returns>TensorsData instance which contains the inferred result.</returns>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="IOException">Thrown when failed to push an input data into source element.</exception>
        /// <exception cref="TimeoutException">Thrown when failed to get the result from sink element.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
        public TensorsData Invoke(TensorsData inTensorsData)
        {
            TensorsData     out_data;
            IntPtr          out_ptr = IntPtr.Zero;
            NNStreamerError ret     = NNStreamerError.None;

            if (inTensorsData == null)
            {
                string msg = "TensorsData is null";
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
            }

            ret = Interop.SingleShot.InvokeSingle(_handle, inTensorsData.Handle, out out_ptr);
            NNStreamer.CheckException(ret, "fail to invoke the single inference engine");

            out_data = TensorsData.CreateFromNativeHandle(out_ptr);
            return(out_data);
        }
Example 19
            /// <summary>
            /// Inputs tensor data to the source node.
            /// </summary>
            /// <param name="data">The tensors data</param>
            /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
            /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
            /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
            /// <exception cref="InvalidOperationException">Thrown when the node is invalid, or failed to input tensors data.</exception>
            /// <since_tizen> 8 </since_tizen>
            public void Input(TensorsData data)
            {
                if (data == null)
                {
                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Given data is invalid");
                }

                if (!Valid)
                {
                    NNStreamer.CheckException(NNStreamerError.InvalidOperation, "Current node is invalid: " + Name);
                }

                data.PrepareInvoke();

                NNStreamerError ret = Interop.Pipeline.InputSrcData(Handle, data.GetHandle(), PipelineBufferPolicy.NotFreed);

                NNStreamer.CheckException(ret, "Failed to input tensors data to source node: " + Name);
            }
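
A sketch for Input. It assumes the pipeline description starts with an appsrc element named "srcx" and that Pipeline offers a GetSource lookup analogous to GetSink above; the 4-byte UInt8 tensor is purely illustrative.

        var src = pipeline.GetSource("srcx");

        var info = new TensorsInfo();
        info.AddTensorInfo(TensorType.UInt8, new int[] { 4, 1, 1, 1 });

        TensorsData data = TensorsData.Allocate(info);
        data.SetTensorData(0, new byte[] { 1, 2, 3, 4 });

        // Push one buffer into the appsrc-backed source node.
        src.Input(data);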
Example 20
        private CustomFilter(string name, TensorsInfo inInfo, TensorsInfo outInfo, Func <TensorsData, TensorsData> filter)
        {
            /* Parameter check */
            if (name == null)
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Given name is null");
            }

            if (inInfo == null || outInfo == null)
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Given TensorsInfo is null");
            }

            if (filter == null)
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Given Callback interface is null");
            }

            _nativeCallback = (in_data_handle, out_data_handle, _) =>
            {
                TensorsData inData  = TensorsData.CreateFromNativeHandle(in_data_handle, IntPtr.Zero, true, false);
                TensorsData outData = _filter(inData);
                out_data_handle = outData.GetHandle();
            };

            IntPtr handle = IntPtr.Zero;

            /* Create custom filter callback */
            NNStreamerError ret = Interop.Pipeline.RegisterCustomFilter(name,
                                                                        inInfo.GetTensorsInfoHandle(), outInfo.GetTensorsInfoHandle(), _nativeCallback, IntPtr.Zero, out handle);

            NNStreamer.CheckException(ret, "Failed to create custom filter function: " + name);

            /* Set internal member */
            _InInfo  = inInfo;
            _OutInfo = outInfo;
            _filter  = filter;

            Name   = name;
            Handle = handle;
        }
Example 21
        internal static void CheckNNStreamerSupport()
        {
            if (_alreadyChecked == 1)
            {
                return;
            }

            string msg = "Machine Learning Inference Feature is not supported.";

            if (_alreadyChecked == 0)
            {
                Log.Error(NNStreamer.TAG, msg);
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.NotSupported, msg);
            }

            /* Feature Key check */
            bool isSupported = false;
            bool error       = Information.TryGetValue <bool>(FeatureKey, out isSupported);

            if (!error || !isSupported)
            {
                _alreadyChecked = 0;

                Log.Error(NNStreamer.TAG, msg);
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.NotSupported, msg);
            }

            /* Check required so files */
            try
            {
                Interop.Util.CheckNNFWAvailability(NNFWType.TensorflowLite, HWType.CPU, out isSupported);
            }
            catch (DllNotFoundException)
            {
                _alreadyChecked = 0;
                Log.Error(NNStreamer.TAG, msg);
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.NotSupported, msg);
            }

            _alreadyChecked = 1;
        }
Example 22
        /// <summary>
        /// Creates a new Pipeline instance with the given pipeline description
        /// </summary>
        /// <remarks>http://tizen.org/privilege/mediastorage is needed if pipeline description is relevant to media storage.</remarks>
        /// <remarks>http://tizen.org/privilege/externalstorage is needed if pipeline description is relevant to external storage.</remarks>
        /// <remarks>http://tizen.org/privilege/camera is needed if pipeline description accesses the camera device.</remarks>
        /// <remarks>http://tizen.org/privilege/recorder is needed if pipeline description accesses the recorder device.</remarks>
        /// <param name="description">The pipeline description. Refer to GStreamer manual or NNStreamer documentation for examples and the grammar.</param>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="UnauthorizedAccessException">Thrown when the application does not have the required privilege.</exception>
        /// <exception cref="InvalidOperationException">Thrown when the method failed due to the wrong pipeline description or internal error.</exception>
        /// <since_tizen> 8 </since_tizen>
        public Pipeline(string description)
        {
            NNStreamer.CheckNNStreamerSupport();

            if (string.IsNullOrEmpty(description))
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "The pipeline description is invalid");
            }

            _stateChangedCallback = (state, _) =>
            {
                StateChanged?.Invoke(this, new StateChangedEventArgs(state));
            };

            NNStreamerError ret = Interop.Pipeline.Construct(description, _stateChangedCallback, IntPtr.Zero, out _handle);

            NNStreamer.CheckException(ret, "Failed to create Pipeline instance");

            /* Init node list */
            _nodeList = new Dictionary <string, NodeInfo>();
        }
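
A construction sketch, not taken from the source: the description string is illustrative, and Start()/Stop() are assumed to exist on Pipeline as in the Tizen pipeline API.

        var pipeline = new Pipeline(
            "videotestsrc ! video/x-raw,format=RGB,width=640,height=480 ! tensor_converter ! tensor_sink name=sinkx");

        pipeline.StateChanged += (s, e) =>
        {
            // Inspect or log the new pipeline state here.
        };

        pipeline.Start();
        /* ... exchange data through the node accessors shown above ... */
        pipeline.Stop();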