Example #1
        /// <summary>
        /// Invokes the model with the given input data.
        /// </summary>
        /// <param name="inTensorsData">The input data to be inferred.</param>
        /// <returns>TensorsData instance which contains the inferred result.</returns>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="TimeoutException">Thrown when failed to get the result from sink element.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
        public TensorsData Invoke(TensorsData inTensorsData)
        {
            TensorsData     out_data   = null;
            IntPtr          outDataPtr = IntPtr.Zero;
            NNStreamerError ret        = NNStreamerError.None;

            NNStreamer.CheckNNStreamerSupport();

            if (inTensorsData == null)
            {
                string msg = "TensorsData is null";
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
            }

            if (_dynamicMode)
            {
                TensorsInfo inInfo = inTensorsData.TensorsInfo;
                if (inInfo == null)
                {
                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");
                }

                /* Apply all data */
                inTensorsData.PrepareInvoke();

                IntPtr outInfoPtr = IntPtr.Zero;
                ret = Interop.SingleShot.InvokeSingleDynamic(_handle, inTensorsData.GetHandle(), inInfo.GetTensorsInfoHandle(), out outDataPtr, out outInfoPtr);
                NNStreamer.CheckException(ret, "fail to invoke the single dynamic inference");

                out_data = TensorsData.CreateFromNativeHandle(outDataPtr, outInfoPtr, true);
            }
            else
            {
                TensorsInfo data_inInfo = inTensorsData.TensorsInfo;

                if (!data_inInfo.Equals(_inInfo))
                {
                    string msg = "The TensorsInfo of Input TensorsData is different from that of SingleShot object";
                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
                }

                /* Apply all data */
                inTensorsData.PrepareInvoke();

                ret = Interop.SingleShot.InvokeSingle(_handle, inTensorsData.GetHandle(), out outDataPtr);
                NNStreamer.CheckException(ret, "fail to invoke the single inference");

                out_data = TensorsData.CreateFromNativeHandle(outDataPtr, data_inInfo.GetTensorsInfoHandle(), true);
            }
            return out_data;
        }
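A minimal usage sketch for the Invoke API shown above; the model path, tensor type, and dimensions below are illustrative assumptions, not taken from the snippet.

// Illustrative only: model path, tensor type, and dimensions are assumptions.
using Tizen.MachineLearning.Inference;

TensorsInfo inInfo = new TensorsInfo();
inInfo.AddTensorInfo(TensorType.UInt8, new int[4] { 3, 224, 224, 1 });

TensorsInfo outInfo = new TensorsInfo();
outInfo.AddTensorInfo(TensorType.UInt8, new int[4] { 1001, 1, 1, 1 });

using (SingleShot single = new SingleShot("/path/to/model.tflite", inInfo, outInfo))
{
    // Allocate an input buffer matching inInfo and fill it with real data.
    TensorsData input = inInfo.GetTensorsData();
    input.SetTensorData(0, new byte[3 * 224 * 224]);

    // Invoke() blocks until the inference result is available or times out.
    TensorsData output = single.Invoke(input);
    byte[] scores = output.GetTensorData(0);
}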
Example #2
            /// <summary>
            /// Inputs tensor data to source node.
            /// </summary>
            /// <param name="data">The tensors data.</param>
            /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
            /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
            /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
            /// <exception cref="InvalidOperationException">Thrown when the node is invalid, or failed to input tensors data.</exception>
            /// <since_tizen> 8 </since_tizen>
            public void Input(TensorsData data)
            {
                if (data == null)
                {
                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Given data is invalid");
                }

                if (!Valid)
                {
                    NNStreamer.CheckException(NNStreamerError.InvalidOperation, "Current node is invalid: " + Name);
                }

                data.PrepareInvoke();

                NNStreamerError ret = Interop.Pipeline.InputSrcData(Handle, data.GetHandle(), PipelineBufferPolicy.NotFreed);

                NNStreamer.CheckException(ret, "Failed to input tensors data to source node: " + Name);
            }
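A rough sketch of feeding a buffer through this Input method; the appsrc-based pipeline description and tensor shape are assumptions for illustration.

// Illustrative only: the pipeline description and tensor shape are assumptions.
using Tizen.MachineLearning.Inference;

string desc = "appsrc name=srcx ! " +
              "other/tensor,dimension=(string)4:1:1:1,type=(string)uint8,framerate=(fraction)0/1 ! " +
              "tensor_sink";

using (Pipeline pipeline = new Pipeline(desc))
{
    Pipeline.SourceNode src = pipeline.GetSource("srcx");
    pipeline.Start();

    TensorsInfo info = new TensorsInfo();
    info.AddTensorInfo(TensorType.UInt8, new int[4] { 4, 1, 1, 1 });

    TensorsData data = info.GetTensorsData();
    data.SetTensorData(0, new byte[4] { 1, 2, 3, 4 });

    // Pushes the buffer into the appsrc element named "srcx".
    src.Input(data);

    pipeline.Stop();
}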
Example #3
        private CustomFilter(string name, TensorsInfo inInfo, TensorsInfo outInfo, Func<TensorsData, TensorsData> filter)
        {
            /* Parameter check */
            if (name == null)
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Given name is null");
            }

            if (inInfo == null || outInfo == null)
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Given TensorsInfo is null");
            }

            if (filter == null)
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Given Callback interface is null");
            }

            /* Wrap the managed filter so the native custom-filter callback can invoke it:
             * the native input handle is wrapped as TensorsData, passed to the user filter,
             * and the resulting data handle is handed back to the native side. */
            _nativeCallback = (in_data_handle, out_data_handle, _) =>
            {
                TensorsData inData  = TensorsData.CreateFromNativeHandle(in_data_handle, IntPtr.Zero, true, false);
                TensorsData outData = _filter(inData);
                out_data_handle = outData.GetHandle();
            };

            IntPtr handle = IntPtr.Zero;

            /* Create custom filter callback */
            NNStreamerError ret = Interop.Pipeline.RegisterCustomFilter(name,
                                                                        inInfo.GetTensorsInfoHandle(), outInfo.GetTensorsInfoHandle(), _nativeCallback, IntPtr.Zero, out handle);

            NNStreamer.CheckException(ret, "Failed to create custom filter function: " + name);

            /* Set internal member */
            _InInfo  = inInfo;
            _OutInfo = outInfo;
            _filter  = filter;

            Name   = name;
            Handle = handle;
        }
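A hedged sketch of registering a pass-through custom filter through this constructor; it assumes the class exposes a static Create factory that forwards here (verify the factory name in your TizenFX version), and the filter name, tensor shapes, and pipeline comment are illustrative.

// Illustrative only: the Create factory, filter name, and shapes are assumptions.
using Tizen.MachineLearning.Inference;

TensorsInfo inInfo = new TensorsInfo();
inInfo.AddTensorInfo(TensorType.Int32, new int[4] { 10, 1, 1, 1 });

TensorsInfo outInfo = new TensorsInfo();
outInfo.AddTensorInfo(TensorType.Int32, new int[4] { 10, 1, 1, 1 });

// The lambda receives the input TensorsData and must return the filtered output.
CustomFilter passThrough = CustomFilter.Create("custom_passthrough", inInfo, outInfo,
    (TensorsData input) =>
    {
        TensorsData output = outInfo.GetTensorsData();
        output.SetTensorData(0, input.GetTensorData(0));  // copy the buffer unchanged
        return output;
    });

// The registered name could then be referenced from a pipeline description, e.g.
// "... ! tensor_filter framework=custom-easy model=custom_passthrough ! ..."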