Example #1
0
        /// <summary>
        /// Opens the native single-shot inference engine for the given model,
        /// optionally pinning input/output tensor layouts and selecting the
        /// framework/hardware backend.
        /// </summary>
        /// <param name="modelAbsPath">Absolute path of the model file; must be non-empty.</param>
        /// <param name="inTensorInfo">Input tensors information, or null to let the engine decide.</param>
        /// <param name="outTensorInfo">Output tensors information, or null to let the engine decide.</param>
        /// <param name="FWType">Neural network framework to use.</param>
        /// <param name="HWType">Hardware resource to run the model on.</param>
        /// <param name="IsDynamicMode">True to invoke with per-call (dynamic) output info.</param>
        private void CreateSingleShot(string modelAbsPath,
                                      TensorsInfo inTensorInfo, TensorsInfo outTensorInfo,
                                      NNFWType FWType, HWType HWType, bool IsDynamicMode)
        {
            /* Reject a null/empty model path before touching the native layer. */
            NNStreamerError ret = string.IsNullOrEmpty(modelAbsPath)
                ? NNStreamerError.InvalidParameter
                : NNStreamerError.None;
            NNStreamer.CheckException(ret, "model path is invalid: " + modelAbsPath);

            /* Remember which invoke path (dynamic vs. static) to use later. */
            _dynamicMode = IsDynamicMode;

            IntPtr inHandle  = IntPtr.Zero;
            IntPtr outHandle = IntPtr.Zero;

            /* Tensor layouts are optional: keep the managed objects and pass
             * their native handles (or IntPtr.Zero) to the open call. */
            if (inTensorInfo != null)
            {
                _inInfo  = inTensorInfo;
                inHandle = inTensorInfo.GetTensorsInfoHandle();
            }

            if (outTensorInfo != null)
            {
                _outInfo  = outTensorInfo;
                outHandle = outTensorInfo.GetTensorsInfoHandle();
            }

            ret = Interop.SingleShot.OpenSingle(out _handle, modelAbsPath, inHandle, outHandle, FWType, HWType);
            NNStreamer.CheckException(ret, "fail to open the single inference engine");
        }
Example #2
0
        /// <summary>
        /// Invokes the model with the given input data.
        /// </summary>
        /// <param name="inTensorsData">The input data to be inferred.</param>
        /// <returns>TensorsData instance which contains the inferred result.</returns>
        /// <feature>http://tizen.org/feature/machine_learning.inference</feature>
        /// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>
        /// <exception cref="TimeoutException">Thrown when failed to get the result from sink element.</exception>
        /// <exception cref="NotSupportedException">Thrown when the feature is not supported.</exception>
        /// <since_tizen> 6 </since_tizen>
        public TensorsData Invoke(TensorsData inTensorsData)
        {
            TensorsData     out_data   = null;
            IntPtr          outDataPtr = IntPtr.Zero;
            NNStreamerError ret        = NNStreamerError.None;

            NNStreamer.CheckNNStreamerSupport();

            if (inTensorsData == null)
            {
                string msg = "TensorsData is null";
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
            }

            if (_dynamicMode)
            {
                TensorsInfo inInfo = inTensorsData.TensorsInfo;
                if (inInfo == null)
                {
                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "TensorsInfo is null");
                }

                /* Apply all data */
                inTensorsData.PrepareInvoke();

                /* Dynamic mode: the native call reports the output info per
                 * invocation, so the result is paired with that fresh handle. */
                IntPtr outInfoPtr = IntPtr.Zero;
                ret = Interop.SingleShot.InvokeSingleDynamic(_handle, inTensorsData.GetHandle(), inInfo.GetTensorsInfoHandle(), out outDataPtr, out outInfoPtr);
                NNStreamer.CheckException(ret, "fail to invoke the single dynamic inference");

                out_data = TensorsData.CreateFromNativeHandle(outDataPtr, outInfoPtr, true);
            }
            else
            {
                TensorsInfo data_inInfo = inTensorsData.TensorsInfo;

                /* BUG FIX: guard against a null TensorsInfo (mirroring the
                 * dynamic-mode check) instead of crashing on .Equals below. */
                if (data_inInfo == null || !data_inInfo.Equals(_inInfo))
                {
                    string msg = "The TensorsInfo of Input TensorsData is different from that of SingleShot object";
                    throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, msg);
                }

                /* Apply all data */
                inTensorsData.PrepareInvoke();

                ret = Interop.SingleShot.InvokeSingle(_handle, inTensorsData.GetHandle(), out outDataPtr);
                NNStreamer.CheckException(ret, "fail to invoke the single inference");

                /* BUG FIX: the inferred result must be described by the model's
                 * OUTPUT tensors info (_outInfo, stored at creation), not the
                 * input info — pairing output data with the input layout
                 * misreads the result whenever in/out shapes differ. Fall back
                 * to the old behavior only when no output info was supplied. */
                IntPtr outInfoHandle = (_outInfo != null)
                    ? _outInfo.GetTensorsInfoHandle()
                    : data_inInfo.GetTensorsInfoHandle();
                out_data = TensorsData.CreateFromNativeHandle(outDataPtr, outInfoHandle, true);
            }
            return out_data;
        }
Example #3
0
        /// <summary>
        /// Opens the native single-shot inference engine for the given model
        /// with the default backend selection (NNFWType.Any / HWType.Any).
        /// </summary>
        /// <param name="modelAbsPath">Absolute path of the model file; must be non-empty.</param>
        /// <param name="inTensorInfo">Input tensors information, or null to let the engine decide.</param>
        /// <param name="outTensorInfo">Output tensors information, or null to let the engine decide.</param>
        private void CreateSingleShot(string modelAbsPath, TensorsInfo inTensorInfo, TensorsInfo outTensorInfo)
        {
            NNStreamerError ret = NNStreamerError.None;
            IntPtr          input_info;
            IntPtr          output_info;

            /* Check model path */
            if (string.IsNullOrEmpty(modelAbsPath))
            {
                ret = NNStreamerError.InvalidParameter;
            }
            NNStreamer.CheckException(ret, "model path is invalid: " + modelAbsPath);

            /* BUG FIX: tolerate null TensorsInfo by passing IntPtr.Zero to the
             * native open call instead of throwing NullReferenceException on
             * GetTensorsInfoHandle(). */
            input_info  = (inTensorInfo != null) ? inTensorInfo.GetTensorsInfoHandle() : IntPtr.Zero;
            output_info = (outTensorInfo != null) ? outTensorInfo.GetTensorsInfoHandle() : IntPtr.Zero;

            ret = Interop.SingleShot.OpenSingle(out _handle, modelAbsPath, input_info, output_info, NNFWType.Any, HWType.Any);
            NNStreamer.CheckException(ret, "fail to open the single inference engine");
        }
Example #4
0
        /// <summary>
        /// Registers a native custom filter that bridges pipeline tensor data
        /// through the given managed transform function.
        /// </summary>
        /// <param name="name">Unique name identifying the custom filter.</param>
        /// <param name="inInfo">Input tensors information; must not be null.</param>
        /// <param name="outInfo">Output tensors information; must not be null.</param>
        /// <param name="filter">Managed callback that produces output TensorsData from input TensorsData.</param>
        private CustomFilter(string name, TensorsInfo inInfo, TensorsInfo outInfo, Func <TensorsData, TensorsData> filter)
        {
            /* Parameter check */
            if (name == null)
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Given name is null");
            }

            if (inInfo == null || outInfo == null)
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Given TensorsInfo is null");
            }

            if (filter == null)
            {
                throw NNStreamerExceptionFactory.CreateException(NNStreamerError.InvalidParameter, "Given Callback interface is null");
            }

            /* BUG FIX: capture the validated 'filter' parameter directly rather
             * than reading the _filter field, which was only assigned AFTER the
             * callback had been handed to the native layer — a callback fired
             * in that window would have hit a NullReferenceException. */
            _nativeCallback = (in_data_handle, out_data_handle, _) =>
            {
                TensorsData inData  = TensorsData.CreateFromNativeHandle(in_data_handle, IntPtr.Zero, true, false);
                TensorsData outData = filter(inData);
                out_data_handle = outData.GetHandle();
            };

            IntPtr handle = IntPtr.Zero;

            /* Create custom filter callback */
            NNStreamerError ret = Interop.Pipeline.RegisterCustomFilter(name,
                                                                        inInfo.GetTensorsInfoHandle(), outInfo.GetTensorsInfoHandle(), _nativeCallback, IntPtr.Zero, out handle);

            NNStreamer.CheckException(ret, "Failed to create custom filter function: " + name);

            /* Set internal member */
            _InInfo  = inInfo;
            _OutInfo = outInfo;
            _filter  = filter;

            Name   = name;
            Handle = handle;
        }