Example #1
        /// <summary>
        /// Get a copy of the tensor data as a managed array
        /// </summary>
        /// <param name="jagged">If true, return the data as a jagged array. Otherwise, return a single dimension array.</param>
        /// <returns>A copy of the tensor data as a managed array</returns>
        public Array GetData(bool jagged = true)
        {
            DataType type = this.Type;
            Type     t    = TfLiteInvoke.GetNativeType(type);

            if (t == null)
            {
                return(null);
            }

            Array array;
            int   byteSize = ByteSize;

            if (jagged)
            {
                int[] dim = this.Dims;
                array = Array.CreateInstance(t, dim);
            }
            else
            {
                int len = byteSize / Marshal.SizeOf(t);
                array = Array.CreateInstance(t, len);
            }

            GCHandle handle = GCHandle.Alloc(array, GCHandleType.Pinned);

            TfLiteInvoke.tfeMemcpy(handle.AddrOfPinnedObject(), DataPointer, byteSize);
            handle.Free();
            return(array);
        }
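
For context, a plausible call site for GetData, assuming an Interpreter that has already been created and invoked (see the Interpreter examples below); the float element type and output index 0 are assumptions:

        // Hypothetical usage sketch, not part of the library:
        int[] outputIndices = interpreter.GetOutput();            // indices of the output tensors
        Tensor outputTensor = interpreter.GetTensor(outputIndices[0]);
        // jagged: false returns a flat one-dimensional array regardless of tensor shape
        float[] flat = (float[])outputTensor.GetData(jagged: false);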
Example #2
 /// <summary>
 /// Release all the unmanaged memory associated with this delegate
 /// </summary>
 protected override void DisposeObject()
 {
     if (IntPtr.Zero != _ptr)
     {
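         // tfeGpuDelegateDelete takes _ptr by ref and is presumably expected
         // to reset it to IntPtr.Zero (an assumption about the native API).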
         TfLiteInvoke.tfeGpuDelegateDelete(ref _ptr);
     }
 }
Example #3
 /// <summary>
 /// Release all the unmanaged memory associated with this interpreter
 /// </summary>
 protected override void DisposeObject()
 {
     if (ptr != IntPtr.Zero)
     {
         TfLiteInvoke.TfeInterpreterRelease(ref ptr);
     }
 }
Example #4
 /// <summary>
 /// Release all the unmanaged memory associated with this model
 /// </summary>
 protected override void DisposeObject()
 {
     if (IntPtr.Zero != _ptr)
     {
         TfLiteInvoke.tfeFlatBufferModelRelease(ref _ptr);
     }
 }
Example #5
 /// <summary>
 /// Release all the unmanaged memory associated with this buffer
 /// </summary>
 protected override void DisposeObject()
 {
     if (IntPtr.Zero != _ptr)
     {
         TfLiteInvoke.tfeDynamicBufferRelease(ref _ptr);
     }
 }
Example #6
 /// <summary>
 /// Release all the unmanaged memory associated with this resolver
 /// </summary>
 protected override void DisposeObject()
 {
     if (IntPtr.Zero != _ptr)
     {
         TfLiteInvoke.tfeBuiltinOpResolverRelease(ref _ptr);
         _opResolverPtr = IntPtr.Zero;
     }
 }
Example #7
 /// <summary>
 /// Release all the unmanaged memory associated with this delegate
 /// </summary>
 protected override void DisposeObject()
 {
     if (IntPtr.Zero != _ptr)
     {
         TfLiteInvoke.tfeStatefulNnApiDelegateRelease(ref _ptr);
         _delegatePtr = IntPtr.Zero;
     }
 }
Example #8
 /// <summary>
 /// Release all the unmanaged memory associated with this resolver
 /// </summary>
 protected override void DisposeObject()
 {
     if (ptr != IntPtr.Zero)
     {
         TfLiteInvoke.TfeBuiltinOpResolverRelease(ref ptr);
         opResolverPtr = IntPtr.Zero;
     }
 }
Example #9
        /// <summary>
        /// Add string to dynamic buffer by resizing the buffer and copying the data.
        /// </summary>
        /// <param name="str">The string to add to the dynamic buffer</param>
        public void AddString(String str)
        {
            byte[]   rawString = Encoding.ASCII.GetBytes(str);
            GCHandle handle    = GCHandle.Alloc(rawString, GCHandleType.Pinned);

            TfLiteInvoke.tfeDynamicBufferAddString(_ptr, handle.AddrOfPinnedObject(), rawString.Length);
            handle.Free();
        }
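
A minimal sketch combining AddString with the DynamicBuffer constructor (Example #22) and WriteToTensor (Example #21); the string tensor stringTensor is assumed to come from an interpreter:

        // Hypothetical usage sketch, not part of the library:
        using (DynamicBuffer buffer = new DynamicBuffer())
        {
            buffer.AddString("hello");
            buffer.AddString("world");
            buffer.WriteToTensor(stringTensor);   // copy the accumulated strings into the tensor
        }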
Example #10
        /// <summary>
        /// Get the list of tensor indices of the output tensors.
        /// </summary>
        /// <returns>The list of tensor indices of the output tensors.</returns>
        public int[] GetOutput()
        {
            int size = TfLiteInvoke.tfeInterpreterGetOutputSize(_ptr);

            int[]    output = new int[size];
            GCHandle handle = GCHandle.Alloc(output, GCHandleType.Pinned);

            TfLiteInvoke.tfeInterpreterGetOutput(_ptr, handle.AddrOfPinnedObject());
            handle.Free();
            return(output);
        }
Example #11
 /// <summary>
 /// Release all the unmanaged memory associated with this model
 /// </summary>
 protected override void DisposeObject()
 {
     if (IntPtr.Zero != _ptr)
     {
         TfLiteInvoke.tfeFlatBufferModelRelease(ref _ptr);
     }
     if (_buffer != null)
     {
         _handle.Free();
         _buffer = null;
     }
 }
Example #12
        /// <summary>
        /// Get the list of tensor indices of the output tensors.
        /// </summary>
        /// <returns>The list of tensor indices of the output tensors.</returns>
        private int[] GetOutput()
        {
            int size = TfLiteInvoke.tfeInterpreterGetOutputSize(_ptr);

            int[]    output     = new int[size];
            GCHandle handle     = GCHandle.Alloc(output, GCHandleType.Pinned);
            int      outputSize = TfLiteInvoke.tfeInterpreterGetOutput(_ptr, handle.AddrOfPinnedObject());

            Debug.Assert(outputSize == size, "Output sizes do not match!");
            handle.Free();
            return(output);
        }
Example #13
        /// <summary>
        /// Release all the unmanaged memory associated with this model
        /// </summary>
        protected override void DisposeObject()
        {
            if (ptr != IntPtr.Zero)
            {
                TfLiteInvoke.TfeFlatBufferModelRelease(ref ptr);
            }

            if (buffer != null)
            {
                handle.Free();
                buffer = null;
            }
        }
Example #14
        public FlatBufferModel(byte[] buffer)
        {
            GCHandle handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);

            try
            {
                _ptr = TfLiteInvoke.tfeFlatBufferModelBuildFromBuffer(handle.AddrOfPinnedObject(), buffer.Length);
            }
            finally
            {
                handle.Free();
            }
        }
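
Note that this overload unpins the caller's buffer as soon as the native call returns, which is only safe if the native side copies the data. If the native model keeps referencing the managed memory, the variant in Example #17 below, which keeps its own copy pinned until disposal, is the more defensive pattern.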
Example #15
 /// <summary>
 /// Release all the unmanaged memory associated with this IntArray
 /// </summary>
 protected override void DisposeObject()
 {
     if (IntPtr.Zero != _ptr)
     {
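         // _needDispose distinguishes arrays owned by this wrapper (released
         // natively) from borrowed views, which merely drop their reference.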
         if (_needDispose)
         {
             TfLiteInvoke.tfeIntArrayRelease(ref _ptr);
         }
         else
         {
             _ptr = IntPtr.Zero;
         }
     }
 }
Example #16
        /*
         * private Interpreter()
         * {
         *  _ptr = TfLiteInvoke.tfeInterpreterCreate();
         * }*/

        /// <summary>
        /// Create an interpreter from a flatbuffer model
        /// </summary>
        /// <param name="flatBufferModel">The flat buffer model.</param>
        /// <param name="resolver">An instance that implements the Resolver interface which maps custom op names and builtin op codes to op registrations.</param>
        public Interpreter(FlatBufferModel flatBufferModel, IOpResolver resolver = null)
        {
            if (resolver == null)
            {
                using (BuildinOpResolver buildinResolver = new BuildinOpResolver())
                {
                    _ptr = TfLiteInvoke.tfeInterpreterCreateFromModel(flatBufferModel.Ptr, ((IOpResolver)buildinResolver).OpResolverPtr);
                }
            }
            else
            {
                _ptr = TfLiteInvoke.tfeInterpreterCreateFromModel(flatBufferModel.Ptr, resolver.OpResolverPtr);
            }
        }
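
Putting the pieces together, a minimal inference flow might look like the sketch below; the model path is a placeholder, the input-filling step is elided, and a float32 output tensor is assumed:

        // Hypothetical usage sketch, not part of the library:
        using (FlatBufferModel model = new FlatBufferModel("model.tflite"))   // placeholder path
        using (Interpreter interpreter = new Interpreter(model))              // built-in op resolver
        {
            interpreter.AllocateTensors();

            // ... copy input data into the input tensor(s) here ...

            Status status = interpreter.Invoke();

            int[] outputIndices = interpreter.GetOutput();
            Tensor output = interpreter.GetTensor(outputIndices[0]);
            float[] result = (float[])output.GetData(jagged: false);          // assumes float32 output
        }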
Example #17
 /// <summary>
 /// Builds a model based on a pre-loaded flatbuffer.
 /// </summary>
 /// <param name="buffer">The buffer where the FlatBufferModel will be loaded from.</param>
 public FlatBufferModel(byte[] buffer)
 {
     _buffer = new byte[buffer.Length];
     Array.Copy(buffer, _buffer, _buffer.Length);
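     // Keep the copied buffer pinned on purpose: the native model may keep
     // referencing this memory, so _handle is only freed in DisposeObject
     // (see Example #11).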
     _handle = GCHandle.Alloc(_buffer, GCHandleType.Pinned);
     try
     {
         _ptr = TfLiteInvoke.tfeFlatBufferModelBuildFromBuffer(_handle.AddrOfPinnedObject(), buffer.Length);
     } catch
     {
         _handle.Free();
         _buffer = null;
         throw;
     }
 }
Example #18
        /// <summary>
        /// Resize the input tensor
        /// </summary>
        /// <param name="inputIndex">The index of the input tensor</param>
        /// <param name="dimension">The new dimension for the input tensor</param>
        /// <returns>Status of success or failure.</returns>
        public Status ResizeInputTensor(int inputIndex, int[] dimension)
        {
            GCHandle handle = GCHandle.Alloc(dimension, GCHandleType.Pinned);

            try
            {
                return(TfLiteInvoke.tfeInterpreterResizeInputTensor(
                           _ptr,
                           inputIndex,
                           handle.AddrOfPinnedObject(),
                           dimension.Length));
            }
            finally
            {
                handle.Free();
            }
        }
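
Because TFLite re-plans tensor allocations after a resize, callers would typically follow this with AllocateTensors (Example #29); input index 0 and the NHWC shape below are hypothetical:

        // Hypothetical usage sketch, not part of the library:
        interpreter.ResizeInputTensor(0, new int[] { 1, 224, 224, 3 });
        interpreter.AllocateTensors();   // re-allocate after any resize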
Example #19
        /// <summary>
        /// Get a copy of the tensor data as a managed array
        /// </summary>
        /// <returns>A copy of the tensor data as a managed array</returns>
        public Array GetData()
        {
            DataType categoria = this.Type;
            Type t = GetNativeType(categoria);
            if (t == null)
            {
                return null;
            }

            int byteSize = ByteSize;
            Array array;

            int len = byteSize / Marshal.SizeOf(t);
            array = Array.CreateInstance(t, len);

            GCHandle handle = GCHandle.Alloc(array, GCHandleType.Pinned);
            TfLiteInvoke.TfeMemcpy(handle.AddrOfPinnedObject(), DataPointer, byteSize);
            handle.Free();
            return array;
        }
Example #20
        /// <summary>
        /// Builds a model based on a file.
        /// </summary>
        /// <param name="filename">The name of the file where the FlatBufferModel will be loaded from.</param>
        public FlatBufferModel(String filename)
        {
            if (!File.Exists(filename))
            {
                throw new FileNotFoundException(String.Format("File {0} does not exist", filename));
            }

            try
            {
                _ptr = TfLiteInvoke.tfeFlatBufferModelBuildFromFile(filename);
            }
            catch (Exception)
            {
                byte[] buffer = File.ReadAllBytes(filename);
                if (buffer.Length == 0)
                {
                    throw new FileNotFoundException(String.Format("File {0} is empty", filename));
                }
                ReadModelFromBuffer(buffer);
            }
        }
Example #21
 /// <summary>
 /// Fill content into a string tensor.
 /// </summary>
 /// <param name="tensor">The string tensor</param>
 public void WriteToTensor(Tensor tensor, IntArray newShape = null)
 {
     TfLiteInvoke.tfeDynamicBufferWriteToTensor(_ptr, tensor, newShape == null ? IntPtr.Zero : newShape);
 }
Example #22
 /// <summary>
 /// Create a new dynamic buffer.
 /// </summary>
 public DynamicBuffer()
 {
     _ptr = TfLiteInvoke.tfeDynamicBufferCreate();
 }
Example #23
 /// <summary>
 /// GPU delegate for iOS using metal
 /// </summary>
 public GpuDelegate()
 {
     _ptr = TfLiteInvoke.tfeGpuDelegateCreate();
 }
Example #24
        /// <summary>
        /// Get the name of the output tensor at the given index.
        /// </summary>
        /// <param name="index">The index of the output tensor</param>
        /// <returns>The name of the output tensor</returns>
        public String GetOutputName(int index)
        {
            IntPtr namePtr = TfLiteInvoke.tfeInterpreterGetOutputName(_ptr, index);

            return(Marshal.PtrToStringAnsi(namePtr));
        }
Example #25
 /// <summary>
 /// Create a Stateful NNAPI delegate
 /// </summary>
 public StatefulNnApiDelegate()
 {
     _ptr = TfLiteInvoke.tfeStatefulNnApiDelegateCreate(ref _delegatePtr);
 }
Example #26
 /// <summary>
 /// Set the number of threads available to the interpreter.
 /// </summary>
 /// <param name="numThreads"></param>
 public void SetNumThreads(int numThreads)
 {
     TfLiteInvoke.tfeInterpreterSetNumThreads(_ptr, numThreads);
 }
Example #27
 /// <summary>
 /// Get the tensor at the given index.
 /// </summary>
 /// <param name="index">The index of the tensor</param>
 /// <returns>The tensor at the given index</returns>
 public Tensor GetTensor(int index)
 {
     return(new Tensor(TfLiteInvoke.tfeInterpreterGetTensor(_ptr, index), false));
 }
Example #28
 /// <summary>
 /// Invoke the interpreter to run the loaded model.
 /// </summary>
 /// <returns>Status of success or failure.</returns>
 public Status Invoke()
 {
     return(TfLiteInvoke.tfeInterpreterInvoke(_ptr));
 }
Example #29
 /// <summary>
 /// Update allocations for all tensors.
 /// </summary>
 /// <returns>Status of success or failure.</returns>
 public Status AllocateTensors()
 {
     return(TfLiteInvoke.tfeInterpreterAllocateTensors(_ptr));
 }
Example #30
        /*
         * private Interpreter()
         * {
         *  _ptr = TfLiteInvoke.tfeInterpreterCreate();
         * }*/

        /// <summary>
        /// Create an interpreter from a flatbuffer model
        /// </summary>
        /// <param name="flatBufferModel">The flat buffer model.</param>
        /// <param name="resolver">An instance that implements the Resolver interface which maps custom op names and builtin op codes to op registrations.</param>
        public Interpreter(FlatBufferModel flatBufferModel, IOpResolver resolver)
        {
            _ptr = TfLiteInvoke.tfeInterpreterCreateFromModel(flatBufferModel.Ptr, resolver.OpResolverPtr);
        }
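
Unlike Example #16, this overload requires an explicit resolver. A sketch pairing it with the BuildinOpResolver seen there (the model path is a placeholder):

        // Hypothetical usage sketch, not part of the library:
        using (FlatBufferModel model = new FlatBufferModel("model.tflite"))
        using (BuildinOpResolver resolver = new BuildinOpResolver())
        using (Interpreter interpreter = new Interpreter(model, resolver))
        {
            interpreter.AllocateTensors();
            // ... fill inputs, call Invoke(), read outputs ...
        }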