public void Initialize(string labelPath, string modelPath)
        {
            // Already set up?  Nothing to do.
            if (initialized)
            {
                return;
            }

            // Multi-threaded inference is only requested on Android.
            useNumThreads = Device.RuntimePlatform == Device.Android;

            // Decode the label file with the platform default encoding and
            // keep one trimmed label per non-empty line.
            var rawLabelText = Encoding.Default.GetString(File.ReadAllBytes(labelPath));

            labels = rawLabelText
                     .Split(new[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries)
                     .Select(line => line.Trim())
                     .ToArray();

            // Load the TFLite flatbuffer and verify its magic identifier.
            model = new FlatBufferModel(modelPath);
            if (!model.CheckModelIdentifier())
            {
                throw new Exception("Model identifier check failed");
            }

            // Scratch buffer for one square RGB input image (3 bytes/pixel).
            quantizedColors = new byte[ModelInputSize * ModelInputSize * 3];

            initialized = true;
        }
Example #2
0
        private void ImportGraph()
        {
            // Re-importing: tear down any previous interpreter and clear the
            // caches that belong to it.  FIX: the original disposed the
            // interpreter but left _interpreter (and the tensor caches)
            // non-null, so the "== null" lazy initializers below never ran
            // again and later calls used a disposed interpreter.
            if (_interpreter != null)
            {
                _interpreter.Dispose();
                _interpreter  = null;
                _inputTensor  = null;
                _outputTensor = null;
            }

#if UNITY_EDITOR || UNITY_IOS || UNITY_ANDROID || UNITY_STANDALONE
            UnityEngine.Debug.Log("Reading model definition");
#endif

            // Files[0] is the model, Files[1] is the label list.
            String modelFileName = _downloadManager.Files[0].LocalFile;
            String labelFileName = _downloadManager.Files[1].LocalFile;

            System.Diagnostics.Debug.Assert(File.Exists(modelFileName), String.Format("File {0} doesn't exist", modelFileName));
            System.Diagnostics.Debug.Assert(File.Exists(labelFileName), String.Format("File {0} doesn't exist", labelFileName));

            // Asserts are compiled out of release builds; bail out quietly
            // when either file is missing.
            if (!File.Exists(modelFileName) || !File.Exists(labelFileName))
            {
                return;
            }

            if (_labels == null)
            {
                _labels = File.ReadAllLines(labelFileName);
            }

            if (_model == null)
            {
                _model = new FlatBufferModel(modelFileName);
                if (!_model.CheckModelIdentifier())
                {
                    throw new Exception("Model identifier check failed");
                }
            }

            if (_interpreter == null)
            {
                _interpreter = new Interpreter(_model);
                Status allocateTensorStatus = _interpreter.AllocateTensors();
                if (allocateTensorStatus == Status.Error)
                {
                    throw new Exception("Failed to allocate tensor");
                }
            }

            // Cache the first input/output tensors of the (new) interpreter.
            if (_inputTensor == null)
            {
                int[] input = _interpreter.InputIndices;
                _inputTensor = _interpreter.GetTensor(input[0]);
            }

            if (_outputTensor == null)
            {
                int[] output = _interpreter.OutputIndices;
                _outputTensor = _interpreter.GetTensor(output[0]);
            }
        }
Example #3
0
        /// <summary>
        /// Build a human-readable summary of a TFLite model file: the file
        /// name plus name/type/shape for every input and output tensor.
        /// Any failure is folded into the returned text instead of thrown.
        /// </summary>
        private static String GetModelInfo(String fileName)
        {
            StringBuilder report = new StringBuilder();

            try
            {
                report.AppendFormat("File Name:{0}{1}", fileName, Environment.NewLine);

                using (FlatBufferModel fbm = new FlatBufferModel(fileName))
                {
                    if (!fbm.CheckModelIdentifier())
                    {
                        throw new Exception("Model identifier check failed");
                    }

                    using (Interpreter interpreter = new Interpreter(fbm))
                    {
                        if (interpreter.AllocateTensors() == Status.Error)
                        {
                            throw new Exception("Failed to allocate tensor");
                        }

                        // One line per input tensor.
                        int[] inputIndices = interpreter.InputIndices;
                        for (int i = 0; i < inputIndices.Length; i++)
                        {
                            Tensor t = interpreter.GetTensor(inputIndices[i]);
                            report.AppendFormat("Input {0} ({1}): {2}{3}{4}", i, t.Name,
                                                t.Type, IntArrayToString(t.Dims), Environment.NewLine);
                        }

                        // One line per output tensor.
                        int[] outputIndices = interpreter.OutputIndices;
                        for (int i = 0; i < outputIndices.Length; i++)
                        {
                            Tensor t = interpreter.GetTensor(outputIndices[i]);
                            report.AppendFormat("Output {0} ({1}): {2}{3}{4}", i, t.Name,
                                                t.Type, IntArrayToString(t.Dims), Environment.NewLine);
                        }
                    }
                }

                return(report.ToString());
            }
            catch (Exception ex)
            {
                // Report the failure as part of the result text.
                report.AppendFormat("Exception processing file {0}: {1}{2} ", fileName, ex.ToString(),
                                    Environment.NewLine);
                return(report.ToString());
            }
        }
Example #4
0
        /// <summary>
        /// Release unmanaged memory associated with this coco ssd mobilenet model.
        /// </summary>
        protected override void DisposeObject()
        {
            // Tear down the interpreter before the model it was built from,
            // then drop both references.
            _interpreter?.Dispose();
            _interpreter = null;

            _model?.Dispose();
            _model = null;
        }
Example #5
0
        /// <summary>
        /// Load a TFLite model from a stream and prepare the interpreter,
        /// the input tensor and all output tensors.
        /// </summary>
        /// <param name="modelData">Stream containing the flatbuffer model.</param>
        /// <param name="useNumThreads">True to use one thread per processor.</param>
        /// <returns>False when the model identifier check or tensor allocation fails.</returns>
        public bool Initialize(Stream modelData, bool useNumThreads)
        {
            // Build the text recognizer up front.
            using (var recognizerBuilder = new TextRecognizer.Builder(MainActivity.context))
            {
                txtRecognizer = recognizerBuilder.Build();
            }

            // Buffer the whole stream in memory and hand the raw bytes to
            // the flatbuffer loader.
            using (var buffer = new MemoryStream())
            {
                modelData.CopyTo(buffer);
                model = new FlatBufferModel(buffer.ToArray());
            }

            if (!model.CheckModelIdentifier())
            {
                return(false);
            }

            var resolver = new BuildinOpResolver();

            interpreter = new Interpreter(model, resolver);

            if (useNumThreads)
            {
                interpreter.SetNumThreads(Environment.ProcessorCount);
            }

            if (interpreter.AllocateTensors() == Status.Error)
            {
                return(false);
            }

            var inputIndices = interpreter.GetInput();
            inputTensor = interpreter.GetTensor(inputIndices[0]);

            var outputIndices = interpreter.GetOutput();
            var firstOutput   = outputIndices[0];

            // NOTE(review): assumes the output tensors occupy consecutive
            // indices starting at the first one — confirm for the model used.
            outputTensors = new Tensor[outputIndices.Length];
            for (var i = 0; i < outputIndices.Length; i++)
            {
                outputTensors[i] = interpreter.GetTensor(firstOutput + i);
            }

            return(true);
        }
Example #6
0
        protected virtual void Dispose(bool disposing)
        {
            // No managed-only cleanup is needed here; the wrappers below are
            // released on every dispose path, then the references dropped.
            _interpreter?.Dispose();
            _interpreter = null;

            _model?.Dispose();
            _model = null;
        }
Example #7
0
        /// <summary>
        /// Release the IDisposable resources
        /// </summary>
        /// <param name="disposing">True if called from Dispose()</param>
        protected virtual void Dispose(bool disposing)
        {
            // Already disposed (or never initialized)?
            if (interpreter == null)
            {
                return;
            }

            if (disposing)
            {
                // Called from Dispose(): release the tensors, then the
                // interpreter, then the model it was built from.
                InputTensor.Dispose();
                OutputTensor.Dispose();
                interpreter.Dispose();
                flatBufferModel.Dispose();
            }

            // Drop the references either way so the object reads as disposed.
            InputTensor     = null;
            OutputTensor    = null;
            interpreter     = null;
            flatBufferModel = null;
        }
Example #8
0
        /// <summary>
        /// Load a TFLite model from disk and prepare an interpreter plus its
        /// first input and output tensors.
        /// </summary>
        /// <param name="modelFilename">Path to the flatbuffer model file.</param>
        /// <exception cref="ArgumentNullException">When the path is null.</exception>
        /// <exception cref="Exception">When the file is missing, the model
        /// identifier check fails, or tensor allocation fails.</exception>
        public MLModel(string modelFilename)
        {
            if (modelFilename == null)
            {
                throw new ArgumentNullException(nameof(modelFilename));
            }

            if (!File.Exists(modelFilename))
            {
                throw new Exception(modelFilename + " does not exist.");
            }

            // FIX: assign the field.  The original declared a local
            // ("var _model = ..."), which shadowed the field — the model was
            // never stored, so the dispose path that releases _model never
            // saw this instance and the native model leaked.
            _model = new FlatBufferModel(modelFilename);

            if (!_model.CheckModelIdentifier())
            {
                throw new Exception("Model identifier check failed");
            }

            _interpreter = new Interpreter(_model);

            Status allocateTensorStatus = _interpreter.AllocateTensors();

            if (allocateTensorStatus == Status.Error)
            {
                throw new Exception("Failed to allocate tensor");
            }

            // Cache the first input/output tensors (guards kept for parity
            // with the lazy-init style used by the other examples).
            if (_inputTensor == null)
            {
                int[] input = _interpreter.InputIndices;
                _inputTensor = _interpreter.GetTensor(input[0]);
            }

            if (_outputTensor == null)
            {
                int[] output = _interpreter.OutputIndices;
                _outputTensor = _interpreter.GetTensor(output[0]);
            }
        }
Example #9
0
    /// <summary>
    /// Download-completion handler: lazily loads the labels, the model, the
    /// op resolver and the interpreter.  Files[0] is the model file and
    /// Files[1] is the label list.
    /// </summary>
    private void onDownloadCompleted(object sender, System.ComponentModel.AsyncCompletedEventArgs e)
    {
        String localFileName = _downloadManager.Files[0].LocalFile;

        if (_labels == null)
        {
            _labels = File.ReadAllLines(_downloadManager.Files[1].LocalFile);
        }

        System.Diagnostics.Debug.Assert(File.Exists(localFileName), "File doesn't exist");

        if (_model == null)
        {
            _model = new FlatBufferModel(localFileName);
            if (!_model.CheckModelIdentifier())
            {
                // FIX: message typo corrected ("indentifier" -> "identifier")
                // to match the other examples in this source.
                throw new Exception("Model identifier check failed");
            }
        }

        if (_resolver == null)
        {
            _resolver = new BuildinOpResolver();
        }

        if (_interpreter == null)
        {
            _interpreter = new Interpreter(_model, _resolver);
            Status allocateTensorStatus = _interpreter.AllocateTensors();
            if (allocateTensorStatus == Status.Error)
            {
                throw new Exception("Failed to allocate tensor");
            }
        }
    }
Example #10
0
        /// <summary>
        /// Lazily loads the labels (Files[1]), the model (Files[0]) and the
        /// interpreter from the download manager, then caches the
        /// interpreter's input and output tensors.  Safe to call repeatedly.
        /// </summary>
        /// <param name="optDelegate">Optional delegate applied to the graph
        /// before tensor allocation (defaults to none).</param>
        private void ImportGraph(IDelegate optDelegate = null)
        {
#if UNITY_EDITOR || UNITY_IOS || UNITY_ANDROID || UNITY_STANDALONE
            UnityEngine.Debug.Log("Reading model definition");
#endif

            if (_labels == null)
            {
                String labelFileName = _downloadManager.Files[1].LocalFile;
                System.Diagnostics.Debug.Assert(File.Exists(labelFileName), String.Format("File {0} doesn't exist", labelFileName));

                // Asserts vanish in release builds, so check again and throw.
                if (!File.Exists(labelFileName))
                {
                    throw new Exception("Label file does not exist!");
                }
                _labels = File.ReadAllLines(labelFileName);
            }

            if (_model == null)
            {
                String modelFileName = _downloadManager.Files[0].LocalFile;
                System.Diagnostics.Debug.Assert(File.Exists(modelFileName), String.Format("File {0} doesn't exist", modelFileName));

                if (!File.Exists(modelFileName))
                {
                    throw new Exception("Model file does not exist!");
                }
                _model = new FlatBufferModel(modelFileName);
                if (!_model.CheckModelIdentifier())
                {
                    throw new Exception("Model identifier check failed");
                }
            }

            if (_interpreter == null)
            {
                _interpreter = new Interpreter(_model);

                // Detect Android: at compile time when building for Android,
                // otherwise by probing for the Mono.Android assembly.
                bool isAndroid = false;
#if UNITY_ANDROID && (!UNITY_EDITOR)
                isAndroid = true;
#else
                System.Reflection.Assembly monoAndroidAssembly = Emgu.TF.Util.Toolbox.FindAssembly("Mono.Android.dll");
                if (monoAndroidAssembly != null)
                {
                    isAndroid = true;
                }
#endif
                if (isAndroid)
                {
                    // On Android, route execution through the NNAPI delegate
                    // and use 4 threads; this happens before the tensor
                    // allocation below.
                    //_interpreter.ModifyGraphWithDelegate(TfLiteInvoke.DefaultGpuDelegateV2);
                    _interpreter.ModifyGraphWithDelegate(TfLiteInvoke.DefaultNnApiDelegate);
                    //_interpreter.UseNNAPI(false);
                    _interpreter.SetNumThreads(4);
                }
                //_interpreter.Build(_model);
                // Caller-supplied delegate (e.g. GPU) — also applied before
                // tensor allocation.
                if (optDelegate != null)
                {
                    _interpreter.ModifyGraphWithDelegate(optDelegate);
                }
                Status allocateTensorStatus = _interpreter.AllocateTensors();
                if (allocateTensorStatus == Status.Error)
                {
                    throw new Exception("Failed to allocate tensor");
                }
            }

            // Cache the first input tensor and the full output tensor list.
            if (_inputTensor == null)
            {
                _inputTensor = _interpreter.Inputs[0];
            }

            if (_outputTensors == null)
            {
                _outputTensors = _interpreter.Outputs;
            }
        }
Example #11
0
        /// <summary>
        /// Download-completion handler: lazily builds the model, resolver and
        /// interpreter (Files[0] = model, Files[1] = labels), runs one
        /// inference on _image[0] and reports the top-1 classification.
        /// </summary>
        private void onDownloadCompleted(object sender, System.ComponentModel.AsyncCompletedEventArgs e)
        {
            String localFileName = _downloadManager.Files[0].LocalFile;

            if (_labels == null)
            {
                _labels = File.ReadAllLines(_downloadManager.Files[1].LocalFile);
            }

            System.Diagnostics.Debug.Assert(File.Exists(localFileName), "File doesn't exist");

            if (_model == null)
            {
                _model = new FlatBufferModel(localFileName);
                if (!_model.CheckModelIdentifier())
                {
                    // FIX: message typo corrected ("indentifier" ->
                    // "identifier") to match the other examples.
                    throw new Exception("Model identifier check failed");
                }
            }

            if (_resolver == null)
            {
                _resolver = new BuildinOpResolver();
            }

            if (_interpreter == null)
            {
                _interpreter = new Interpreter(_model, _resolver);
                Status allocateTensorStatus = _interpreter.AllocateTensors();
                if (allocateTensorStatus == Status.Error)
                {
                    throw new Exception("Failed to allocate tensor");
                }
            }

            int[] input  = _interpreter.GetInput();
            int[] output = _interpreter.GetOutput();

            Tensor inputTensor  = _interpreter.GetTensor(input[0]);
            Tensor outputTensor = _interpreter.GetTensor(output[0]);

            // Load the image into the input tensor as 224x224, normalized
            // with mean 128 and scale 1/128, then time one inference.
            NativeImageIO.ReadImageFileToTensor(_image[0], inputTensor.DataPointer, 224, 224, 128.0f, 1.0f / 128.0f);
            Stopwatch watch = Stopwatch.StartNew();

            _interpreter.Invoke();
            watch.Stop();

            float[] probability = outputTensor.GetData() as float[];

            String resStr = String.Empty;

            if (probability != null)
            {
                // Arg-max over the scores.  Starting maxVal at 0 assumes
                // non-negative outputs (e.g. softmax probabilities).
                float maxVal = 0;
                int   maxIdx = 0;
                for (int i = 0; i < probability.Length; i++)
                {
                    if (probability[i] > maxVal)
                    {
                        maxVal = probability[i];
                        maxIdx = i;
                    }
                }
                resStr = String.Format("Object is {0} with {1}% probability. Recognition completed in {2} milliseconds.", _labels[maxIdx], maxVal * 100, watch.ElapsedMilliseconds);
            }

            SetImage(_image[0]);
            SetMessage(resStr);
        }
Example #12
0
        /// <summary>
        /// Initializes the node to process the
        /// </summary>
        /// <param name="path">The path to the model file</param>
        /// <param name="inputName">The name of the input to use</param>
        /// <param name="outputName">The name of the output to use</param>
        /// <param name="labels">The labels used for each of the classification
        /// steps.</param>
        /// <param name="numColumns">The number of columns in the grid of
        /// classifications.</param>
        /// <param name="numRows">The number of rows in the grid of classifications.</param>
        public Classifier_TFLite(string path, string inputName, string outputName,
                                 IReadOnlyList <string> labels,
                                 int numColumns = 1, int numRows = 1)
        {
            // Load the flatbuffer model definition for the TFlite model
            flatBufferModel = new FlatBufferModel(path);
            if (!flatBufferModel.CheckModelIdentifier())
            {
                throw new Exception("Model identifier check failed");
            }
            // Set up the TFLite interpreter to allow using the model
            interpreter = new Interpreter(flatBufferModel);
            var status = interpreter.AllocateTensors();

            if (status == Status.Error)
            {
                throw new Exception("Failed to allocate tensor");
            }

            // Get the inputs and outputs from the model
            // Find the input
            // NOTE(review): if no input tensor matches inputName, the loop
            // silently leaves the LAST input selected — confirm this
            // fallback is intended rather than an error.
            foreach (var tensor in interpreter.Inputs)
            {
                InputTensor = tensor;
                // Does the specified input match the name of this tensor?
                if (inputName == tensor.Name)
                {
                    // Yes, they match
                    break;
                }
            }
            // Find the output
            // NOTE(review): same silent last-tensor fallback as the input
            // search above when outputName matches nothing.
            foreach (var tensor in interpreter.Outputs)
            {
                OutputTensor = tensor;
                // Does the specified output match the name of this tensor?
                if (outputName == tensor.Name)
                {
                    // Yes, they match
                    break;
                }
            }

            // Get the size from input dimensions
            // If the number of elements in Dims is 3, they are:
            //   0: the height
            //   1: the width
            //   2: the number of channels (ie 3 for RGB)
            // If the number of elements in Dims is 4, then:
            //   0: the number of images passed
            //   1: the height
            //   2: the width
            //   3: the number of channels (ie 3 for RGB)
            var dim    = InputTensor.Dims;
            var dimOfs = 0;

            // Skip the leading batch dimension when present (4-D input).
            if (dim.Length > 3)
            {
                dimOfs = 1;
            }
            var height = dim[dimOfs + 0];
            var width  = dim[dimOfs + 1];

            inputSize = new Size(width, height);

            // Get whether it is a float, int, etc
            // Only float32 and uint8 inputs are supported by this node.
            inputType = InputTensor.Type;
            if (inputType != DataType.Float32 && inputType != DataType.UInt8)
            {
                throw new Exception(String.Format($"Data Type of {inputType} is not supported."));
            }

            this.labels = labels;

            // A 1x1 grid means whole-image classification; anything larger
            // is treated as localization over a grid of regions.
            isLocalization = numColumns * numRows != 1;
            if (!isLocalization)
            {
                // Create an array to be sorted
                // (identity permutation over the label indices)
                indices = new int[labels.Count];
                for (var idx = 0; idx < indices.Length; idx++)
                {
                    indices[idx] = idx;
                }
            }

            // Allocate the grid of regions that are classified
            // For classification, rows are capped at 5 (top-5 results) when
            // there are at least 5 labels; localization keeps numRows.
            this.numColumns = numColumns;
            this.numRows    = numRows;
            outputGrid      = new Classification[isLocalization?numRows:(labels.Count < 5?numRows:5), numColumns];
        }
Example #13
0
        /// <summary>
        /// Lazily load the labels (Files[1]), the model (Files[0]) and the
        /// interpreter, then cache the first input and output tensors.
        /// Safe to call more than once.
        /// </summary>
        private void ImportGraph()
        {
#if UNITY_EDITOR || UNITY_IOS || UNITY_ANDROID || UNITY_STANDALONE
            UnityEngine.Debug.Log("Reading model definition");
#endif

            if (_labels == null)
            {
                String labelPath = _downloadManager.Files[1].LocalFile;
                if (!File.Exists(labelPath))
                {
                    throw new Exception(String.Format("File {0} doesn't exist", labelPath));
                }
                _labels = File.ReadAllLines(labelPath);
            }

            if (_model == null)
            {
                String modelPath = _downloadManager.Files[0].LocalFile;
                if (!File.Exists(modelPath))
                {
                    throw new Exception(String.Format("File {0} doesn't exist", modelPath));
                }
                _model = new FlatBufferModel(modelPath);
                if (!_model.CheckModelIdentifier())
                {
                    throw new Exception("Model identifier check failed");
                }
            }

            if (_interpreter == null)
            {
                _interpreter = new Interpreter(_model);

                // (A commented-out NNAPI-delegate experiment was removed here.)
                if (_interpreter.AllocateTensors() == Status.Error)
                {
                    throw new Exception("Failed to allocate tensor");
                }
            }

            // Cache the first input and output tensors.
            if (_inputTensor == null)
            {
                _inputTensor = _interpreter.GetTensor(_interpreter.InputIndices[0]);
            }

            if (_outputTensor == null)
            {
                _outputTensor = _interpreter.GetTensor(_interpreter.OutputIndices[0]);
            }
        }