Example 1 (score: 0)
        /// <summary>
        /// Loads a TensorFlow Lite flat-buffer model from <paramref name="modelData"/>,
        /// builds the interpreter and caches the input/output tensors.
        /// </summary>
        /// <param name="modelData">Stream containing the flat-buffer model bytes.</param>
        /// <param name="useNumThreads">When true, lets the interpreter use one thread per logical processor.</param>
        /// <returns>True when the model loaded and tensors were allocated; false otherwise.</returns>
        public bool Initialize(Stream modelData, bool useNumThreads)
        {
            using (var builder = new TextRecognizer.Builder(MainActivity.context))
            {
                txtRecognizer = builder.Build();
            }

            // Buffer the whole stream in memory; FlatBufferModel consumes the byte array.
            using (var ms = new MemoryStream())
            {
                modelData.CopyTo(ms);

                model = new FlatBufferModel(ms.ToArray());
            }

            if (!model.CheckModelIdentifier())
            {
                // Fix: release the native model instead of leaking it on failure.
                model.Dispose();
                model = null;
                return false;
            }

            var op = new BuildinOpResolver();

            interpreter = new Interpreter(model, op);

            if (useNumThreads)
            {
                interpreter.SetNumThreads(Environment.ProcessorCount);
            }

            var allocateTensorStatus = interpreter.AllocateTensors();

            if (allocateTensorStatus == Status.Error)
            {
                // Fix: release native resources instead of leaking them on failure.
                interpreter.Dispose();
                interpreter = null;
                model.Dispose();
                model = null;
                return false;
            }

            var input = interpreter.GetInput();

            inputTensor = interpreter.GetTensor(input[0]);

            // Output tensor indices are contiguous starting at the first output index.
            var output      = interpreter.GetOutput();
            var outputIndex = output[0];

            outputTensors = new Tensor[output.Length];
            for (var i = 0; i < output.Length; i++)
            {
                outputTensors[i] = interpreter.GetTensor(outputIndex + i);
            }

            // Fix: mark the service as initialized so Recognize() does not throw
            // "Initialize TensorflowLiteService first" after a successful setup.
            initialized = true;
            return true;
        }
        /// <summary>
        /// Runs the model over the supplied pixel colors using a throw-away
        /// interpreter scoped to this single invocation.
        /// </summary>
        /// <param name="colors">Input image pixels as packed ints.</param>
        /// <exception cref="Exception">Thrown when the service has not been initialized.</exception>
        public void Recognize(int[] colors)
        {
            if (!initialized)
            {
                throw new Exception("Initialize TensorflowLiteService first");
            }

            // Publish the raw input so any subscriber to InputTensorMessage can observe it.
            var inputMessage = new InputTensorMessage()
            {
                Colors = colors,
            };
            MessagingCenter.Instance.Send(this, nameof(AR.InputTensorMessage), inputMessage);

            // Resolver and interpreter are disposed as soon as the invocation completes.
            using (var resolver = new BuildinOpResolver())
            using (var localInterpreter = new Interpreter(model, resolver))
            {
                InvokeInterpreter(colors, localInterpreter);
            }
        }
Example 3 (score: 0)
    /// <summary>
    /// Download-completed handler: loads the labels file, the flat-buffer model
    /// and the interpreter (each created at most once) from the downloaded files.
    /// </summary>
    /// <param name="sender">Event source (unused).</param>
    /// <param name="e">Completion details (unused).</param>
    /// <exception cref="Exception">
    /// Thrown when the model identifier check or tensor allocation fails.
    /// </exception>
    private void onDownloadCompleted(object sender, System.ComponentModel.AsyncCompletedEventArgs e)
    {
        // Files[0] is the model, Files[1] is the labels file.
        String localFileName = _downloadManager.Files[0].LocalFile;

        if (_labels == null)
        {
            _labels = File.ReadAllLines(_downloadManager.Files[1].LocalFile);
        }

        System.Diagnostics.Debug.Assert(File.Exists(localFileName), "File doesn't exist");

        if (_model == null)
        {
            _model = new FlatBufferModel(localFileName);
            if (!_model.CheckModelIdentifier())
            {
                // Fix: corrected typo in the error message ("indentifier" -> "identifier").
                throw new Exception("Model identifier check failed");
            }
        }

        if (_resolver == null)
        {
            _resolver = new BuildinOpResolver();
        }

        if (_interpreter == null)
        {
            _interpreter = new Interpreter(_model, _resolver);
            Status allocateTensorStatus = _interpreter.AllocateTensors();
            if (allocateTensorStatus == Status.Error)
            {
                throw new Exception("Failed to allocate tensor");
            }
        }
    }
Example 4 (score: 0)
        /// <summary>
        /// Download-completed handler: lazily builds the model/resolver/interpreter
        /// from the downloaded files, runs inference on the first image, and reports
        /// the most probable label along with the inference time.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Completion details (unused).</param>
        /// <exception cref="Exception">
        /// Thrown when the model identifier check or tensor allocation fails.
        /// </exception>
        private void onDownloadCompleted(object sender, System.ComponentModel.AsyncCompletedEventArgs e)
        {
            // Files[0] is the model, Files[1] is the labels file.
            String localFileName = _downloadManager.Files[0].LocalFile;

            if (_labels == null)
            {
                _labels = File.ReadAllLines(_downloadManager.Files[1].LocalFile);
            }

            System.Diagnostics.Debug.Assert(File.Exists(localFileName), "File doesn't exist");

            if (_model == null)
            {
                _model = new FlatBufferModel(localFileName);
                if (!_model.CheckModelIdentifier())
                {
                    // Fix: corrected typo in the error message ("indentifier" -> "identifier").
                    throw new Exception("Model identifier check failed");
                }
            }

            if (_resolver == null)
            {
                _resolver = new BuildinOpResolver();
            }

            if (_interpreter == null)
            {
                _interpreter = new Interpreter(_model, _resolver);
                Status allocateTensorStatus = _interpreter.AllocateTensors();
                if (allocateTensorStatus == Status.Error)
                {
                    throw new Exception("Failed to allocate tensor");
                }
            }

            int[] input  = _interpreter.GetInput();
            int[] output = _interpreter.GetOutput();

            Tensor inputTensor  = _interpreter.GetTensor(input[0]);
            Tensor outputTensor = _interpreter.GetTensor(output[0]);

            // 224x224 with mean 128 / scale 1/128 — presumably a MobileNet-style
            // preprocessing contract; TODO confirm against the model's expected input.
            NativeImageIO.ReadImageFileToTensor(_image[0], inputTensor.DataPointer, 224, 224, 128.0f, 1.0f / 128.0f);
            Stopwatch watch = Stopwatch.StartNew();

            _interpreter.Invoke();
            watch.Stop();

            float[] probability = outputTensor.GetData() as float[];

            String resStr = String.Empty;

            if (probability != null)
            {
                // Arg-max over the class probabilities.
                float maxVal = 0;
                int   maxIdx = 0;
                for (int i = 0; i < probability.Length; i++)
                {
                    if (probability[i] > maxVal)
                    {
                        maxVal = probability[i];
                        maxIdx = i;
                    }
                }
                resStr = String.Format("Object is {0} with {1}% probability. Recognition completed in {2} milliseconds.", _labels[maxIdx], maxVal * 100, watch.ElapsedMilliseconds);
            }

            SetImage(_image[0]);
            SetMessage(resStr);
        }