Beispiel #1
0
        /// <summary>
        /// Runs the frozen TensorFlow graph over the supplied photo and returns the
        /// best-scoring label from the "detection_classes" output.
        /// </summary>
        /// <param name="image">The captured/picked media file to classify.</param>
        /// <returns>The label whose score is highest.</returns>
        public string getPrediction(Plugin.Media.Abstractions.MediaFile image)
        {
            // NOTE(review): blocking on the task can deadlock on a UI SynchronizationContext;
            // consider making this method async. Kept synchronous to preserve the signature.
            Bitmap bitmap = getBitmap(image).GetAwaiter().GetResult();

            var assets             = Android.App.Application.Context.Assets;
            var inferenceInterface = new TensorFlowInferenceInterface(assets, "frozen_inference_graph.pb");

            List<string> labels;
            // FIX: dispose the StreamReader (and the underlying asset stream) instead of leaking it.
            using (var sr = new StreamReader(assets.Open("labels.txt")))
            {
                labels = sr.ReadToEnd()
                           .Split('\n')
                           .Select(s => s.Trim())
                           .Where(s => !string.IsNullOrEmpty(s))
                           .ToList();
            }

            var floatValues = GetBitmapPixels(bitmap);
            var outputs     = new float[labels.Count];

            inferenceInterface.Feed("ToFloat", floatValues, 1, 227, 227, 3);
            inferenceInterface.Run(new[] { "detection_classes" });
            inferenceInterface.Fetch("detection_classes", outputs);

            // Argmax over the scores; on ties the earliest label wins, matching the
            // original stable OrderByDescending().First() behavior.
            var best = 0;
            for (var i = 1; i < outputs.Length; ++i)
            {
                if (outputs[i] > outputs[best])
                {
                    best = i;
                }
            }

            return labels[best];
        }
Beispiel #2
0
        /// <summary>
        /// Downloads a TensorFlow model from the Azure Functions export endpoint and loads it,
        /// then reads the classification labels from the bundled "labels2.txt" asset.
        /// </summary>
        /// <param name="url">Requested model url; currently overridden by the export endpoint (see HACK below).</param>
        /// <returns>A task that completes once the model and labels are loaded.</returns>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="url"/> is null or whitespace.</exception>
        public Task LoadModelFromUrl(string url)
        {
            if (string.IsNullOrWhiteSpace(url))
            {
                throw new ArgumentNullException(nameof(url));
            }

            return Task.Run(async() =>
            {
                // HACK: unzip model in azure instead of device and get the stream
                url = $"{Constants.AzureFunctionsUrl}/MLExport?platform=tensorflow&format=binary&code={Constants.AzureFunctionsCode}";

                // TODO: we still need labels

                // FIX: download from the url built above — it was previously shadowed by a
                // hard-coded endpoint with an embedded key — and dispose the HttpClient.
                byte[] rawModelBytes;
                using (var client = new HttpClient())
                {
                    rawModelBytes = await client.GetByteArrayAsync(url);
                }

                // FIX: dispose the model stream once the inference interface has consumed it.
                using (Stream zipModelStream = new MemoryStream(rawModelBytes))
                {
                    inferenceInterface = new TensorFlowInferenceInterface(zipModelStream);
                }

                var assets = Android.App.Application.Context.Assets;
                using (var sr = new StreamReader(assets.Open("labels2.txt")))
                {
                    var content = sr.ReadToEnd();
                    _loadedLabels = content.Split('\n').Select(s => s.Trim()).Where(s => !string.IsNullOrEmpty(s)).ToList();
                }
            });
        }
        /// <summary>
        /// Loads the labels (from the file system if present, otherwise from the app assets)
        /// and the TensorFlow model, then scans the graph for a data-normalization layer.
        /// </summary>
        /// <param name="modelName">Asset name of the frozen model file.</param>
        /// <param name="labelsFileName">File-system path or asset name of the labels file.</param>
        /// <param name="modelType">The model flavor to configure for.</param>
        /// <exception cref="ImageClassifierException">Wraps any failure while loading model or labels.</exception>
        internal void Init(string modelName, string labelsFileName, ModelType modelType)
        {
            _modelType = modelType;

            try
            {
                var assets = Android.App.Application.Context.Assets;
                // Prefer a file on disk (e.g. a downloaded model) over the bundled asset.
                using (var sr = new StreamReader(File.Exists(labelsFileName) ? File.OpenRead(labelsFileName) : assets.Open(labelsFileName)))
                {
                    var content = sr.ReadToEnd();
                    _labels = content.Split('\n').Select(s => s.Trim()).Where(s => !string.IsNullOrEmpty(s)).ToList();
                }

                _inferenceInterface = new TensorFlowInferenceInterface(assets, modelName);
                InputSize           = Convert.ToInt32(_inferenceInterface.GraphOperation(InputName).Output(0).Shape().Size(1));

                // Walk the graph once to detect whether the model has its own normalization layer.
                var iter = _inferenceInterface.Graph().Operations();
                while (iter.HasNext && !_hasNormalizationLayer)
                {
                    // FIX: guard the cast before dereferencing — 'as' can yield null.
                    if (iter.Next() is Operation op && op.Name().Contains(DataNormLayerPrefix))
                    {
                        _hasNormalizationLayer = true;
                    }
                }
            }
            catch (Exception ex)
            {
                throw new ImageClassifierException("Failed to load the model - check the inner exception for more details", ex);
            }
        }
Beispiel #4
0
 /// <summary>
 /// Loads the TensorFlow model and the classification labels from the
 /// current activity's assets.
 /// </summary>
 public PhotoDetector()
 {
     var assets = CrossCurrentActivity.Current.Activity.Assets;

     _inferenceInterface = new TensorFlowInferenceInterface(assets, ModelFile);

     using (var reader = new StreamReader(assets.Open(LabelFile)))
     {
         _labels = reader.ReadToEnd()
                         .Split('\n')
                         .Select(x => x.Trim())
                         .Where(x => !string.IsNullOrEmpty(x))
                         .ToArray();
     }
 }
        /// <summary>
        /// Reads the Inception labels into memory and loads the TensorFlow model
        /// from the current activity's assets.
        /// </summary>
        public InceptionClassifier()
        {
            var assetManager = CrossCurrentActivity.Current.Activity.Assets;

            labels = new List<string>(ReadLabelsIntoMemory(assetManager, LABEL_FILE));

            inferenceInterface = new TensorFlowInferenceInterface(assetManager, MODEL_FILE);
        }
Beispiel #6
0
        /// <summary>
        /// Creates a pipeline backed by a TensorFlow model loaded from the application assets.
        /// </summary>
        /// <param name="modelName">Asset name of the frozen model file.</param>
        /// <param name="modelInputSize">Input edge length (in pixels) the model expects.</param>
        /// <param name="inputName">Name of the graph's input tensor.</param>
        /// <param name="outputName">Name of the graph's output node.</param>
        public TensorflowContribAndroidPipeline(string modelName, int modelInputSize, string inputName, string outputName)
        {
            inferenceInterface = new TensorFlowInferenceInterface(Application.Context.Assets, modelName);

            ModelInputSize = modelInputSize;
            InputName      = inputName;
            OutputName     = outputName;
        }
Beispiel #7
0
        /// <summary>
        /// Loads a TensorFlow model (and its companion ".txt" metadata file) from the resource
        /// directory, unless the same model is already loaded.
        /// </summary>
        /// <param name="path">Model file name; its extension is replaced with ".pb".</param>
        /// <exception cref="RuntimeException">Thrown when the model cannot be loaded.</exception>
        public void LoadModel(string path)
        {
            var name = Path.GetFileNameWithoutExtension(path);

            path = name + ".pb";
            if (path.Equals(LastModelLoaded))
            {
                return;
            }

            // The metadata file holds: line 0 = input name, line 1 = output name,
            // line 2 = input shape formatted like "(n, h, w, c)".
            var           info  = CorePackage.Entity.Type.Resource.Instance.Directory + name + ".txt";
            List<string> lines = new List<string>();

            using (StreamReader sr = new StreamReader(info))
            {
                while (sr.Peek() >= 0)
                {
                    lines.Add(sr.ReadLine());
                }
            }

            _inputName  = lines[0];
            _outputName = lines[1];
            var shape = lines[2].Replace(" ", "").Replace("(", "").Replace(")", "").Split(",");

            _inputSize = int.Parse(shape[1]);
            _lastDim   = int.Parse(shape[3]);
            CorePackage.Entity.Type.Resource.Instance.Directory = Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData) + "/";

            try
            {
                // FIX: dispose the model file stream — it was previously opened and never closed.
                using (var modelFile = File.Open(CorePackage.Entity.Type.Resource.Instance.Directory + path, FileMode.Open))
                {
                    _inferenceInterface = new TensorFlowInferenceInterface(modelFile);
                }

                LastModelLoaded = path;

                // Scan the graph for a data-normalization layer.
                var iter = _inferenceInterface.Graph().Operations();
                while (iter.HasNext && !_hasNormalizationLayer)
                {
                    // FIX: guard the cast before dereferencing — 'as' can yield null.
                    if (iter.Next() is Operation op && op.Name().Contains(DataNormLayerPrefix))
                    {
                        _hasNormalizationLayer = true;
                    }
                }
            }
            catch (Exception ex)
            {
                throw new RuntimeException("Failed to load the model - check the inner exception for more details" + ex.Message);
            }
        }
        /// <summary>
        /// Loads "model.pb" and the classification labels ("labels.txt") from the app assets.
        /// </summary>
        public ImageClassifier()
        {
            var assets = Application.Context.Assets;

            inferenceInterface = new TensorFlowInferenceInterface(assets, "model.pb");

            using (var reader = new StreamReader(assets.Open("labels.txt")))
            {
                labels = reader.ReadToEnd()
                               .Split('\n')
                               .Select(line => line.Trim())
                               .Where(line => !string.IsNullOrEmpty(line))
                               .ToList();
            }
        }
Beispiel #9
0
        /// <summary>
        /// Initializes a new instance of the <see cref="TensorFlowImageModel"/> class and loads the TensorFlow model.
        /// </summary>
        /// <param name="assets">A reference to an <see cref="AssetManager"/> that will be used to load the model file.</param>
        /// <param name="modelAssetPath">The path to the model file (e.g. "file:///android_asset/model.pb")</param>
        /// <param name="inputTensorName">The name of the input Tensor of the model.</param>
        /// <param name="outputNodeNames">The names of the output nodes of the model.</param>
        /// <param name="outputSizes">The number of values expected in the array at each of the output nodes.</param>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="outputNodeNames"/> or <paramref name="outputSizes"/> is null.</exception>
        /// <exception cref="ArgumentException">Thrown when the two output arrays differ in length.</exception>
        public TensorFlowImageModel(AssetManager assets, string modelAssetPath, string inputTensorName, string[] outputNodeNames, int[] outputSizes)
        {
            // FIX: fail with a clear ArgumentNullException instead of a NullReferenceException
            // when either array is missing.
            if (outputNodeNames == null)
            {
                throw new ArgumentNullException(nameof(outputNodeNames));
            }

            if (outputSizes == null)
            {
                throw new ArgumentNullException(nameof(outputSizes));
            }

            if (outputNodeNames.Length != outputSizes.Length)
            {
                throw new ArgumentException("outputNodeNames (length=" + outputNodeNames.Length + ") and outputSizes (length=" + outputSizes.Length + ") must be of equal length");
            }

            this.tfInterface     = new TensorFlowInferenceInterface(assets, modelAssetPath);
            this.inputTensorName = inputTensorName;
            this.outputNodeNames = outputNodeNames;
            this.outputSizes     = outputSizes;
        }
        /// <summary>
        /// Loads the model named by <c>ModelFileName</c> and its labels (parsed via
        /// <c>LoadLabels</c>) from the app assets.
        /// </summary>
        public ImageClassifier()
        {
            var assets = Application.Context.Assets;

            _inferenceInterface = new TensorFlowInferenceInterface(assets, ModelFileName);

            string labelContent;
            using (var reader = new StreamReader(assets.Open(LabelsFileName)))
            {
                labelContent = reader.ReadToEnd();
            }

            _labels = LoadLabels(labelContent);
        }
        /// <summary>
        /// Loads the labels and the Inception model, logs the output layer size, and runs a
        /// one-off recognition against the bundled "husky.png" image at construction time.
        /// </summary>
        /// <param name="assetManager">Asset manager used to load the model, labels and test image.</param>
        public InceptionClassifier(AssetManager assetManager)
        {
            labels.AddRange(ReadLabelsIntoMemory(assetManager, LABEL_FILE));
            inferenceInterface = new TensorFlowInferenceInterface(assetManager, MODEL_FILE);

            var outputLayerSize = inferenceInterface.GraphOperation(OUTPUT_NAME).Output(0).Shape().Size(1);

            //TODO: Confirm in Android, because my results are 1001 labels, 1008 classes
            System.Console.WriteLine($"Read {labels.Count} labels, output layer size is {outputLayerSize}");

            // Warm-up / sanity check: classify a known image once; the result is discarded.
            var huskyBitmap = BitmapCreate(assetManager, "husky.png");
            Recognize(huskyBitmap);
        }
Beispiel #12
0
        /// <summary>
        /// Builds a <see cref="TensorflowModel"/> by loading the model and label files
        /// from the application assets.
        /// </summary>
        /// <returns>A fully initialized <see cref="TensorflowModel"/>.</returns>
        public static TensorflowModel CreateTensorflowModel()
        {
            var assets             = Application.Context.Assets;
            var inferenceInterface = new TensorFlowInferenceInterface(assets, ModelPath);

            string[] labels;
            using (var reader = new StreamReader(assets.Open(LabelsPath)))
            {
                labels = reader.ReadToEnd()
                               .Split('\n')
                               .Select(line => line.Trim())
                               .Where(line => !string.IsNullOrEmpty(line))
                               .ToArray();
            }

            return new TensorflowModel(labels, inferenceInterface);
        }
        /// <summary>
        /// Classifies a JPEG/PNG byte array with the "people-or-not" model and raises
        /// <see cref="ClassificationCompleted"/> with a label-to-score dictionary.
        /// </summary>
        /// <param name="bytes">Encoded image bytes to classify.</param>
        public void Classify(byte[] bytes)
        {
            var assets             = Application.Context.Assets;
            var inferenceInterface = new TensorFlowInferenceInterface(assets, "people-or-not-model.pb");

            List<string> labels;
            // FIX: dispose the StreamReader instead of leaking the asset stream.
            using (var streamReader = new StreamReader(assets.Open("people-or-not-labels.txt")))
            {
                labels = streamReader
                         .ReadToEnd()
                         .Split('\n')
                         .Select(s => s.Trim())
                         .Where(s => !string.IsNullOrEmpty(s))
                         .ToList();
            }

            //page 354
            var bitmap        = BitmapFactory.DecodeByteArray(bytes, 0, bytes.Length);
            var resizedBitMap = Bitmap.CreateScaledBitmap(bitmap, 227, 227, false).Copy(Bitmap.Config.Argb8888, false);

            var floatValues = new float[227 * 227 * 3];
            // FIX: GetPixels fills one packed int per pixel, so the buffer must be
            // width * height; the old width * height * 3 buffer made the loop below
            // index floatValues far past its end.
            var intValues = new int[227 * 227];

            resizedBitMap.GetPixels(intValues, 0, 227, 0, 0, 227, 227);

            for (int i = 0; i < intValues.Length; i++)
            {
                var val = intValues[i];
                // FIX: select the G and B channel bytes with shifts; the previous
                // "(val & 8)" / "(val & 16)" masked single bits, not channels
                // (compare the correct unpacking used elsewhere in this file).
                floatValues[i * 3 + 0] = ((val & 0xFF) - 104);
                floatValues[i * 3 + 1] = (((val >> 8) & 0xFF) - 117);
                floatValues[i * 3 + 2] = (((val >> 16) & 0xFF) - 123);
            }

            var outputs = new float[labels.Count];

            inferenceInterface.Feed("Placeholder", floatValues, 1, 227, 227, 3);
            inferenceInterface.Run(new[] { "loss" });
            inferenceInterface.Fetch("loss", outputs);

            var results = new Dictionary<string, float>();

            for (var i = 0; i < labels.Count; i++)
            {
                results.Add(labels[i], outputs[i]);
            }

            ClassificationCompleted?.Invoke(this, new ClassificationEventArgs(results));
        }
        /// <summary>
        /// Loads the model and label files named in <paramref name="parameters"/> from the
        /// app assets and derives the model's input size from the graph.
        /// </summary>
        /// <param name="modelType">The model flavor being initialized.</param>
        /// <param name="parameters">parameters[0] = model asset name, parameters[1] = label asset name.</param>
        public async Task InitializeAsync(ModelType modelType, params string[] parameters)
        {
            var assets    = Android.App.Application.Context.Assets;
            var modelFile = $"file:///android_asset/{parameters[0]}";
            var labelFile = parameters[1];

            labels = new List<string>();

            using var reader = new StreamReader(assets.Open(labelFile));
            var content = await reader.ReadToEndAsync();

            // Split on '\n' and strip trailing '\r' so both Unix and Windows line endings work.
            foreach (var line in content.Split(new[] { '\n' }, StringSplitOptions.RemoveEmptyEntries))
            {
                labels.Add(line.TrimEnd('\r'));
            }

            inferenceInterface = new TensorFlowInferenceInterface(assets, modelFile);
            inputSize          = (int)inferenceInterface.GraphOperation(INPUT_NAME).Output(0).Shape().Size(1);
        }
Beispiel #15
0
        /// <summary>
        /// Lazily loads "model.pb" and "labels.txt" from the app assets on a background thread.
        /// Subsequent calls are no-ops once the labels are cached.
        /// </summary>
        /// <returns>A task that completes when model and labels are available.</returns>
        public Task LoadModel()
        {
            // Already loaded — nothing to do.
            if (_loadedLabels != null)
            {
                return Task.FromResult(true);
            }

            return Task.Run(() =>
            {
                var assets = Android.App.Application.Context.Assets;

                inferenceInterface = new TensorFlowInferenceInterface(assets, "model.pb");

                using (var reader = new StreamReader(assets.Open("labels.txt")))
                {
                    _loadedLabels = reader.ReadToEnd()
                                          .Split('\n')
                                          .Select(line => line.Trim())
                                          .Where(line => !string.IsNullOrEmpty(line))
                                          .ToList();
                }
            });
        }
Beispiel #16
0
        /// <summary>
        /// Initializes the classifier: delegates base configuration, then loads the labels
        /// and the model from the app assets.
        /// </summary>
        /// <exception cref="ImageClassifierException">Wraps any failure while loading.</exception>
        public override void Init(string modelName, ModelType modelType, int inputSize = 227, string inputName = "Placeholder", string outputName = "loss")
        {
            base.Init(modelName, modelType, inputSize, inputName, outputName);

            try
            {
                var assets = Android.App.Application.Context.Assets;

                using (var reader = new StreamReader(assets.Open("labels.txt")))
                {
                    _labels = reader.ReadToEnd()
                                    .Split('\n')
                                    .Select(line => line.Trim())
                                    .Where(line => !string.IsNullOrEmpty(line))
                                    .ToList();
                }

                _inferenceInterface = new TensorFlowInferenceInterface(assets, modelName);
            }
            catch (Exception ex)
            {
                throw new ImageClassifierException("Failed to load the model - check the inner exception for more details", ex);
            }
        }
Beispiel #17
0
        /// <summary>
        /// Configures the per-channel image means for the model type, loads the labels from
        /// the app assets and creates the inference interface.
        /// </summary>
        /// <param name="modelType">Model flavor; supplies the RGB mean values.</param>
        /// <param name="parameters">parameters[0] = model asset name, parameters[1] = label asset name.</param>
        public async Task InitializeAsync(ModelType modelType, params string[] parameters)
        {
            ImageMeanR = modelType.ImageMeanR();
            ImageMeanG = modelType.ImageMeanG();
            ImageMeanB = modelType.ImageMeanB();

            var assets    = Android.App.Application.Context.Assets;
            var modelFile = $"file:///android_asset/{parameters[0]}";
            var labelFile = parameters[1];

            labels = new List<string>();

            using (var reader = new StreamReader(assets.Open(labelFile)))
            {
                var content = await reader.ReadToEndAsync();

                // Split on '\n' and strip trailing '\r' so both Unix and Windows line endings work.
                foreach (var line in content.Split(new[] { '\n' }, StringSplitOptions.RemoveEmptyEntries))
                {
                    labels.Add(line.TrimEnd('\r'));
                }
            }

            inferenceInterface = new TensorFlowInferenceInterface(assets, modelFile);
        }
        /// <summary>
        /// Receives the camera thumbnail, classifies it with "model.pb", shows the
        /// best-matching label in the UI and reads the result aloud.
        /// </summary>
        protected override void OnActivityResult(int requestCode, [GeneratedEnum] Result resultCode, Intent data)
        {
            base.OnActivityResult(requestCode, resultCode, data);

            Bitmap bitmap = (Bitmap)data.Extras.Get("data");
            imageView.SetImageBitmap(bitmap);

            var assets             = Application.Context.Assets;
            var inferenceInterface = new TensorFlowInferenceInterface(assets, "model.pb");

            List<string> labels;
            // FIX: dispose the StreamReader (and the underlying asset stream) instead of leaking it.
            using (var sr = new StreamReader(assets.Open("labels.txt")))
            {
                labels = sr.ReadToEnd()
                           .Split('\n')
                           .Select(s => s.Trim())
                           .Where(s => !string.IsNullOrEmpty(s))
                           .ToList();
            }

            var floatValues = GetBitmapPixels(bitmap);
            var outputs     = new float[labels.Count];

            inferenceInterface.Feed("Placeholder", floatValues, 1, 224, 224, 3);
            inferenceInterface.Run(new[] { "loss" });
            inferenceInterface.Fetch("loss", outputs);

            var results = new Recognition[labels.Count];

            for (int i = 0; i < labels.Count; i++)
            {
                results[i] = new Recognition {
                    Confidence = outputs[i], Label = labels[i]
                };
            }

            // Highest confidence first.
            Array.Sort(results, (x, y) => y.Confidence.CompareTo(x.Confidence));
            var result = String.Format("I think the cat on this picture is: {0}. I'm {1} confident", results[0].Label, results[0].Confidence.ToString("P1"));

            ((TextView)FindViewById(Resource.Id.result)).Text = result;
            // NOTE(review): this speaks the result via SpeakAsync and then again via
            // TextToSpeechAsync once the first utterance finishes — confirm the double
            // announcement is intended.
            TextToSpeech.SpeakAsync(result).ContinueWith((b) => { Task.Run(() => TextToSpeechAsync(result)); });
        }
        /// <summary>
        /// Classifies an encoded image against "model.pb" and raises
        /// <see cref="ClassificationCompleted"/> with a label-to-score dictionary.
        /// NOTE(review): async void means callers cannot await completion or observe
        /// exceptions; kept to preserve the public signature.
        /// </summary>
        /// <param name="bytes">Encoded image bytes to classify.</param>
        public async void Classification(byte[] bytes)
        {
            var assets             = Application.Context.Assets;
            var inferenceInterface = new TensorFlowInferenceInterface(assets, "model.pb");

            // The model's expected input edge length, taken from the placeholder's shape.
            var inputSize = (int)inferenceInterface.GraphOperation("Placeholder").Output(0).Shape().Size(1);

            List<string> labels;

            using (var streamReader = new StreamReader(assets.Open("labels.txt")))
            {
                labels = streamReader.ReadToEnd().Split('\n').Select(s => s.Trim()).Where(s => !string.IsNullOrEmpty(s))
                         .ToList();
            }

            var bitmap = await BitmapFactory.DecodeByteArrayAsync(bytes, 0, bytes.Length);

            var resizedMap = Bitmap.CreateScaledBitmap(bitmap, inputSize, inputSize, false)
                             .Copy(Bitmap.Config.Argb8888, false);

            var floatValues = new float[inputSize * inputSize * 3];
            var intValues   = new int[inputSize * inputSize];

            resizedMap.GetPixels(intValues, 0, inputSize, 0, 0, inputSize, inputSize);

            // Unpack each packed pixel int into mean-subtracted per-channel floats.
            for (var i = 0; i < intValues.Length; ++i)
            {
                var intValue = intValues[i];
                floatValues[i * 3 + 0] = (intValue & 0xFF) - 105f;
                floatValues[i * 3 + 1] = ((intValue >> 8) & 0xFF) - 117f;
                floatValues[i * 3 + 2] = ((intValue >> 16) & 0xFF) - 124f;
            }

            var outputs = new float[labels.Count];

            inferenceInterface.Feed("Placeholder", floatValues, 1, inputSize, inputSize, 3);
            inferenceInterface.Run(new[] { "loss" }, true);
            inferenceInterface.Fetch("loss", outputs);

            var result = new Dictionary<string, float>();

            for (var i = 0; i < labels.Count; i++)
            {
                result.Add(labels[i], outputs[i]);
            }

            // FIX: removed the unused GraphOperation("loss") lookup and the dead
            // max-confidence loop — neither value was ever used.
            ClassificationCompleted?.Invoke(this, new ClassificationEventArgs(result));
        }
Beispiel #20
0
 /// <summary>
 /// Wraps an already-loaded TensorFlow inference interface together with its labels.
 /// Instances are created via the factory method rather than directly.
 /// </summary>
 /// <param name="labels">Classification labels, index-aligned with the model outputs.</param>
 /// <param name="inferenceInterface">The loaded TensorFlow inference interface.</param>
 private TensorflowModel(string[] labels, TensorFlowInferenceInterface inferenceInterface)
 {
     this.inferenceInterface = inferenceInterface;
     this.labels             = labels;
 }
Beispiel #21
0
        /// <summary>
        /// Activity entry point: wires up the demo click-counter button, runs a bundled LSTM
        /// model over a hard-coded sea-level series, and runs the Inception classifier once
        /// against the bundled "husky.png" image.
        /// </summary>
        /// <param name="savedInstanceState">Saved state forwarded to the base Activity.</param>
        protected override void OnCreate(Bundle savedInstanceState)
        {
            base.OnCreate(savedInstanceState);

            // Set our view from the "main" layout resource
            SetContentView(Resource.Layout.Main);

            // Get our button from the layout resource,
            // and attach an event to it
            Button button = FindViewById <Button>(Resource.Id.myButton);

            button.Click += delegate { button.Text = $"{count++} clicks!"; };

            // Run the LSTM inference once at startup; any failure is logged, not rethrown.
            try
            {
                TensorFlowInferenceInterface tfi = new TensorFlowInferenceInterface(Assets, "file:///android_asset/TF_LSTM_Inference.pb");
                // Hard-coded demo input series fed to the model as a single run.
                float[] inputSeaLevels           = new float[] {
                    4.92F, 2.022F, -0.206F, 2.355F, 4.08F, 1.828F, -0.005F, 2.83F,
                    4.966F, 2.715F, -0.073F, 1.69F, 3.958F, 2.5F, 0.201F, 2.075F, 4.754F, 3.475F,
                    0.345F, 0.954F, 3.665F, 3.165F, 0.562F, 1.285F, 4.415F, 4.083F, 0.83F, 0.327F,
                    3.304F, 3.589F, 0.976F, 0.707F, 3.989F, 4.37F, 1.375F, 0.039F, 2.863F, 3.715F,
                    1.525F, 0.507F, 3.403F, 4.38F, 2.06F, 0.097F, 2.251F, 3.671F, 2.223F, 0.579F,
                    2.605F, 4.25F, 2.791F, 0.306F, 1.489F, 3.575F, 2.907F, 0.759F, 1.741F, 4.018F,
                    3.381F, 0.589F, 0.796F, 3.382F, 3.441F, 1.103F, 1.099F, 3.566F, 3.75F, 1.09F,
                    0.393F, 2.906F, 3.831F, 1.77F, 0.816F, 2.739F, 3.922F, 1.891F, 0.285F, 2.02F,
                    4.083F, 2.727F, 0.833F, 1.593F, 3.863F, 2.824F, 0.408F, 0.906F, 4.071F, 3.738F,
                    1.149F, 0.511F, 3.431F, 3.626F, 0.863F, -0.004F, 3.57F, 4.568F, 1.906F,
                    -0.079F, 2.489F, 4.115F, 1.777F, -0.396F, 2.456F, 5.046F, 3.106F, -0.04F,
                    1.113F, 4.168F, 2.998F, -0.246F, 0.941F, 5.009F, 4.425F, 0.516F, -0.267F,
                    3.695F, 4.092F, 0.405F, -0.384F, 4.32F, 5.429F, 1.541F, -1.036F, 2.628F,
                    4.711F, 1.479F, -0.977F, 3.043F, 5.82F, 2.856F, -0.932F, 1.249F, 4.713F,
                    2.828F, -0.734F, 1.438F, 5.546F, 4.182F, -0.159F, -0.109F, 4.137F, 4.03F,
                    0.09F, 0.042F, 4.693F, 5.075F, 0.921F, -0.925F, 3.14F, 4.67F, 1.227F, -0.599F,
                    3.458F, 5.274F, 2.089F, -0.893F, 1.945F, 4.618F, 2.489F, -0.336F, 2.106F,
                    4.828F, 3.178F, -0.192F, 0.816F, 4.026F, 3.61F, 0.491F, 0.961F, 3.972F, 3.93F,
                    0.717F, 0.057F, 3.155F, 4.273F, 1.457F, 0.369F, 2.96F, 4.147F, 1.559F, -0.088F,
                    2.226F, 4.349F, 2.398F, 0.485F, 1.964F, 3.861F, 2.329F, 0.335F, 1.357F, 3.972F,
                    3.286F, 1.092F, 1.083F, 3.277F, 3.011F, 0.981F, 0.632F, 3.375F, 3.987F
                };
                String INPUT_ARGUMENT_NAME  = "lstm_1_input";
                String OUTPUT_VARIABLE_NAME = "output_node0";
                int    OUTPUT_SIZE          = 100;

                // Feed dims are (series length, 1, 1): one value per timestep —
                // NOTE(review): assumes the exported LSTM expects this layout; confirm
                // against the model's export script.
                tfi.Feed(INPUT_ARGUMENT_NAME, inputSeaLevels, inputSeaLevels.Length, 1, 1);
                tfi.Run(new String[] { OUTPUT_VARIABLE_NAME });
                float[] predictions = new float[OUTPUT_SIZE];
                tfi.Fetch(OUTPUT_VARIABLE_NAME, predictions);
                // Show the first predicted value on the demo button.
                button.Text = predictions[0].ToString();
            }
            catch (Exception x)
            {
                Console.WriteLine(x);
            }

            // Smoke-test the image classifier on a bundled sample image; the top result is
            // currently discarded (the TextView hookup below was never finished).
            using (var ic = new InceptionClassifier(BaseContext.Assets))
            {
                var results = ic.Recognize(BitmapCreate(BaseContext.Assets, "husky.png"));
                var top     = results.First();
                //				TextView label = FindViewById<TextView>(Resource.Id.)
            }
        }
Beispiel #22
0
 /// <summary>
 /// Loads the TensorFlow model identified by <c>MODEL_FILE_URL</c> from the given assets.
 /// </summary>
 /// <param name="assetManager">Asset manager used to resolve the model file.</param>
 public TensorflowInferencePredictor(AssetManager assetManager)
 {
     this.inferenceInterface = new TensorFlowInferenceInterface(assetManager, MODEL_FILE_URL);
 }