public async Task<IEnumerable<Prediction>> ClassifyAsync(byte[] bytes)
        {
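            // Memory-map the .tflite model so the native interpreter can read it in place,
            // rather than copying the whole file into managed memory.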
            var mappedByteBuffer = GetModelAsMappedByteBuffer();

            System.Console.WriteLine("Running TensorFlow Lite interpreter");
            System.Console.WriteLine($"TensorFlow Lite runtime version {TensorFlowLite.RuntimeVersion()}");
            System.Console.WriteLine($"TensorFlow Lite schema version {TensorFlowLite.SchemaVersion()}");
            
            var interpreterOptions = new Interpreter.Options();
            //TODO: Pass from UI?
            var numThreads = 1;
            interpreterOptions.SetNumThreads(numThreads);
            //TODO: Look into use of GPU delegate vs NNAPI
            // https://developer.android.com/ndk/guides/neuralnetworks
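            // NNAPI offloads supported ops to on-device accelerators; allowing FP16 precision
            // for FP32 ops trades a little accuracy for faster inference on supported hardware.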
            interpreterOptions.SetUseNNAPI(true);
            interpreterOptions.SetAllowFp16PrecisionForFp32(true);

            var interpreter = new Interpreter(mappedByteBuffer, interpreterOptions);

            var tensor = interpreter.GetInputTensor(0);

            var shape = tensor.Shape();

            // Image classification models use an NHWC input shape: [1, height, width, channels].
            var height = shape[1];
            var width = shape[2];

            var labels = await LoadLabelsAsync(LabelsFileName);
            var byteBuffer = GetPhotoAsByteBuffer(bytes, width, height);

            // A single batch of outputs: one probability per label.
            var outputLocations = new[] { new float[labels.Count] };
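            // Interpreter.Run needs Java-side arrays: Java.Lang.Object.FromArray copies the managed
            // float[][] into a Java float[][], and ToArray<float[]> copies the results back afterwards.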

            var outputs = Java.Lang.Object.FromArray(outputLocations);

            interpreter.Run(byteBuffer, outputs);

            var classificationResult = outputs.ToArray<float[]>();

            var result = new List<Prediction>();

            for (var i = 0; i < labels.Count; i++)
            {
                var label = labels[i];
                result.Add(new Prediction(label, classificationResult[0][i]));
            }

            //TODO: Consider using this event or MediatR to return results to view model
            //https://blog.duijzer.com/posts/mvvmcross_with_mediatr/
            PredictionCompleted?.Invoke(this, new PredictionCompletedEventArgs(result));

            return result;
        }
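
        // GetModelAsMappedByteBuffer is defined elsewhere in this class. For reference, a minimal
        // sketch of the usual approach - assuming the .tflite file ships as an uncompressed Android
        // asset named ModelFileName (the body below is illustrative, not the project's actual code):
        //
        //   private Java.Nio.MappedByteBuffer GetModelAsMappedByteBuffer()
        //   {
        //       var assetDescriptor = Android.App.Application.Context.Assets.OpenFd(ModelFileName);
        //       using (var inputStream = new Java.IO.FileInputStream(assetDescriptor.FileDescriptor))
        //       {
        //           // Map only the model's region of the asset file into memory.
        //           return inputStream.Channel.Map(
        //               Java.Nio.Channels.FileChannel.MapMode.ReadOnly,
        //               assetDescriptor.StartOffset,
        //               assetDescriptor.DeclaredLength);
        //       }
        //   }
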
        public async Task<PredictionResult> PredictAsync(Stream stream)
        {
            //https://www.tensorflow.org/lite/models/image_classification/android
            //Code https://github.com/tensorflow/examples/blob/master/lite/examples/image_classification/android/app/src/main/java/org/tensorflow/lite/examples/classification/tflite/Classifier.java

            //Kotlin example https://medium.com/@teresa.wu/tensorflow-image-recognition-on-android-with-kotlin-cee8d977ae9
            // - code at https://github.com/teresawu/random

            _labels = _labels ?? await LoadLabels(LabelsFileName);
            _modelBuffer = _modelBuffer ?? await LoadModelFileBuffer(ModelFileName);

            //TODO: Look into the GPU delegate - GpuHelperDelegate

            //TODO: Pass from UI?
            var numThreads = 1;

            var tfOptions = new Interpreter.Options();

            tfOptions.SetNumThreads(numThreads);
            //tfOptions.SetUseNNAPI(true);
            tfOptions.SetAllowFp16PrecisionForFp32(true);

            //TODO: Look into use of GPU delegate vs NNAPI
            // https://developer.android.com/ndk/guides/neuralnetworks

            // C# equivalent of Java memory mapping: https://stackoverflow.com/questions/30716027/c-sharp-equivalent-of-java-memory-mapping-methods
            var interpreter = new Interpreter(_modelBuffer, tfOptions);

            var bitmap = CreateBitmap(stream);
            var imgData = ConvertBitmapToByteBuffer(bitmap);
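            // ConvertBitmapToByteBuffer (defined elsewhere in this class) is expected to scale the
            // bitmap to the model's input size and write its pixels into a direct ByteBuffer in the
            // layout the model expects; a sketch of a typical implementation follows this method.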

            // One row of probabilities per batch element; a single image gives a single row.
            var labelProbArray = new float[1][] { new float[_labels.Count] };

            // Interpreter.Run needs a Java-side output array. JNIEnv.NewArray copies the managed
            // jagged array into a new Java float[][]; the interpreter writes into that Java array,
            // so the results must be read back with JNIEnv.GetArray rather than from labelProbArray.
            // Jagged-array marshalling: https://stackoverflow.com/questions/17305522/mono-for-android-binding-jagged-array
            var floatArrayArray = new Java.Lang.Object(JNIEnv.NewArray(labelProbArray), JniHandleOwnership.TransferLocalRef);

            try
            {
                // Run the inference.
                //TODO: Try RunForMultipleInputsOutputs or GetOutputTensor instead
                //  - see https://github.com/tensorflow/tensorflow/issues/25751
                //  - or https://devblogs.microsoft.com/xamarin/android-apps-tensorflow/
                interpreter.Run(imgData, floatArrayArray);
            }
            catch (Exception e)
            {
                System.Console.WriteLine(e);
                throw;
            }

            // Copy the interpreter's output back from the Java array into a managed float[][].
            float[][] outputs = JNIEnv.GetArray<float[]>(floatArrayArray.Handle);

            // Find the best classifications.
            var recognitions = new List<Recognition>(_labels.Count);

            for (var i = 0; i < _labels.Count; i++)
            {
                // Read from the marshalled-back outputs rather than labelProbArray: the interpreter
                // wrote into the Java-side copy, so the original managed array was never updated.
                recognitions.Add(new Recognition
                                 (
                                     i.ToString(),
                                     _labels[i],
                                     outputs[0][i],
                                     null
                                 ));
            }
            // Sort high-to-low by confidence.

            var orderedRecognitions = recognitions.OrderByDescending(x => x.Confidence).ToList();

            foreach (var recognition in orderedRecognitions)
            {
                System.Console.WriteLine($"Result {recognition.Title} with confidence {recognition.Confidence}");
            }

            // Note: the upstream Classifier.java sample (linked above) uses a bounded PriorityQueue
            // to keep only the top MAX_RESULTS recognitions; here every label is returned, sorted
            // by confidence.

            var top = orderedRecognitions.First();

            return new PredictionResult
            {
                Success = true,
                Answer = $"{top.Title} with confidence {top.Confidence:0.##}"
            };
        }
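
        // For reference, a minimal sketch of a typical ConvertBitmapToByteBuffer for a float
        // model - ImageWidth/ImageHeight and the [0, 1] normalisation are assumptions and must
        // match the model actually in use (the body below is illustrative, not the project's
        // actual implementation):
        //
        //   private Java.Nio.ByteBuffer ConvertBitmapToByteBuffer(Android.Graphics.Bitmap bitmap)
        //   {
        //       var resized = Android.Graphics.Bitmap.CreateScaledBitmap(bitmap, ImageWidth, ImageHeight, false);
        //
        //       // 4 bytes per float, 3 channels (RGB) per pixel.
        //       var buffer = Java.Nio.ByteBuffer.AllocateDirect(4 * ImageWidth * ImageHeight * 3);
        //       buffer.Order(Java.Nio.ByteOrder.NativeOrder());
        //
        //       var pixels = new int[ImageWidth * ImageHeight];
        //       resized.GetPixels(pixels, 0, ImageWidth, 0, 0, ImageWidth, ImageHeight);
        //
        //       foreach (var pixel in pixels)
        //       {
        //           // Unpack ARGB and normalise each channel to [0, 1].
        //           buffer.PutFloat(((pixel >> 16) & 0xFF) / 255.0f);
        //           buffer.PutFloat(((pixel >> 8) & 0xFF) / 255.0f);
        //           buffer.PutFloat((pixel & 0xFF) / 255.0f);
        //       }
        //
        //       return buffer;
        //   }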