private RecognizedTextPart RecognizeTextPart(OrderedBitmap page, IActorRef selfRef, DocumentToProcess document,
                                                     ICroppedArea croppedPart, FlattenedCroppedArea downscaleResult)
        {
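            // Runs recognition for a single cropped area, reports completion back to
            // the processing actor via selfRef, and hands the result to the caller.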
            var recognitionResult = _handwrittenInputRecognizer.RecognizeFrom(downscaleResult).Result;

            recognitionResult.AreaName = croppedPart.AreaUsedForCropping.AreaName;

            selfRef.Tell(new DocumentPageTextAreaProcessingCompleted(document.Id, (int)page.Order,
                                                                     croppedPart.AreaUsedForCropping.AreaName, recognitionResult));

            return recognitionResult as RecognizedTextPart;
        }
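
        // Loads the CNTK model that matches the area's expected data type, evaluates
        // each flattened bitmap fragment in order, and assembles the per-fragment
        // predictions into a single RecognizedTextPart.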
        public ProcessingResult<IRecognizedPart<string>> RecognizeFrom(FlattenedCroppedArea croppedArea)
        {
            var expectedData = croppedArea.AreaUsedForCropping.ExpectedData.AsFileStorageModel();
            var modelBytes = _getRecognitionModelHandler.Handle(
                new GetImageRecognitionModel { ExpectedData = expectedData });

            // Deserialize the trained model bytes onto the configured device (CPU or GPU)
            var model = Function.Load(modelBytes, _device);

            try
            {
                var areaPartsPredictions = new List<OrderedRecognitionOutput>();

                foreach (var flattenedBitmap in croppedArea.FlattenedBitmaps.OrderBy(fb => fb.Order))
                {
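                    // A fragment with no bitmap data (presumably a blank segment) maps
                    // to an empty character with full confidence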
                    if (flattenedBitmap.FlattenedBitmap == null)
                    {
                        areaPartsPredictions.Add(new OrderedRecognitionOutput(default(char), 1f, flattenedBitmap.Order));
                        continue;
                    }

                    Variable inputVar = model.Arguments[0];
                    NDShape inputShape = inputVar.Shape;

                    List<float> flattenedBitmapFeatures =
                        flattenedBitmap.FlattenedBitmap.Select(feat => (float)feat).ToList();

                    // Create the input data map, binding the flattened pixel features
                    // to the model's input variable
                    var inputDataMap = new Dictionary<Variable, Value>();
                    var inputVal = Value.CreateBatch(inputShape, flattenedBitmapFeatures, _device);
                    inputDataMap.Add(inputVar, inputVal);

                    Variable outputVar = model.Output;

                    // Create output data map. Using null as Value to indicate using system allocated memory.
                    // Alternatively, create a Value object and add it to the data map.
                    var outputDataMap = new Dictionary<Variable, Value>();
                    outputDataMap.Add(outputVar, null);

                    // Start evaluation on the device
                    model.Evaluate(inputDataMap, outputDataMap, _device);

                    // Get evaluate result as dense output
                    var outputVal = outputDataMap[outputVar];
                    var outputData = outputVal.GetDenseData<float>(outputVar);
                    var outputPred = new Dictionary<int, float>();

                    for (int i = 0; i < outputData[0].Count; i++)
                    {
                        outputPred.Add(i, outputData[0][i]);
                    }

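                    // GetTopRelativePrediction is an extension method (not shown here)
                    // that picks the highest-scoring label together with its score
                    // relative to the remaining outputs; see the sketch after this method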
                    var topPrediction = outputPred.GetTopRelativePrediction();
                    areaPartsPredictions.Add(new OrderedRecognitionOutput(
                                                 _labelMapConfiguration.GetCharFromLabel(croppedArea.AreaUsedForCropping.ExpectedData, topPrediction.EvaluationLabel),
                                                 topPrediction.RelativePercentageScore,
                                                 flattenedBitmap.Order));
                }

                return ProcessingResult<IRecognizedPart<string>>.Success(
                    new RecognizedTextPart(croppedArea.AreaUsedForCropping.Id, areaPartsPredictions, croppedArea.DocumentId));
            }
            catch (Exception)
            {
                // TODO: map the exception to a ProcessingResult failure instead of rethrowing
                throw; // rethrow without resetting the original stack trace
            }
        }
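
        // A minimal sketch of the GetTopRelativePrediction extension used above. The
        // real implementation and its return type are not part of this listing, so the
        // type name (RelativePrediction) and the relative-score formula are assumptions;
        // as an extension method it must live in a top-level static class, outside the
        // class shown here.
        public static class PredictionExtensions
        {
            public static RelativePrediction GetTopRelativePrediction(this Dictionary<int, float> predictions)
            {
                // Pick the label index with the highest raw activation
                var top = predictions.OrderByDescending(p => p.Value).First();

                // Assumed scoring: express the winning activation as a share of the
                // summed activations, yielding a rough relative percentage
                var sum = predictions.Values.Sum();
                var relativeScore = sum == 0f ? 0f : top.Value / sum * 100f;

                return new RelativePrediction(top.Key, relativeScore);
            }
        }

        // Hypothetical carrier type matching the properties consumed above
        public sealed class RelativePrediction
        {
            public RelativePrediction(int evaluationLabel, float relativePercentageScore)
            {
                EvaluationLabel = evaluationLabel;
                RelativePercentageScore = relativePercentageScore;
            }

            public int EvaluationLabel { get; }
            public float RelativePercentageScore { get; }
        }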