Code example #1
        /// <summary>
        /// Trims each cropped part down to its region of interest (the bounding box of
        /// detected content) and returns the trimmed parts wrapped in a new cropped area.
        /// Parts with no usable content are kept as null bitmaps so ordering is preserved.
        /// Disposes <paramref name="areaOfExtraction"/> before returning.
        /// </summary>
        public ProcessingResult <ICroppedArea> ExtractRegionOfInterestFrom(ICroppedArea areaOfExtraction)
        {
            using (areaOfExtraction)
            {
                var regionsOfInterest = new List <OrderedBitmap>();

                foreach (var part in areaOfExtraction.CroppedParts)
                {
                    var pixels = part.Bitmap.AsFlattenedByteArray();

                    var left   = GetLeftEdgeOfRegionOfInterest(pixels);
                    var right  = GetRightEdgeOfRegionOfInterest(pixels);
                    var top    = GetTopEdgeOfRegionOfInterest(pixels);
                    var bottom = GetBottomEdgeOfRegionOfInterest(pixels);

                    //A missing edge, or a region shorter than 10 pixels, means there is
                    //nothing interesting here - keep the slot but with an empty bitmap.
                    if (left == null || right == null || top == null || bottom == null ||
                        (bottom.Value - top.Value) < 10)
                    {
                        regionsOfInterest.Add(new OrderedBitmap(part.Order, null));
                        continue;
                    }

                    //Edges are inclusive, hence the +1 on both dimensions.
                    var boundingBoxSize = new Dimension2D(right.Value - left.Value + 1, bottom.Value - top.Value + 1)
                                          .GetInputSizeAsMultipliesOfFour().AsSize();

                    var trimmedBitmap =
                        new Crop(new Rectangle(new Point(left.Value, top.Value), boundingBoxSize)).Apply(part.Bitmap);

                    regionsOfInterest.Add(new OrderedBitmap(part.Order, trimmedBitmap));
                }

                return(ProcessingResult <ICroppedArea> .Success(
                           new CroppedArea(areaOfExtraction.AreaUsedForCropping, regionsOfInterest, areaOfExtraction.DocumentId)));
            }
        }
Code example #2
        /// <summary>
        /// Normalizes each cropped part for recognition: converts it to grayscale and
        /// binarizes it with Bradley adaptive thresholding. Disposes
        /// <paramref name="areaToClean"/> once processing starts.
        /// </summary>
        /// <param name="areaToClean">Area whose parts should be cleaned; must contain at least one part.</param>
        /// <returns>Success with the cleaned parts, or a Failure describing why processing stopped.</returns>
        public ProcessingResult <ICroppedArea> CleanUp(ICroppedArea areaToClean)
        {
            if (areaToClean.CroppedParts == null || areaToClean.CroppedParts.Count == 0)
            {
                return(ProcessingResult <ICroppedArea> .Failure(new EmptyInput()));
            }

            using (areaToClean)
            {
                try
                {
                    var cleanedParts = new List <OrderedBitmap>();

                    foreach (var croppedPart in areaToClean.CroppedParts)
                    {
                        //Grayscale first (RMY weighting), then binarize in place.
                        Bitmap formattedImage = Grayscale.CommonAlgorithms.RMY.Apply(croppedPart.Bitmap);
                        new BradleyLocalThresholding().ApplyInPlace(formattedImage);
                        cleanedParts.Add(new OrderedBitmap(croppedPart.Order, formattedImage));
                    }
                    return(ProcessingResult <ICroppedArea> .Success(
                               new CroppedArea(areaToClean.AreaUsedForCropping, cleanedParts, areaToClean.DocumentId)));
                }
                //Exception variables were unused (compiler warning CS0168) - catch without binding
                //where the value is not needed.
                catch (UnsupportedImageFormatException)
                {
                    return(ProcessingResult <ICroppedArea> .Failure(new UnsupportedImageFormat()));
                }
                catch (Exception generalException)
                {
                    return(ProcessingResult <ICroppedArea> .Failure(new UncaughtException(generalException)));
                }
            }
        }
Code example #3
        /// <summary>
        /// Prepares each cropped part for EMNIST-style recognition: inverts colors,
        /// blurs slightly, resizes in two steps (stride-safe multiple-of-four size, then
        /// height 28 with proportional width), and finally squares/centers the bitmap.
        /// Disposes <paramref name="croppedArea"/> before returning.
        /// </summary>
        /// <param name="croppedArea">Area whose parts should be scaled and flattened.</param>
        /// <returns>Success with one flattened bitmap per part (null for empty parts).</returns>
        public ProcessingResult <FlattenedCroppedArea> ScaleAndFlatten(ICroppedArea croppedArea)
        {
            FlattenedCroppedArea output;

            using (croppedArea)
            {
                var flattenedResults = new List <OrderedFlattenedBitmap>();

                foreach (var croppedAreaPart in croppedArea.CroppedParts)
                {
                    //Empty parts keep their slot (null bitmap) so ordering survives.
                    if (croppedAreaPart.Bitmap == null)
                    {
                        flattenedResults.Add(new OrderedFlattenedBitmap(croppedAreaPart.Order, null));
                        continue;
                    }

                    //EMNIST expects black background and white text
                    new Invert().ApplyInPlace(croppedAreaPart.Bitmap);

                    //Bitmap will force stride to be 4x, so let's avoid mess up and do it ourselves
                    var inputSizeAsMultipliesOfFour = GetInputSizeFromBitmap(croppedAreaPart.Bitmap).GetInputSizeAsMultipliesOfFour();

                    //Downscale to height = 28 and width scaled by same factor
                    var inputSizeAfterDownscale = GetDimensionsForResizeBeforeCentering(inputSizeAsMultipliesOfFour);

                    //And fix it to multiple of 4 again
                    inputSizeAfterDownscale = inputSizeAfterDownscale.GetInputSizeAsMultipliesOfFour();

                    //Blur it a little for better feature recognition
                    new GaussianBlur(0.4, 3).ApplyInPlace(croppedAreaPart.Bitmap);

                    //Resize to multiples of four
                    using (var bitmapFixedToMultiplyOfFour =
                               new ResizeBilinear(inputSizeAsMultipliesOfFour.Width, inputSizeAsMultipliesOfFour.Height)
                               .Apply(croppedAreaPart.Bitmap))
                        //Resize to 28*XX
                        using (var bitmapDownscaledToExpectedHeight =
                                   new ResizeBilinear(inputSizeAfterDownscale.Width, inputSizeAfterDownscale.Height).Apply(
                                       bitmapFixedToMultiplyOfFour))
                        {
                            //Resize canvas and center, making the bitmap squared (28*28) instead of rectangular
                            flattenedResults.Add(new OrderedFlattenedBitmap(croppedAreaPart.Order,
                                                                            CreatedSquaredAndCenteredBitmapFrom(bitmapDownscaledToExpectedHeight)));
                        }
                }

                output = new FlattenedCroppedArea(croppedArea.AreaUsedForCropping, flattenedResults, croppedArea.DocumentId);
            }

            return(ProcessingResult <FlattenedCroppedArea> .Success(output));
        }
Code example #4
        /// <summary>
        /// Crops the given bitmap into the areas defined by the template page. Areas with
        /// interest points produce one ordered crop per point; areas without produce a
        /// single crop of the whole area. Disposes <paramref name="bitmapToCropIntoParts"/>
        /// once cropping starts.
        /// </summary>
        /// <param name="bitmapToCropIntoParts">Source bitmap to slice up.</param>
        /// <param name="documentId">Identifier of the document the crops belong to; must not be empty.</param>
        /// <param name="definitionForCropping">Template describing the areas to crop; must not be null.</param>
        /// <returns>Success with all cropped areas, or a Failure describing the first error.</returns>
        public ProcessingResult <ICollection <ICroppedArea> > CropUserInput(Bitmap bitmapToCropIntoParts, Guid documentId,
                                                                            ITemplatePageDefinition definitionForCropping)
        {
            if (definitionForCropping == null)
            {
                return(ProcessingResult <ICollection <ICroppedArea> > .Failure(new PageDefinitionNotProvided()));
            }

            if (documentId == Guid.Empty)
            {
                return(ProcessingResult <ICollection <ICroppedArea> > .Failure(new MissingDocumentId()));
            }

            //Can this be more granular without downloading bitmap all over again?
            //Maybe parent holding bitmap and just passing it over to all children?
            using (bitmapToCropIntoParts)
            {
                var results = new List <ICroppedArea>();
                foreach (var templateArea in definitionForCropping.DefinedAreas)
                {
                    try
                    {
                        var croppedAreaParts = new List <OrderedBitmap>();

                        if (templateArea.InterestPoints == null || templateArea.InterestPoints.Count == 0)
                        {
                            //No interest points - the whole area is a single part with order 0.
                            croppedAreaParts.Add(
                                new OrderedBitmap(0, new Crop(templateArea.AreaDimension).Apply(bitmapToCropIntoParts)));
                        }
                        else
                        {
                            foreach (var areaPart in templateArea.InterestPoints)
                            {
                                croppedAreaParts.Add(
                                    new OrderedBitmap(areaPart.OrderInArea, new Crop(areaPart.Dimension).Apply(bitmapToCropIntoParts)));
                            }
                        }

                        results.Add(new CroppedArea(templateArea, croppedAreaParts, documentId));
                    }
                    catch (UnsupportedImageFormatException)
                    {
                        return(ProcessingResult <ICollection <ICroppedArea> > .Failure(new UnsupportedImageFormat()));
                    }
                    catch (Exception ex)
                    {
                        //BUGFIX: the Failure result was previously constructed but never
                        //returned, silently swallowing the error and continuing the loop.
                        return(ProcessingResult <ICollection <ICroppedArea> > .Failure(new UncaughtException(ex)));
                    }
                }
                return(ProcessingResult <ICollection <ICroppedArea> > .Success(results));
            }
        }
        /// <summary>
        /// Isolates the user's handwriting by diffing the scanned input against the blank
        /// template canvas (fetched via the canvas query handler) and inverting the result.
        /// The canvas is resized to match the input when their sizes differ. Disposes
        /// <paramref name="userInput"/> and the fetched canvas before returning.
        /// </summary>
        /// <param name="userInput">Scanned page containing handwriting; must not be null.</param>
        /// <param name="templatePage">Template whose blank canvas should be subtracted; must not be null.</param>
        /// <returns>Success with a bitmap containing only the handwritten strokes, or a Failure.</returns>
        public ProcessingResult <Bitmap> ExtractHandwrittenInput(Bitmap userInput, ITemplatePageDefinition templatePage)
        {
            if (userInput == null || templatePage == null)
            {
                return(ProcessingResult <Bitmap> .Failure(new EmptyInput()));
            }

            using (userInput)
            {
                Bitmap canvasBitmap = null;

                try
                {
                    canvasBitmap = _canvasBitmapQueryHandler.Handle(
                        new GetCanvasBitmapForTemplatePage {
                        TemplatePageId = templatePage.Id
                    }).FileData.AsBitmap();

                    if (canvasBitmap == null)
                    {
                        //TODO: ResultMessage for failed HTTP request or pipe from ResourceQueryHandler
                        return(new ProcessingResult <Bitmap>(StepOutcome.Failure, null, null));
                    }

                    //Difference requires matching dimensions - rescale the canvas to the input.
                    if (canvasBitmap.Size != userInput.Size)
                    {
                        using (var oldCanvas = canvasBitmap)
                        {
                            canvasBitmap = new ResizeBilinear(userInput.Width, userInput.Height).Apply(oldCanvas);
                        }
                    }

                    //BUGFIX: the intermediate difference bitmap was previously leaked;
                    //Apply() returns a new bitmap that must be disposed after inversion.
                    using (var difference = new Difference(userInput).Apply(canvasBitmap))
                    {
                        var result = new Invert().Apply(difference);

                        return(ProcessingResult <Bitmap> .Success(result));
                    }
                }
                catch (UnsupportedImageFormatException)
                {
                    return(ProcessingResult <Bitmap> .Failure(new UnsupportedImageFormat()));
                }
                finally
                {
                    canvasBitmap?.Dispose();
                }
            }
        }
Code example #6
        /// <summary>
        /// Recognizes a checkbox/choice field: the area must contain exactly one cropped
        /// part, and the choice is considered checked when that part holds any bitmap
        /// (i.e. handwriting was detected there).
        /// </summary>
        /// <param name="croppedArea">Area expected to contain exactly one part.</param>
        /// <returns>Success with the boolean recognition result, or a Failure.</returns>
        public ProcessingResult <IRecognizedPart <bool> > RecognizeFrom(ICroppedArea croppedArea)
        {
            //Guard against a missing collection as well (consistent with CleanUp) to
            //avoid a NullReferenceException on the Count access.
            if (croppedArea.CroppedParts == null || croppedArea.CroppedParts.Count != 1)
            {
                //TODO: ResultDetails for - expected only one element
                return(ProcessingResult <IRecognizedPart <bool> > .Failure(null));
            }

            var croppedElement = croppedArea.CroppedParts.Single();

            if (croppedElement == null)
            {
                //TODO: ResultDetails for - empty element in CroppedArea
                return(ProcessingResult <IRecognizedPart <bool> > .Failure(null));
            }

            return(ProcessingResult <IRecognizedPart <bool> > .Success(
                       new RecognizedChoicePart(croppedArea.AreaUsedForCropping.Id,
                                                croppedArea.DocumentId,
                                                //Empty field - no input; otherwise - is checked via handwriting
                                                croppedElement.Bitmap != null)));
        }
        /// <summary>
        /// Recognizes handwritten text from the flattened bitmaps of a cropped area by
        /// running each bitmap through the CNTK model matching the area's expected data
        /// type, then mapping the top prediction back to a character.
        /// </summary>
        /// <param name="croppedArea">Area whose flattened bitmaps should be recognized.</param>
        /// <returns>Success with the ordered per-part predictions, or a Failure wrapping any exception.</returns>
        public ProcessingResult <IRecognizedPart <string> > RecognizeFrom(FlattenedCroppedArea croppedArea)
        {
            var expectedData = croppedArea.AreaUsedForCropping.ExpectedData.AsFileStorageModel();
            var modelBytes   = _getRecognitionModelHandler.Handle(
                new GetImageRecognitionModel
            {
                ExpectedData = expectedData
            });

            //NOTE(review): the loaded Function looks disposable - confirm whether it
            //should be wrapped in a using to release native CNTK resources.
            var model = Function.Load(modelBytes, _device);

            try
            {
                var areaPartsPredictions = new List <OrderedRecognitionOutput>();

                foreach (var flattenedBitmap in croppedArea.FlattenedBitmaps.OrderBy(fb => fb.Order))
                {
                    //Empty parts are treated as "no character" with full confidence.
                    if (flattenedBitmap.FlattenedBitmap == null)
                    {
                        areaPartsPredictions.Add(new OrderedRecognitionOutput(default(char), 1f, flattenedBitmap.Order));
                        continue;
                    }

                    Variable inputVar   = model.Arguments[0];
                    NDShape  inputShape = inputVar.Shape;

                    List <float> flattenedBitmapFeatures =
                        flattenedBitmap.FlattenedBitmap.Select(feat => (float)feat).ToList();

                    // Create input data map
                    var inputDataMap = new Dictionary <Variable, Value>();
                    var inputVal     = Value.CreateBatch(inputShape, flattenedBitmapFeatures, _device);
                    inputDataMap.Add(inputVar, inputVal);

                    Variable outputVar = model.Output;

                    // Create output data map. Using null as Value to indicate using system allocated memory.
                    // Alternatively, create a Value object and add it to the data map.
                    var outputDataMap = new Dictionary <Variable, Value>();
                    outputDataMap.Add(outputVar, null);

                    // Start evaluation on the device
                    model.Evaluate(inputDataMap, outputDataMap, _device);

                    // Get evaluate result as dense output
                    var outputVal  = outputDataMap[outputVar];
                    var outputData = outputVal.GetDenseData <float>(outputVar);
                    Dictionary <int, float> outputPred = new Dictionary <int, float>();

                    for (int i = 0; i < outputData[0].Count; i++)
                    {
                        outputPred.Add(i, outputData[0][i]);
                    }

                    var topPrediction = outputPred.GetTopRelativePrediction();
                    areaPartsPredictions.Add(new OrderedRecognitionOutput(
                                                 _labelMapConfiguration.GetCharFromLabel(croppedArea.AreaUsedForCropping.ExpectedData, topPrediction.EvaluationLabel),
                                                 topPrediction.RelativePercentageScore,
                                                 flattenedBitmap.Order));
                }

                return(ProcessingResult <IRecognizedPart <string> > .Success(
                           new RecognizedTextPart(croppedArea.AreaUsedForCropping.Id, areaPartsPredictions, croppedArea.DocumentId)));
            }
            catch (Exception ex)
            {
                //BUGFIX: was `throw ex;`, which resets the stack trace and contradicted
                //the file's convention (and its own TODO) of returning Failure results.
                return(ProcessingResult <IRecognizedPart <string> > .Failure(new UncaughtException(ex)));
            }
        }