/// <summary>
/// Cleans every cropped part of <paramref name="areaToClean"/> for recognition:
/// converts it to grayscale (RMY weights) and binarizes it with Bradley local
/// thresholding. The input area is disposed before this method returns.
/// </summary>
/// <param name="areaToClean">Area whose parts are cleaned; disposed by this method.</param>
/// <returns>
/// Success carrying a new <see cref="CroppedArea"/> with the cleaned parts;
/// Failure with <see cref="EmptyInput"/> when there are no parts,
/// <see cref="UnsupportedImageFormat"/> for images the filters reject,
/// or <see cref="UncaughtException"/> for anything unexpected.
/// </returns>
public ProcessingResult<ICroppedArea> CleanUp(ICroppedArea areaToClean)
{
    if (areaToClean.CroppedParts == null || areaToClean.CroppedParts.Count == 0)
    {
        return ProcessingResult<ICroppedArea>.Failure(new EmptyInput());
    }

    using (areaToClean)
    {
        try
        {
            var cleanedParts = new List<OrderedBitmap>();
            foreach (var croppedPart in areaToClean.CroppedParts)
            {
                // Grayscale first - the Bradley thresholding filter works on 8bpp images.
                Bitmap formattedImage = Grayscale.CommonAlgorithms.RMY.Apply(croppedPart.Bitmap);
                new BradleyLocalThresholding().ApplyInPlace(formattedImage);
                cleanedParts.Add(new OrderedBitmap(croppedPart.Order, formattedImage));
            }

            return ProcessingResult<ICroppedArea>.Success(
                new CroppedArea(areaToClean.AreaUsedForCropping, cleanedParts, areaToClean.DocumentId));
        }
        // Fix: exception variable was declared but never used (compiler warning CS0168).
        catch (UnsupportedImageFormatException)
        {
            return ProcessingResult<ICroppedArea>.Failure(new UnsupportedImageFormat());
        }
        catch (Exception generalException)
        {
            return ProcessingResult<ICroppedArea>.Failure(new UncaughtException(generalException));
        }
    }
}
/// <summary>
/// Locates the region of interest (the bounding box of non-background content)
/// inside each cropped part and crops the part down to that box, snapped to
/// multiples of four. Parts with no content, or with content shorter than 10
/// pixels, yield a null bitmap placeholder that keeps the part's order.
/// The input area is disposed before this method returns.
/// </summary>
/// <param name="areaOfExtraction">Area to scan; disposed by this method.</param>
/// <returns>
/// Success carrying a new <see cref="CroppedArea"/> of extracted regions,
/// or Failure with <see cref="EmptyInput"/> when there are no parts.
/// </returns>
public ProcessingResult<ICroppedArea> ExtractRegionOfInterestFrom(ICroppedArea areaOfExtraction)
{
    // Fix: guard missing here but present in CleanUp - a null/empty CroppedParts
    // collection previously crashed the foreach instead of failing cleanly.
    if (areaOfExtraction.CroppedParts == null || areaOfExtraction.CroppedParts.Count == 0)
    {
        return ProcessingResult<ICroppedArea>.Failure(new EmptyInput());
    }

    using (areaOfExtraction)
    {
        List<OrderedBitmap> extractedRegionsOfInterest = new List<OrderedBitmap>();
        foreach (var areaPart in areaOfExtraction.CroppedParts)
        {
            var partBytes = areaPart.Bitmap.AsFlattenedByteArray();
            var leftEdge = GetLeftEdgeOfRegionOfInterest(partBytes);
            var rightEdge = GetRightEdgeOfRegionOfInterest(partBytes);
            var topEdge = GetTopEdgeOfRegionOfInterest(partBytes);
            var bottomEdge = GetBottomEdgeOfRegionOfInterest(partBytes);

            // Height under 10px is treated as noise rather than real content.
            if (leftEdge == null || rightEdge == null || topEdge == null || bottomEdge == null ||
                (bottomEdge.Value - topEdge.Value) < 10)
            {
                //Nothing interesting - return empty result
                extractedRegionsOfInterest.Add(new OrderedBitmap(areaPart.Order, null));
                continue;
            }

            // +1 because the edges are inclusive; size is then snapped to multiples of four
            // to match the bitmap stride handling used elsewhere in the pipeline.
            var size = new Dimension2D(rightEdge.Value - leftEdge.Value + 1, bottomEdge.Value - topEdge.Value + 1)
                .GetInputSizeAsMultipliesOfFour().AsSize();
            var croppedBitmap = new Crop(new Rectangle(new Point(leftEdge.Value, topEdge.Value), size)).Apply(areaPart.Bitmap);
            extractedRegionsOfInterest.Add(new OrderedBitmap(areaPart.Order, croppedBitmap));
        }

        return ProcessingResult<ICroppedArea>.Success(
            new CroppedArea(areaOfExtraction.AreaUsedForCropping, extractedRegionsOfInterest, areaOfExtraction.DocumentId));
    }
}
/// <summary>
/// Prepares each cropped part for the EMNIST recognizer: inverts colors
/// (black background, white text), blurs slightly, resizes via two bilinear
/// passes (first to a multiple-of-four size, then downscaled to the expected
/// height), and finally squares/centers the result into the 28x28 input shape.
/// Parts with a null bitmap pass through as null placeholders that keep their
/// order. The input area is disposed before this method returns.
/// </summary>
/// <param name="croppedArea">Area to flatten; disposed by this method.</param>
/// <returns>Success carrying a <see cref="FlattenedCroppedArea"/> with one entry per part.</returns>
public ProcessingResult<FlattenedCroppedArea> ScaleAndFlatten(ICroppedArea croppedArea)
{
    FlattenedCroppedArea output;
    using (croppedArea)
    {
        var flattenedResults = new List<OrderedFlattenedBitmap>();
        foreach (var croppedAreaPart in croppedArea.CroppedParts)
        {
            // Upstream ROI extraction emits null bitmaps for empty fields - keep the slot.
            if (croppedAreaPart.Bitmap == null)
            {
                flattenedResults.Add(new OrderedFlattenedBitmap(croppedAreaPart.Order, null));
                continue;
            }

            //EMNIST expects black background and white text
            new Invert().ApplyInPlace(croppedAreaPart.Bitmap);

            //Bitmap will force stride to be 4x, so let's avoid mess up and do it ourselves
            var inputSizeAsMultipliesOfFour = GetInputSizeFromBitmap(croppedAreaPart.Bitmap).GetInputSizeAsMultipliesOfFour();

            //Downscale to height = 28 and width scaled by same factor
            var inputSizeAfterDownscale = GetDimensionsForResizeBeforeCentering(inputSizeAsMultipliesOfFour);
            //And fix it to multiple of 4 again
            inputSizeAfterDownscale = inputSizeAfterDownscale.GetInputSizeAsMultipliesOfFour();

            //Blur it a little for better feature recognition
            new GaussianBlur(0.4, 3).ApplyInPlace(croppedAreaPart.Bitmap);

            //Resize to multiples of four
            using (var bitmapFixedToMultiplyOfFour =
                new ResizeBilinear(inputSizeAsMultipliesOfFour.Width, inputSizeAsMultipliesOfFour.Height)
                    .Apply(croppedAreaPart.Bitmap))
            //Resize to 28*XX
            using (var bitmapDownscaledToExpectedHeight =
                new ResizeBilinear(inputSizeAfterDownscale.Width, inputSizeAfterDownscale.Height)
                    .Apply(bitmapFixedToMultiplyOfFour))
            {
                //Make bitmap squared (28x28) instead of rectangular and center its content
                flattenedResults.Add(new OrderedFlattenedBitmap(croppedAreaPart.Order,
                    CreatedSquaredAndCenteredBitmapFrom(bitmapDownscaledToExpectedHeight)));
            }
        }

        output = new FlattenedCroppedArea(croppedArea.AreaUsedForCropping, flattenedResults, croppedArea.DocumentId);
    }

    return ProcessingResult<FlattenedCroppedArea>.Success(output);
}
/// <summary>
/// Runs the handwritten-input recognizer over a flattened cropped area, tags the
/// recognition result with the name of the source area, notifies the actor about
/// completion of this page/area, and returns the result as a text part.
/// </summary>
/// <param name="page">Page the area belongs to; its order is reported in the completion message.</param>
/// <param name="selfRef">Actor that receives the completion notification.</param>
/// <param name="document">Document being processed; its id is reported in the completion message.</param>
/// <param name="croppedPart">Source area whose name is attached to the result.</param>
/// <param name="downscaleResult">Flattened bitmaps handed to the recognizer.</param>
/// <returns>The recognition result as <see cref="RecognizedTextPart"/>, or null if it is another type.</returns>
private RecognizedTextPart RecognizeTextPart(OrderedBitmap page, IActorRef selfRef, DocumentToProcess document, ICroppedArea croppedPart, FlattenedCroppedArea downscaleResult)
{
    var areaName = croppedPart.AreaUsedForCropping.AreaName;

    // NOTE(review): .Result here appears to be the payload of a ProcessingResult
    // (see RecognizeFrom's signature elsewhere in this file), not Task.Result - confirm;
    // if it IS a Task, this is a sync-over-async block worth revisiting.
    var recognized = _handwrittenInputRecognizer.RecognizeFrom(downscaleResult).Result;
    recognized.AreaName = areaName;

    selfRef.Tell(new DocumentPageTextAreaProcessingCompleted(document.Id, (int)page.Order,
        areaName, recognized));

    return recognized as RecognizedTextPart;
}
/// <summary>
/// Runs the handwritten-choice recognizer over an ROI-extracted cropped area,
/// tags the result with the name of the source area, notifies the actor about
/// completion of this page/area, and returns the choice part.
/// </summary>
/// <param name="page">Page the area belongs to; its order is reported in the completion message.</param>
/// <param name="senderRef">Actor that receives the completion notification.</param>
/// <param name="document">Document being processed; its id is reported in the completion message.</param>
/// <param name="croppedPart">Source area whose name is attached to the result.</param>
/// <param name="roiExtractResult">Extracted regions handed to the recognizer.</param>
/// <returns>The recognition result as a <see cref="RecognizedChoicePart"/>.</returns>
/// <exception cref="InvalidOperationException">The recognizer did not produce a choice part.</exception>
private RecognizedChoicePart RecognizeChoicePart(OrderedBitmap page, IActorRef senderRef, DocumentToProcess document, ICroppedArea croppedPart, ICroppedArea roiExtractResult)
{
    var recognitionResult = _handwrittenChoiceRecognizer.RecognizeFrom(roiExtractResult).Result as RecognizedChoicePart;

    // Fix: the unchecked 'as' cast was dereferenced immediately below, so a
    // non-choice result surfaced as an uninformative NullReferenceException.
    if (recognitionResult == null)
    {
        throw new InvalidOperationException(
            "Choice recognizer did not return a RecognizedChoicePart for area '" +
            croppedPart.AreaUsedForCropping.AreaName + "'.");
    }

    recognitionResult.AreaName = croppedPart.AreaUsedForCropping.AreaName;
    senderRef.Tell(new DocumentPageChoiceAreaProcessingCompleted(document.Id, (int)page.Order,
        croppedPart.AreaUsedForCropping.AreaName, recognitionResult));
    return recognitionResult;
}
/// <summary>
/// Decides whether a choice (checkbox-style) area is checked. The area must
/// contain exactly one cropped part; a null bitmap in that part means the field
/// was left empty (unchecked), any surviving content means it is checked.
/// </summary>
/// <param name="croppedArea">Area expected to hold exactly one part.</param>
/// <returns>Success with a <see cref="RecognizedChoicePart"/>, or Failure on bad input.</returns>
public ProcessingResult<IRecognizedPart<bool>> RecognizeFrom(ICroppedArea croppedArea)
{
    //TODO: ResultDetails for - expected only one element
    if (croppedArea.CroppedParts.Count != 1)
    {
        return ProcessingResult<IRecognizedPart<bool>>.Failure(null);
    }

    var onlyPart = croppedArea.CroppedParts.Single();

    //TODO: ResultDetails for - empty element in CroppedArea
    if (onlyPart == null)
    {
        return ProcessingResult<IRecognizedPart<bool>>.Failure(null);
    }

    //Empty field - no input; otherwise - is checked via handwriting
    var isChecked = onlyPart.Bitmap != null;
    return ProcessingResult<IRecognizedPart<bool>>.Success(
        new RecognizedChoicePart(croppedArea.AreaUsedForCropping.Id, croppedArea.DocumentId, isChecked));
}