/// <summary>
/// Loads a persisted document, its template definition, and its page bitmaps,
/// assembling a complete <c>DocumentToProcess</c> aggregate for the pipeline.
/// </summary>
/// <param name="query">Query holding the identifier of the document to load.</param>
/// <returns>Success with the assembled document, or Failure on bad input / lookup errors.</returns>
public OperationResult <DocumentToProcess> Handle(GetDocumentToProcess query)
{
    // Guard: reject a missing query or an empty document identifier up front.
    if (query == null || query.DocumentId == Guid.Empty)
    {
        return OperationResult <DocumentToProcess> .Failure(new EmptyInput());
    }

    DocumentToProcessPersistenceModel persistedDocument;
    using (var context = new ImageProcessingContext(_persistenceConfiguration))
    {
        persistedDocument = context.DocumentsToProcess.Single(doc => doc.Id == query.DocumentId);
    }

    //TODO: Separating .Templating and .ImageProcessing into microservices will force this query to be HTTP request, not DB call
    var templateResult = _getTemplateById.Handle(new GetTemplateDefinitionById
    {
        TemplateId = persistedDocument.TemplateDefinitionIdentifier
    });
    if (!templateResult.Successful)
    {
        return OperationResult <DocumentToProcess> .Failure(templateResult.Details);
    }

    var bitmapsResult = _getDocumentBitmapsHandler.Handle(new GetBitmapsForDocumentToProcess
    {
        DocumentId = query.DocumentId
    });
    if (!bitmapsResult.Success)
    {
        //TODO: FileStorageQueryFailed
        return OperationResult <DocumentToProcess> .Failure(null);
    }

    var documentToProcess = new DocumentToProcess(
        persistedDocument.RequesterIdentifier,
        templateResult.Result,
        persistedDocument.Id);

    // Attach every stored page bitmap, preserving its recorded order.
    foreach (var bitmap in bitmapsResult.Result)
    {
        documentToProcess.AddDocumentPageBitmap(bitmap.AsOrderedBitmap());
    }

    return OperationResult <DocumentToProcess> .Success(documentToProcess);
}
/// <summary>
/// Isolates the user's handwriting by subtracting the blank template canvas from
/// the submitted page bitmap and inverting the difference.
/// </summary>
/// <param name="userInput">Submitted page image; ownership transfers here and it is disposed by this method.</param>
/// <param name="templatePage">Template page whose blank canvas bitmap is fetched for comparison.</param>
/// <returns>
/// Success with the extracted-handwriting bitmap (caller owns and must dispose it);
/// Failure on null arguments, a missing canvas, or an unsupported image format.
/// </returns>
public ProcessingResult <Bitmap> ExtractHandwrittenInput(Bitmap userInput, ITemplatePageDefinition templatePage)
{
    if (userInput == null || templatePage == null)
    {
        return ProcessingResult <Bitmap> .Failure(new EmptyInput());
    }

    // Caller hands over ownership of userInput; dispose it on every exit path.
    using (userInput)
    {
        Bitmap canvasBitmap = null;
        try
        {
            canvasBitmap = _canvasBitmapQueryHandler.Handle(
                new GetCanvasBitmapForTemplatePage { TemplatePageId = templatePage.Id }).FileData.AsBitmap();
            if (canvasBitmap == null)
            {
                //TODO: ResultMessage for failed HTTP request or pipe from ResourceQueryHandler
                return new ProcessingResult <Bitmap>(StepOutcome.Failure, null, null);
            }

            // The Difference filter requires both bitmaps to share the same dimensions,
            // so scale the canvas to the user input's size when they differ.
            if (canvasBitmap.Size != userInput.Size)
            {
                using (var oldCanvas = canvasBitmap)
                {
                    canvasBitmap = new ResizeBilinear(userInput.Width, userInput.Height).Apply(oldCanvas);
                }
            }

            // BUG FIX: the intermediate bitmap returned by Difference.Apply was never
            // disposed, leaking a GDI+ handle on every call. Dispose it as soon as
            // Invert has consumed it; only the final inverted bitmap escapes.
            using (var difference = new Difference(userInput).Apply(canvasBitmap))
            {
                var result = new Invert().Apply(difference);
                return ProcessingResult <Bitmap> .Success(result);
            }
        }
        catch (UnsupportedImageFormatException)
        {
            return ProcessingResult <Bitmap> .Failure(new UnsupportedImageFormat());
        }
        finally
        {
            canvasBitmap?.Dispose();
        }
    }
}
//TODO: Better error handling
/// <summary>
/// Fetches the "label index -> character" map for the given expected-data kind and
/// parses it. Each non-empty line must hold two space-separated integers: the label
/// index and the character's code point.
/// </summary>
/// <param name="expectedData">Selects which label map to fetch.</param>
/// <returns>The parsed map, or null when fetching or parsing fails (existing contract).</returns>
public Dictionary <int, char> FetchMapFromUrl(ImageRecognitionExpectedData expectedData)
{
    try
    {
        var labelMap = new Dictionary <int, char>();
        var content = _getLabelMapsHandler.Handle(new GetImageRecognitionLabelMap { ExpectedData = expectedData });
        foreach (var line in content.Split(new[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries))
        {
            var parts = line.Split(' ');
            // BUG FIX (CA1305): parse with the invariant culture so the map is read
            // identically regardless of the host's regional settings.
            labelMap.Add(
                int.Parse(parts[0], System.Globalization.CultureInfo.InvariantCulture),
                (char)int.Parse(parts[1], System.Globalization.CultureInfo.InvariantCulture));
        }
        return labelMap;
    }
    catch
    {
        // Preserves the existing null-on-failure contract; callers must check for null.
        // NOTE(review): the blanket catch hides the failure cause — add logging here
        // when addressing the TODO above.
        return null;
    }
}
/// <summary>
/// Resolves the resource-specific query handler for the requested HL7/FHIR type,
/// executes the query for the given key, and wraps the data in a FhirResponse.
/// </summary>
/// <param name="key">Resource key; must carry a type name and resource id and no version.</param>
/// <returns>The FHIR response built from the resolved resource data.</returns>
/// <exception cref="InvalidOperationException">A required service is not registered in the provider.</exception>
public FhirResponse Handle(IKey key)
{
    Validate.HasTypeName(key);
    Validate.HasResourceId(key);
    Validate.HasNoVersion(key);
    Validate.Key(key);

    var hl7Type = key.TypeName.GetHl7ModelType();

    //Startup.AutofacContainer.Resolve<ISynapseResourceService<CorePerson>>();
    // BUG FIX: the bare "as" casts could yield null and surface later as an opaque
    // NullReferenceException far from the cause; fail fast with a descriptive error.
    var modelFactory = this._provider.GetService(typeof(ResourceQueryHandlerFactory)) as ResourceQueryHandlerFactory;
    if (modelFactory == null)
    {
        throw new InvalidOperationException($"Service '{nameof(ResourceQueryHandlerFactory)}' is not registered.");
    }

    IResourceQueryHandler synapseModelHandler = modelFactory.GetHandler(hl7Type);
    var fhirParam = FHIRParam.Create(key.TypeName, key.ResourceId, key.VersionId);
    var resourceData = synapseModelHandler.Handle(fhirParam);

    var fhirResponseFactory = this._provider.GetService(typeof(IFhirResponseFactory)) as IFhirResponseFactory;
    if (fhirResponseFactory == null)
    {
        throw new InvalidOperationException($"Service '{nameof(IFhirResponseFactory)}' is not registered.");
    }
    return fhirResponseFactory.GetFhirResponse(resourceData, key);
}
/// <summary>
/// Runs the recognition model over each flattened bitmap of a cropped area
/// (in ascending page order) and aggregates the per-part predictions into a
/// recognized text part for the whole area.
/// </summary>
/// <param name="croppedArea">Cropped area with its ordered, flattened bitmaps.</param>
/// <returns>Success with a <c>RecognizedTextPart</c>; exceptions currently propagate (see TODO).</returns>
public ProcessingResult <IRecognizedPart <string> > RecognizeFrom(FlattenedCroppedArea croppedArea)
{
    var expectedData = croppedArea.AreaUsedForCropping.ExpectedData.AsFileStorageModel();
    var modelBytes = _getRecognitionModelHandler.Handle(
        new GetImageRecognitionModel { ExpectedData = expectedData });
    var model = Function.Load(modelBytes, _device);
    try
    {
        var areaPartsPredictions = new List <OrderedRecognitionOutput>();
        foreach (var flattenedBitmap in croppedArea.FlattenedBitmaps.OrderBy(fb => fb.Order))
        {
            // A null bitmap means an empty part: record a default char with full confidence.
            if (flattenedBitmap.FlattenedBitmap == null)
            {
                areaPartsPredictions.Add(new OrderedRecognitionOutput(default(char), 1f, flattenedBitmap.Order));
                continue;
            }

            Variable inputVar = model.Arguments[0];
            NDShape inputShape = inputVar.Shape;
            List <float> flattenedBitmapFeatures =
                flattenedBitmap.FlattenedBitmap.Select(feat => (float)feat).ToList();

            // Create input data map: a single batch built from the flattened pixel features.
            var inputDataMap = new Dictionary <Variable, Value>();
            var inputVal = Value.CreateBatch(inputShape, flattenedBitmapFeatures, _device);
            inputDataMap.Add(inputVar, inputVal);

            Variable outputVar = model.Output;
            // Create output data map. Using null as Value to indicate using system allocated memory.
            // Alternatively, create a Value object and add it to the data map.
            var outputDataMap = new Dictionary <Variable, Value>();
            outputDataMap.Add(outputVar, null);

            // Start evaluation on the device
            model.Evaluate(inputDataMap, outputDataMap, _device);

            // Get evaluate result as dense output and score every label.
            var outputVal = outputDataMap[outputVar];
            var outputData = outputVal.GetDenseData <float>(outputVar);
            Dictionary <int, float> outputPred = new Dictionary <int, float>();
            for (int i = 0; i < outputData[0].Count; i++)
            {
                outputPred.Add(i, outputData[0][i]);
            }
            var topPrediction = outputPred.GetTopRelativePrediction();

            areaPartsPredictions.Add(new OrderedRecognitionOutput(
                _labelMapConfiguration.GetCharFromLabel(
                    croppedArea.AreaUsedForCropping.ExpectedData,
                    topPrediction.EvaluationLabel),
                topPrediction.RelativePercentageScore,
                flattenedBitmap.Order));
        }
        return ProcessingResult <IRecognizedPart <string> > .Success(
            new RecognizedTextPart(croppedArea.AreaUsedForCropping.Id, areaPartsPredictions, croppedArea.DocumentId));
    }
    catch (Exception)
    {
        //TODO: Return failure
        // BUG FIX: "throw ex;" reset the stack trace; a bare rethrow preserves it.
        throw;
    }
}