/// <summary>
/// Crops every predicted region whose probability exceeds <paramref name="probability"/>
/// out of <paramref name="original"/>, runs handwriting recognition on each crop,
/// uploads recognized crops to Azure blob storage, and returns their metadata.
/// </summary>
/// <param name="original">Source image to crop from. Its <c>Tag</c> is used in the blob file name.</param>
/// <param name="predictionResultModel">Prediction output containing normalized bounding boxes and original image dimensions.</param>
/// <param name="probability">Exclusive lower bound; predictions at or below this score are skipped.</param>
/// <returns>List of uploaded crops with their recognition results; empty if nothing qualified.</returns>
async Task<List<CroppedImage>> CropImageAsync(Bitmap original, PredictionResultViewModel predictionResultModel, double probability)
{
    var croppedImages = new List<CroppedImage>();

    foreach (var image in predictionResultModel.Predictions)
    {
        // Guard clause instead of nesting: skip low-confidence predictions early.
        if (image.Probability <= probability)
        {
            continue;
        }

        // Bounding box values are normalized (0..1); scale them to pixel space.
        int cropWidth = (int)(predictionResultModel.OriginalWidth * image.BoundingBox.Width);
        int cropHeight = (int)(predictionResultModel.OriginalHeight * image.BoundingBox.Height);
        var section = new Rectangle(
            (int)(predictionResultModel.OriginalWidth * image.BoundingBox.Left),
            (int)(predictionResultModel.OriginalHeight * image.BoundingBox.Top),
            cropWidth,
            cropHeight);

        // An empty bitmap which will hold the cropped image.
        using (var bmp = new Bitmap(cropWidth, cropHeight))
        {
            // FIX: Graphics was previously never disposed, leaking a GDI+ handle per crop.
            using (var g = Graphics.FromImage(bmp))
            {
                // Draw the given area (section) of the source image
                // at location 0,0 on the empty bitmap (bmp).
                g.DrawImage(original, 0, 0, section, GraphicsUnit.Pixel);
            }

            using (var memoryStream = new MemoryStream())
            {
                bmp.Save(memoryStream, ImageFormat.Jpeg);

                // FIX: ToArray() was called twice, copying the buffer twice; materialize once.
                byte[] jpegBytes = memoryStream.ToArray();

                var handwrittenResult = await _handwrittenRecognitionService.AnalyzeImageAsync(jpegBytes);

                // Skip crops with no usable recognition result: null anywhere in the chain,
                // no lines at all, or any line that produced zero words.
                // FIX: Any(...) short-circuits where Count(...) > 0 always enumerated fully (CA1827).
                if (handwrittenResult?.RecognitionResult?.Lines == null
                    || handwrittenResult.RecognitionResult.Lines.Count == 0
                    || handwrittenResult.RecognitionResult.Lines.Any(l => l.Words.Count == 0))
                {
                    continue;
                }

                var id = Guid.NewGuid();
                string fileName = $"{ original.Tag }_{ id }.jpg";
                string imageUrl = await _uploadToAzureStorageService.UploadFileAsync(
                    jpegBytes,
                    fileName,
                    await _keyVaultService.GetSecretAsync("AzureStorageAccountAccessKey"),
                    "croppedimages");

                croppedImages.Add(new CroppedImage { ImageUrl = imageUrl, HandwrittenResult = handwrittenResult });
            }
        }
    }

    return croppedImages;
}
/// <summary>
/// Handles the document-upload postback: runs OCR, handwriting recognition, and
/// tag/analysis against the uploaded file sequentially, publishes each result on
/// the ViewBag for the view to render, and re-displays the page.
/// </summary>
/// <param name="model">Upload view model carrying the file and the prediction-percentage threshold.</param>
/// <returns>The Index view rendered with the original model.</returns>
public async Task<IActionResult> Index(DocumentUploadViewModel model)
{
    // Service calls are awaited one after another (they share the uploaded file).
    var ocrResult = await _opticalCharacterRecognitionService.AnalyzeImageAsync(model.File);
    var handwritingResult = await _handwrittenRecognitionService.AnalyzeImageAsync(model.File);
    var analyzedImages = await _tagAndAnalyzeService.AnalyzeImageAsync(model.File, model.PredictionPercentage);

    ViewBag.OcrResult = ocrResult;
    ViewBag.HandwrittenRecognitionResult = handwritingResult;
    ViewBag.ResultImages = analyzedImages;

    return View(model);
}