/// <summary>
/// Handles image files dropped onto the OCR debug panel. If the first dropped image
/// matches the configured OCR resolution it is run through the OCR; otherwise the
/// image is only displayed in the OCR control.
/// </summary>
/// <param name="sender">The panel raising the event (unused).</param>
/// <param name="e">Drag event data; only <see cref="DataFormats.FileDrop"/> payloads are handled.</param>
private void OCRDebugLayoutPanel_DragDrop(object sender, DragEventArgs e)
{
    // Ignore drops that do not carry at least one file path.
    if (!(e.Data.GetData(DataFormats.FileDrop) is string[] files && files.Any()))
    {
        return;
    }

    cbEnableOutput.Checked = true;
    try
    {
        var bmp = new Bitmap(files[0]);
        if (bmp.Width == nudResolutionWidth.Value && bmp.Height == nudResolutionHeight.Value)
        {
            // The OCR is invoked with the file path; the preview bitmap is no
            // longer needed, so release its GDI resources immediately.
            bmp.Dispose();
            DoOcr?.Invoke(files[0], true);
        }
        else
        {
            // Resolution mismatch: only display the image.
            // NOTE(review): bmp is handed to the control here — presumably the
            // control takes ownership and disposes it; confirm in DisplayBmpInOcrControl.
            DisplayBmpInOcrControl(bmp);
        }
    }
    catch (Exception ex)
    {
        // Report the failure (e.g. the file is unreadable or not a valid image)
        // instead of silently swallowing it, matching the error reporting style
        // of the other drag-drop handler in this file.
        txtOCROutput.Text = $"Error during the OCR:{Environment.NewLine}{ex.Message}";
    }
}
/// <summary>
/// Synchronously performs an OCR of the given screen region and returns the engine's response.
/// </summary>
/// <param name="Location">Upper-left corner of the region to recognize.</param>
/// <param name="SizeOfRec">Size of the region to recognize.</param>
/// <param name="OCREngineMode">Engine mode passed through to the OCR call (default "1").</param>
/// <returns>The OCR engine's response string.</returns>
public static string GetOcrResponse(Point Location, Size SizeOfRec, string OCREngineMode = "1")
{
    // Block on the async OCR call directly instead of spinning up an extra
    // thread-pool task (Task.Factory.StartNew + Wait) just to call .Result.
    // GetAwaiter().GetResult() also rethrows the original exception rather
    // than wrapping it in an AggregateException.
    // NOTE(review): this is still sync-over-async; calling it from a UI thread
    // can deadlock if DoAsync captures the synchronization context. Prefer
    // awaiting DoAsync directly where the caller can be made async.
    return DoOcr.DoAsync(Location, SizeOfRec, OCREngineMode).GetAwaiter().GetResult();
}
/// <summary>
/// Handles image files dropped onto the OCR debug panel: adopts the image's size as the
/// OCR resolution if none is configured yet, runs the OCR when the sizes match, or
/// displays the image together with an explanatory error message when they don't.
/// </summary>
/// <param name="sender">The panel raising the event (unused).</param>
/// <param name="e">Drag event data; only <see cref="DataFormats.FileDrop"/> payloads are handled.</param>
private void OCRDebugLayoutPanel_DragDrop(object sender, DragEventArgs e)
{
    // Ignore drops that do not carry at least one file path.
    if (!(e.Data.GetData(DataFormats.FileDrop) is string[] files && files.Any()))
    {
        return;
    }
    cbEnableOutput.Checked = true;
    try
    {
        var bmp = new Bitmap(files[0]);
        // No resolution configured yet (both spinners at 0): adopt the dropped
        // image's dimensions so the size check below passes.
        if (nudResolutionWidth.Value == 0 && nudResolutionHeight.Value == 0)
        {
            nudResolutionWidth.Value = bmp.Width;
            nudResolutionHeight.Value = bmp.Height;
        }
        if (bmp.Width == nudResolutionWidth.Value && bmp.Height == nudResolutionHeight.Value)
        {
            // The OCR is invoked with the file path; the preview bitmap is no
            // longer needed, so release its GDI resources immediately.
            bmp.Dispose();
            // Enable training mode when the config has no recognition patterns yet.
            // The '!' is lifted over the nullable chain: if any link is null the
            // expression is null and '?? false' skips the assignment, so the
            // dereference inside the branch is safe.
            if (!ArkOcr.Ocr.ocrConfig?.RecognitionPatterns?.Texts?.Any() ?? false)
            {
                ArkOcr.Ocr.ocrConfig.RecognitionPatterns.TrainingSettings.IsTrainingEnabled = true;
            }
            DoOcr?.Invoke(files[0], true, false);
        }
        else
        {
            // Size mismatch: show the image and explain why no OCR was performed.
            // NOTE(review): bmp is handed to the control — presumably it takes
            // ownership and disposes it; confirm in DisplayBmpInOcrControl.
            DisplayBmpInOcrControl(bmp);
            txtOCROutput.Text = $"Error: the current ocr configuration is set to an image size of{Environment.NewLine}{nudResolutionWidth.Value} × {nudResolutionHeight.Value} px,{Environment.NewLine}the image has a size of{Environment.NewLine}{bmp.Width} × {bmp.Height} px.";
        }
    }
    catch (Exception ex)
    {
        // Report failures (e.g. unreadable or non-image file) to the user.
        txtOCROutput.Text = $"Error during the OCR:{Environment.NewLine}{ex.Message}";
    }
}