public static async Task<OCRData> Detect(IEditableImage Img, Context Ctx, int RectWidth, int RectHeight)
{
    if (TesAPI == null)
    {
        TesAPI = new TesseractApi(Ctx, AssetsDeployment.OncePerInitialization);
        await TesAPI.Init("eng");
        TesAPI.SetVariable("tessedit_char_whitelist", "0123456789kmKM");
    }

    OCRData Result = new OCRData();
    Result.PngOriginal = Img.ToPng();

    // Crop the detection region from the centre of the image and convert it to monochrome
    Img = Img.Crop((Img.Width / 2) - (RectWidth / 2), (Img.Height / 2) - (RectHeight / 2), RectWidth, RectHeight).ToMonochrome();
    Result.PngCropped = Img.ToPng();

    using (MemoryStream PngImage = new MemoryStream())
    {
        using (Bitmap Pic = ProcessImage((Bitmap)Img.GetNativeImage()))
            await Pic.CompressAsync(Bitmap.CompressFormat.Png, 100, PngImage);

        PngImage.Seek(0, SeekOrigin.Begin);
        await TesAPI.SetImage(PngImage);
        Result.PngProcessed = PngImage.ToArray();
    }

    Result.Text = TesAPI.Text;
    return Result;
}
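// ProcessImage is not shown in these examples. Below is a minimal sketch of what such a
// pre-processing step might look like: it assumes the goal is a simple binarisation of the
// cropped Android Bitmap before it is handed to Tesseract. The method body and the
// threshold default are assumptions, not part of the original code.
private static Bitmap ProcessImage(Bitmap source, int threshold = 128)
{
    int width = source.Width;
    int height = source.Height;
    int[] pixels = new int[width * height];
    source.GetPixels(pixels, 0, width, 0, 0, width, height);

    for (int i = 0; i < pixels.Length; i++)
    {
        // Extract RGB from the ARGB pixel and compute a simple luminance value
        int p = pixels[i];
        int r = (p >> 16) & 0xFF;
        int g = (p >> 8) & 0xFF;
        int b = p & 0xFF;
        int luminance = (r * 299 + g * 587 + b * 114) / 1000;

        // Map each pixel to pure black or pure white
        pixels[i] = luminance < threshold ? unchecked((int)0xFF000000) : unchecked((int)0xFFFFFFFF);
    }

    Bitmap result = Bitmap.CreateBitmap(width, height, Bitmap.Config.Argb8888);
    result.SetPixels(pixels, 0, width, 0, 0, width, height);
    return result;
}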
async partial void BtnCropping_Activated(UIBarButtonItem sender)
{
    Console.WriteLine(imagenToCrop.Image.Orientation);
    Console.WriteLine(imagenToCrop.Image.AccessibilityFrame);

    // Crop the image to the selected area and show the result
    var croppedCGImage = imagenToCrop.Image.CGImage.WithImageInRect(cropArea);
    var croppedImage = new UIImage(croppedCGImage);
    imagenToCrop.Image = croppedImage;
    ScrollViewFoto.ZoomScale = 1;

    // Run OCR on the cropped image and show the recognised text in an alert
    TesseractApi api = new TesseractApi();
    if (await api.Init("eng"))
    {
        if (await api.SetImage(imagenToCrop.Image.ToNSData()))
        {
            //lblTexto.Text = api.Text;
            var okAlertController = UIAlertController.Create("Encontrado:", api.Text, UIAlertControllerStyle.Alert);
            okAlertController.AddAction(UIAlertAction.Create("Ok", UIAlertActionStyle.Default, null));
            PresentViewController(okAlertController, true, null);
        }
    }
}
// Start the API and load the training data
public static async Task<bool> initTes(TesseractApi api)
{
    OCR.api = api;
    bool initialised = await api.Init("eng");
    return initialised;
}
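// A minimal usage sketch for the helper above, assuming it is called once at start-up
// (as in the OnCreate example further down) and that OCR.api is then accessible for
// recognition. The ExtractText name and imageData parameter are illustrative assumptions.
public static async Task<string> ExtractText(byte[] imageData)
{
    // initTes must have completed successfully before recognition is attempted
    if (await OCR.api.SetImage(imageData))
        return OCR.api.Text;

    return string.Empty;
}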
private async Task _GetText(Bitmap cropped)
{
    if (!TesseractApi.Initialized)
    {
        await TesseractApi.Init("eng");
    }

    // Restrict recognition to the name region of the card and to alphabetic characters
    TesseractApi.SetRectangle(new Tesseract.Rectangle((int)(cropped.Width * 0.24), 10, (int)(cropped.Width * 0.35), (int)(cropped.Height * 0.065)));
    TesseractApi.SetWhitelist("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz");

    if (await TesseractApi.SetImage(File.OpenRead(Path.ChangeExtension(ImagePath, "card"))))
    {
        string s = TesseractApi.Text;
        if (Name == "" || Name == "NO TEXT DETECTED")
        {
            if (s != "")
            {
                // Drop a leading lower-case character, then title-case the rest
                if (s[0].ToString() == s[0].ToString().ToLower())
                {
                    s = s.Substring(1, s.Length - 1);
                }
                TextInfo textInfo = new CultureInfo("en-US", false).TextInfo;
                s = textInfo.ToTitleCase(s.ToLower());
            }
            else
            {
                s = "NO TEXT DETECTED";
            }
            Name = s;
        }
    }
}
async Task<TesseractApi> InitaliazeApi()
{
    var api = new TesseractApi(this.Context, AssetsDeployment.OncePerVersion);
    await api.Init("ocrb");

    // OCR-B characters used in machine-readable zones: letters, digits and the '<' filler
    api.SetWhitelist("ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789<");
    api.Progress += ScanProgress;
    return api;
}
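// One possible way to consume the helper above (ScanMrzAsync, _mrzApi and imageData are
// illustrative assumptions, not part of the original code): create the configured API
// once and feed it the captured image bytes.
TesseractApi _mrzApi;

async Task<string> ScanMrzAsync(byte[] imageData)
{
    if (_mrzApi == null)
        _mrzApi = await InitaliazeApi();

    if (await _mrzApi.SetImage(imageData))
        return _mrzApi.Text;

    return null;
}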
public override void ViewDidLoad()
{
    base.ViewDidLoad();
    OCR = new TesseractApi();

    // Set up the capture session with the default camera as input and photo output
    CaptureSession = new AVCaptureSession();
    ImageOutput = new AVCapturePhotoOutput();
    var cameraDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
    var cameraInput = AVCaptureDeviceInput.FromDevice(cameraDevice);
    CaptureSession.AddInput(cameraInput);
    CaptureSession.AddOutput(ImageOutput);
    SetupUI();
    CaptureSession.StartRunning();

    Camera = new CameraHandler();
    Camera.FinishedProcessing += async delegate
    {
        PictureView.Image = new UIImage(Camera.Picture, 1f, UIImageOrientation.Right);
        Capture = PictureView.Capture();
        await InitReader();
    };

    OCRButton.TouchUpInside += async delegate
    {
        HandleButtonClick();
    };

    AlphaNumericSwitch.ValueChanged += async delegate
    {
        await SetOcrTextLabel();
    };

    // Selection slider setup
    SelectionBarSlider.TouchUpInside += async delegate
    {
        await InitReader();
    };
    SelectionBarSlider.TouchUpOutside += async delegate
    {
        await InitReader();
    };
    SelectionBarSlider.ValueChanged += delegate
    {
        var tempFrame = SelectionBarView.Frame;
        tempFrame.Y = (SelectionBarSlider.Value * 92) + 22;
        SelectionBarView.Frame = tempFrame;
    };
}
protected override void OnCreate(Bundle bundle)
{
    base.OnCreate(bundle);
    SetContentView(Resource.Layout.Main);

    _api = new TesseractApi(this, AssetsDeployment.OncePerInitialization);
    // Init returns a Task; it is started fire-and-forget here, so recognition must not
    // be attempted before initialisation has completed
    _api.Init("eng");

    SurfaceView cameraSurface = FindViewById<SurfaceView>(Resource.Id.cpPreview);
    ISurfaceHolder holder = cameraSurface.Holder;
    holder.AddCallback(this);
    holder.SetType(SurfaceType.PushBuffers);
}
public async void TestWithoutDispiose()
{
    ITesseractApi api = new TesseractApi(Android.App.Application.Context, AssetsDeployment.OncePerInitialization);
    await api.Init("eng");
    for (int i = 0; i < 20; i++)
    {
        using (var stream = TesseractApiRecogniseTest.LoadSample("sample2.png"))
        {
            var result = await api.SetImage(stream);
            Assert.IsTrue(result);
            Assert.AreEqual("ABCDE FGHI\nJKLHN OPQR\nSTUVVJXYZ", api.Text);
        }
    }
    api.Dispose();
}
public async void TestWithoutDispiose()
{
    ITesseractApi api = new TesseractApi();
    await api.Init("eng");
    for (int i = 0; i < 20; i++)
    {
        using (var stream = TesseractApiRecogniseTest.LoadSample("sample2.png"))
        {
            var result = await api.SetImage(stream);
            Assert.IsTrue(result);
            Assert.AreEqual("ABCDE FGHI\nJKLHN OPQR\nSTUVVJXYZ\n\n", api.Text);
        }
    }
    api.Dispose();
}
protected override void OnCreate(Bundle bundle)
{
    base.OnCreate(bundle);
    SetContentView(Resource.Layout.main);

    var button = FindViewById<Button>(Resource.Id.button);
    api = new TesseractApi(this, AssetsDeployment.OncePerVersion);
    initialize();
    button.Click += delegate { ExtractText(); };
    CrossCurrentActivity.Current.Init(this, bundle);
}
async Task<string> IORC.ORCWorkDashbordVehicle(byte[] data)
{
    string textResult = null;
    ITesseractApi api = new TesseractApi(Android.App.Application.Context, AssetsDeployment.OncePerVersion);
    bool initialised = await api.Init("eng");
    if (initialised)
    {
        bool success = await api.SetImage(data);
        if (success)
        {
            textResult = api.Text;
        }
    }
    return textResult;
}
private void Initialize()
{
    _tesseractApi = new TesseractApi(Context, AssetsDeployment.OncePerVersion);

    // Restrict recognition to hiragana and katakana; applied once, when first needed
    setWhitelist = new CallOnce(() =>
    {
        _tesseractApi.SetWhitelist("あいうえおかきくけこさしすせそたちつてとなにぬねのはひふへほまみむめももやゆよらりるれろわゐゑをがぎぐげござじずぜぞだぢづでどばびぶべぼぱぴぷぺぽんアイウエオカキクケコサシスセソタチツテトナニヌネノハヒフヘホマミムメモヤユヨラリルレロワヲン");
    });

    currentText = string.Empty;
    SetBackgroundColor(Color.White);

    // Path and paint used for drawing strokes
    _path = new Path();
    _mPaint = new Paint
    {
        Dither = true,
        Color = Color.Black,
        StrokeJoin = Paint.Join.Round,
        StrokeCap = Paint.Cap.Round,
        StrokeWidth = 15
    };
    _mPaint.SetStyle(Paint.Style.Stroke);

    // Init returns a Task; it is started fire-and-forget here
    _tesseractApi.Init("jpn");
}
protected override async void OnActivityResult(int requestCode, [GeneratedEnum] Result resultCode, Intent data)
{
    base.OnActivityResult(requestCode, resultCode, data);
    if (requestCode == PickImageId)
    {
        if (resultCode == Result.Ok && data != null)
        {
            Uri uri = data.Data;
            imageView.SetImageURI(uri);
            Stream stream = ContentResolver.OpenInputStream(uri);

            TesseractApi api = new TesseractApi(this, AssetsDeployment.OncePerVersion);
            await api.Init("chi_sim");
            await api.SetImage(PathUtil.GetActualPathFromFile(this, data.Data));
            string text = api.Text;
            textView.SetText(text, BufferType.Normal);
        }
    }
}
private bool DoOcr()
{
    _logger.LogInformation($"Starting OCR for {_bluraySubtitles.Count} items...");
    try
    {
        TesseractApi.Initialize();
        using (var engine = new TesseractEngine(TesseractDataPath, TesseractLanguage, EngineMode.TesseractOnly))
        {
            for (var i = 0; i < _bluraySubtitles.Count; i++)
            {
                var item = _bluraySubtitles[i];

                // Blu-ray PGS timestamps are 90 kHz clock ticks, hence the division by 90
                var paragraph = new Paragraph
                {
                    Number = i + 1,
                    StartTime = new TimeCode(item.StartTime / 90.0),
                    EndTime = new TimeCode(item.EndTime / 90.0),
                    Text = GetText(engine, i)
                };
                _subtitle.Paragraphs.Add(paragraph);

                if (i % 50 == 0)
                {
                    _logger.LogInformation($"Processed item {paragraph.Number}.");
                }
            }
            _logger.LogInformation("Finished OCR.");
            return true;
        }
    }
    catch (Exception e)
    {
        _logger.LogError(e, "Error: " + e.Message + e.StackTrace);
        return false;
    }
}
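// GetText is not shown above. A sketch of what it might do with the Tesseract .NET wrapper
// used in this example is below; it assumes each Blu-ray subtitle item can be rendered to a
// PNG byte array (the GetPng helper is an assumption, not part of the original code).
private string GetText(TesseractEngine engine, int index)
{
    byte[] png = _bluraySubtitles[index].GetPng(); // hypothetical helper
    using (var pix = Pix.LoadFromMemory(png))
    using (var page = engine.Process(pix))
    {
        return page.GetText().Trim();
    }
}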
public async Task<Tuple<string, ImageSource>> AndroidTesseractStart(ImageSource image)
{
    Context context = Android.App.Application.Context;
    string result = "";
    ImageSource resultImg = image; // default
    try
    {
        string whitelist = "0123456789";
        whitelist += "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
        whitelist += "abcdefghijklmnopqrstuvwxyz";

        TesseractApi api = new TesseractApi(context, AssetsDeployment.OncePerInitialization);
        await api.Init("eng");
        api.SetWhitelist(whitelist);

        Bitmap bitmap = await GetBitmapFromImageSourceAsync(image, context); // ImageSource -> Bitmap
        Bitmap rebitmap = BitMapWidthCutting(bitmap);                        // cut the bitmap width in half
        rebitmap = BitMapLineDelete(rebitmap, 2);                            // delete lines from the bitmap
        //BitMapChack(rebitmap);                                             // dump the bitmap to the console
        resultImg = BMPtoImgsource(rebitmap);                                // Bitmap -> ImageSource
        byte[] bitmapData = ConvertBitmapToByte(rebitmap);                   // Bitmap -> byte[]

        bool success = await api.SetImage(bitmapData);
        if (success)
        {
            result = api.Text;
        }
        return Tuple.Create(result, resultImg);
    }
    catch (Exception e)
    {
        return Tuple.Create(e.Message, resultImg);
    }
}
public void Setup() { _api = new TesseractApi(); }
public void Setup () { _api = new TesseractApi (Android.App.Application.Context, AssetsDeployment.OncePerInitialization); }
public void Setup () { _api = new TesseractApi (Android.App.Application.Context); }
protected override async void OnCreate(Bundle savedInstanceState)
{
    base.OnCreate(savedInstanceState);

    // Set view
    SetContentView(Resource.Layout.TestArea);

    // OpenCV
    mOpenCvCameraView = FindViewById<CameraBridgeViewBase>(Resource.Id.TestGreyView);
    mOpenCvCameraView.Visibility = ViewStates.Visible;
    mOpenCvCameraView.SetCvCameraViewListener2(this);
    mLoaderCallback = new Callback(this, mOpenCvCameraView);

    // TextViews
    textseekThresh = FindViewById<TextView>(Resource.Id.TestAreaTextSeek1);
    textseekBlur = FindViewById<TextView>(Resource.Id.TestAreaTextSeek2);
    textseekSize = FindViewById<TextView>(Resource.Id.TestAreaTextSeek3);
    textResult = FindViewById<TextView>(Resource.Id.TestAreaResultText);

    // Buttons
    buttonDetectText = FindViewById<Button>(Resource.Id.TestAreaButtonDetectText);
    buttonSelectImageGallery = FindViewById<Button>(Resource.Id.TestAreaSelectImageGallery);
    buttonExtractText = FindViewById<Button>(Resource.Id.TestAreaButtonExtractText);
    buttonGrey = FindViewById<Button>(Resource.Id.TestAreaButtonGrey);
    buttonSize = FindViewById<Button>(Resource.Id.TestAreaButtonSize);

    // ImageViews
    imgInput = FindViewById<ImageView>(Resource.Id.TestAreaImageView);
    imgResult = FindViewById<ImageView>(Resource.Id.TestAreaImageResultGrey);

    // Event listeners
    buttonGrey.Click += delegate
    {
        Bitmap img = ((BitmapDrawable)imgInput.Drawable).Bitmap;
        double thresh = Convert.ToDouble(textseekThresh.Text.ToString());
        double blur = Convert.ToDouble(textseekBlur.Text.ToString());
        Bitmap result = ImageOp.greyImg(img, thresh, blur);
        imgResult.SetImageBitmap(result);
    };
    buttonDetectText.Click += delegate
    {
        Bitmap img = ((BitmapDrawable)imgResult.Drawable).Bitmap;
        Bitmap result = ImageOp.detectTextRect(img);
        imgResult.SetImageBitmap(result);
    };
    buttonExtractText.Click += async delegate
    {
        Bitmap img = ((BitmapDrawable)imgResult.Drawable).Bitmap;
        string result = await ImageOp.detectAndExtractText(img);
        textResult.Text = result;
    };
    buttonSize.Click += delegate
    {
        Bitmap img = ((BitmapDrawable)imgInput.Drawable).Bitmap;
        double size = Convert.ToDouble(textseekSize.Text.ToString());
        size = size / 100;
        Bitmap imgTemp = ImageOp.resizeImage(img, size, size);
        double thresh = Convert.ToDouble(textseekThresh.Text.ToString());
        double blur = Convert.ToDouble(textseekBlur.Text.ToString());
        Bitmap result = ImageOp.greyImg(imgTemp, thresh, blur);
        imgResult.SetImageBitmap(result);
    };
    buttonSelectImageGallery.Click += delegate
    {
        var imageIntent = new Intent();
        imageIntent.SetType("image/*");
        imageIntent.SetAction(Intent.ActionGetContent);
        StartActivityForResult(Intent.CreateChooser(imageIntent, "Select photo"), 0);
    };

    // Sliders
    seekThresh = FindViewById<SeekBar>(Resource.Id.TestAreaSeekBar1);
    seekBlur = FindViewById<SeekBar>(Resource.Id.TestAreaSeekBar2);
    seekSize = FindViewById<SeekBar>(Resource.Id.TestAreaSeekBar3);

    // Slider listeners
    seekThresh.ProgressChanged += (object sender, SeekBar.ProgressChangedEventArgs e) =>
    {
        if (e.FromUser)
        {
            textseekThresh.Text = string.Format("" + e.Progress);
        }
    };
    seekBlur.ProgressChanged += (object sender, SeekBar.ProgressChangedEventArgs e) =>
    {
        if (e.FromUser)
        {
            textseekBlur.Text = string.Format("" + e.Progress);
        }
    };
    seekSize.ProgressChanged += (object sender, SeekBar.ProgressChangedEventArgs e) =>
    {
        if (e.FromUser)
        {
            textseekSize.Text = string.Format("" + e.Progress);
        }
    };

    // Create the Tesseract API once at start-up
    api = new TesseractApi(this, AssetsDeployment.OncePerVersion);
    bool check = await OCR.initTes(api);
}
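// ImageOp.detectAndExtractText is not shown in these examples. A plausible sketch, assuming
// it reuses the shared api set up via OCR.initTes above and that the bitmap is passed to
// Tesseract as a PNG stream; the method body is an assumption, not the original implementation.
public static async Task<string> detectAndExtractText(Bitmap img)
{
    using (var stream = new MemoryStream())
    {
        // Write the bitmap as PNG into the stream, then rewind it for Tesseract
        await img.CompressAsync(Bitmap.CompressFormat.Png, 100, stream);
        stream.Seek(0, SeekOrigin.Begin);

        if (await OCR.api.SetImage(stream))
            return OCR.api.Text;

        return string.Empty;
    }
}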
public void Tear() { _api.Dispose(); _api = null; }
public async Task onPictureTakeAsync(byte[] data, Camera camera)
{
    /*
    ContextWrapper cw = new ContextWrapper(ApplicationContext);
    imageFileFolder = cw.GetExternalFilesDir(Android.OS.Environment.DirectoryPictures);
    Calendar c = Calendar.Instance;
    imageFileName = new Java.IO.File(imageFileFolder, c.Time.Seconds + ".bmp");
    imageFileName.CreateNewFile();
    using (var os = new FileStream(imageFileName.AbsolutePath, FileMode.Create))
    {
        os.Write(data, 0, data.Length);
    }
    */

    // Run OCR directly on the captured image bytes
    TesseractApi tesseractApi = new TesseractApi(ApplicationContext, AssetsDeployment.OncePerInitialization);
    if (!tesseractApi.Initialized)
        await tesseractApi.Init("eng");
    var tessResult = await tesseractApi.SetImage(data);
    if (tessResult)
    {
        var a = tesseractApi.Text;
        var b = a;
    }

    // Decode the captured image and draw it onto a new bitmap
    Bitmap cameraBitmap = BitmapFactory.DecodeByteArray(data, 0, data.Length);
    int wid = cameraBitmap.Width;
    int hgt = cameraBitmap.Height;
    Bitmap resultImage = Bitmap.CreateBitmap(wid, hgt, Bitmap.Config.Argb8888);
    Canvas canvas = new Canvas(resultImage);
    canvas.DrawBitmap(cameraBitmap, 0f, 0f, null);

    // Render the overlay view into its drawing cache and compose it onto the result
    image.DrawingCacheEnabled = true;
    image.Measure(View.MeasureSpec.MakeMeasureSpec(300, MeasureSpecMode.Exactly), View.MeasureSpec.MakeMeasureSpec(300, MeasureSpecMode.Exactly));
    image.Layout(0, 0, image.MeasuredWidth, image.MeasuredHeight);
    image.BuildDrawingCache(true);
    Bitmap layoutBitmap = Bitmap.CreateBitmap(image.DrawingCache);
    image.DrawingCacheEnabled = false;
    canvas.DrawBitmap(layoutBitmap, 80f, 0f, null);

    // Save the composed image to external storage
    ContextWrapper cw = new ContextWrapper(ApplicationContext);
    imageFileFolder = cw.GetExternalFilesDir(Android.OS.Environment.DirectoryPictures);
    imageFileName = new Java.IO.File(imageFileFolder, DateTime.Now.Ticks.ToString() + ".jpg");
    imageFileName.CreateNewFile();
    try
    {
        using (var os = new FileStream(imageFileName.AbsolutePath, FileMode.Create))
        {
            resultImage.Compress(Bitmap.CompressFormat.Jpeg, 95, os);
        }
    }
    catch (Exception e)
    {
        Log.Debug("In Saving File", e + "");
    }

    dialog.Dismiss();
    var activity = new Intent(this, typeof(ImageActivity));
    activity.PutExtra("AbsolutePath", imageFileName.AbsolutePath);
    StartActivity(activity);
    Finish();
    //StartActivity(typeof(ImageActivity));
}
protected override async void OnActivityResult(int requestCode, Android.App.Result resultCode, Intent data)
{
    base.OnActivityResult(requestCode, resultCode, data);

    // Show an indeterminate progress dialog while the image is processed
    ProgressDialog progress = new ProgressDialog(this);
    progress.Indeterminate = true;
    progress.SetProgressStyle(ProgressDialogStyle.Spinner);
    progress.SetMessage("Kraunama...");
    progress.SetCancelable(false);
    progress.Show();

    try
    {
        System.Console.WriteLine("OnActivityLoad eventas");
        Intent mediaScanIntent = new Intent(Intent.ActionMediaScannerScanFile);
        Uri contentUri = Uri.FromFile(Failosas._file);
        mediaScanIntent.SetData(contentUri);
        // SendBroadcast(mediaScanIntent);

        // Resize the captured photo to the screen size before running OCR
        int height = Android.Content.Res.Resources.System.DisplayMetrics.HeightPixels;
        int width = Android.Content.Res.Resources.System.DisplayMetrics.WidthPixels;
        Failosas.bitmap = Failosas._file.Path.LoadAndResizeBitmap(width, height);

        if (Failosas.bitmap != null)
        {
            System.Console.WriteLine("Got bitmap, doing OCR...");
            TesseractApi api = new TesseractApi(mContext, AssetsDeployment.OncePerInitialization);
            await api.Init("eng");
            api.SetPageSegmentationMode(Tesseract.PageSegmentationMode.SparseText);
            await api.SetImage(Failosas._file.Path);

            // Show the recognised text in an alert dialog on the UI thread
            RunOnUiThread(() =>
            {
                string text = api.Text;
                System.Console.Error.WriteLine("Gautas text: " + text);
                Android.Widget.Toast.MakeText(this, "Pagaliau! :D", Android.Widget.ToastLength.Long).Show();
                progress.Hide();

                Android.Support.V7.App.AlertDialog.Builder alert = new Android.Support.V7.App.AlertDialog.Builder(this);
                alert.SetTitle("Gavom teksta");
                alert.SetMessage(text);
                alert.SetPositiveButton("nieko gero! :D", (senderAlert, args) =>
                {
                    Android.Widget.Toast.MakeText(this, "Gerai kad supratai!", Android.Widget.ToastLength.Short).Show();
                });
                Dialog dialog = alert.Create();
                dialog.Show();
            });
        }
        else
        {
            Android.Widget.Toast.MakeText(this, "Blogas image!", Android.Widget.ToastLength.Short).Show();
        }
    }
    catch (Exception ex)
    {
        System.Console.WriteLine("Klaida darant OCR: " + ex.ToString());
        progress.Hide();
        Android.Widget.Toast.MakeText(this, "Klaida darant OCR!", Android.Widget.ToastLength.Long).Show();
    }
    finally
    {
        GC.Collect();
        progress.Hide();
    }
}
public void Setup() { _api = new TesseractApi(Application.Context, AssetsDeployment.OncePerInitialization); }