/// <summary>
/// Extract Japanese sentences from a gray image.
/// </summary>
/// <param name="grayImage">Grayscale image to scan for sentences.</param>
public void SearchOCRSentences(GrayImage grayImage)
{
    // Only the built-in NlpJdict OCR engine is handled by this path.
    if (MainWindow.UserPrefs.OcrEngine != OcrEngineType.NlpJdict)
    {
        return;
    }

    SearchImage(grayImage);
}
/// <summary>
/// Runs OCR over <paramref name="image"/> and returns the recognized text blocks.
/// </summary>
/// <param name="image">Grayscale image to recognize.</param>
/// <param name="runOcrHandler">Optional custom OCR routine; when null the default engine is used.</param>
/// <returns>Recognized text blocks, or null when the image size is invalid or OCR throws.</returns>
public static List<TextBlock> RunOcr(GrayImage image, Ocr.RunOcrHandler runOcrHandler = null)
{
    try
    {
        // Reject images outside the supported size range up front.
        if (!CheckImageSize(image.Width, image.Height))
        {
            return null;
        }

        // A caller-supplied handler takes precedence over the default engine.
        if (runOcrHandler != null)
        {
            return runOcrHandler(image);
        }

        // Debug mode visualizes each intermediate gray image while recognizing.
        return MainWindow.UserPrefs.IsOcrDebugMode
            ? Jocr.Ocr.DebugRecognizeSentences(image, JorcImageConvert.ShowJocrGrayImage)
            : Jocr.Ocr.RecognizeSentences(image);
    }
    catch (Exception e)
    {
        // Surface the failure to the user rather than crashing the UI.
        UIUtilities.ShowErrorDialog("RunOcr: " + e.Message + "\n" + e.StackTrace);
        return null;
    }
}
/// <summary>
/// Gaussian-blurs every channel of the source image (anti-aliasing low-pass),
/// then nearest-neighbor resizes it to (_newWidth, _newHeight).
/// </summary>
protected override void ApplyFilter()
{
    // Get source image size.
    int width = _sourceData[0].GetLength(0);
    int channels = _sourceData.Length;

    // Estimate a good standard deviation for the gaussian.
    // Note that gaussian isn't an ideal bandpass filter,
    // so the 0.5 factor is an experimentally determined quantity.
    // FIX: compute the downscale ratio in floating point — the original
    // "(width / _newWidth) * 0.50" did integer division first, truncating
    // the ratio (e.g. 150/100 -> 1 instead of 1.5) and under-blurring.
    double std = (double)width / _newWidth * 0.5;

    for (int i = 0; i < channels; i++)
    {
        GrayImage channel = new GrayImage(_sourceData[i]);
        channel = Convolution.Instance.GaussianConv(channel, std);
        _sourceData[i] = channel.ToByteArray2D();
    }

    // (Removed unused locals height/xStep/yStep — the resizer derives its own steps.)
    NNResize resizer = new NNResize();
    _destinationData = resizer.Apply(_sourceData, _newWidth, _newHeight);
}
/// <summary>
/// Converts a Jocr <see cref="GrayImage"/> into a 32bpp ARGB <see cref="Bitmap"/>
/// by replicating each gray value into the B, G and R channels (alpha = 255).
/// </summary>
/// <param name="image">Source grayscale image; Pixels is a row-major byte buffer.</param>
/// <returns>A new, unlocked 32bpp ARGB bitmap.</returns>
public static Bitmap JocrGrayImageToBitmap(GrayImage image)
{
    Bitmap bitmap = new Bitmap(image.Width, image.Height, PixelFormat.Format32bppArgb);
    // FIX: lock for writing (the loop writes pixels), not ReadOnly as before.
    BitmapData bmpData = bitmap.LockBits(
        new Rectangle(0, 0, bitmap.Width, bitmap.Height),
        ImageLockMode.WriteOnly,
        bitmap.PixelFormat);
    try
    {
        unsafe
        {
            // Walk row by row honoring Stride; the original wrote the buffer
            // sequentially, which silently assumes Stride == Width * 4.
            for (int y = 0; y < image.Height; y++)
            {
                byte* row = (byte*)bmpData.Scan0 + y * bmpData.Stride;
                int src = y * image.Width;
                for (int x = 0; x < image.Width; x++)
                {
                    byte gray = image.Pixels[src + x];
                    int col = x * 4;
                    row[col] = gray;     // B
                    row[col + 1] = gray; // G
                    row[col + 2] = gray; // R
                    row[col + 3] = 255;  // A (fully opaque)
                }
            }
        }
    }
    finally
    {
        // FIX: the original never called UnlockBits, leaving the bitmap
        // permanently locked; always release the bits.
        bitmap.UnlockBits(bmpData);
    }
    return bitmap;
}
/// <summary>
/// Runs OCR over <paramref name="image"/> and returns the recognized text blocks.
/// </summary>
/// <param name="image">Grayscale image to recognize.</param>
/// <param name="runOcrHandler">Optional custom OCR routine; when null the default engine is used.</param>
/// <returns>Recognized text blocks, or null when the image size is invalid or OCR throws.</returns>
public static List<TextBlock> RunOcr(GrayImage image, Ocr.RunOcrHandler runOcrHandler = null)
{
    try
    {
        // Reject images outside the supported size range up front.
        if (!CheckImageSize(image.Width, image.Height))
        {
            return null;
        }

        // A caller-supplied handler takes precedence over the default engine.
        if (runOcrHandler != null)
        {
            return runOcrHandler(image);
        }

        //Debuging purpose only
        //return Jocr.Ocr.Start(image, JorcImageConvert.ShowJocrGrayImage);
        return Jocr.Ocr.RecognizeSentences(image);
    }
    catch (Exception e)
    {
        // Surface the failure to the user rather than crashing the UI.
        UIUtilities.ShowErrorDialog("RunOcr: " + e.Message + "\n" + e.StackTrace);
        return null;
    }
}
/// <summary>
/// Shows a grayscale image in a modal window (used for OCR debugging).
/// </summary>
/// <param name="image">Image to display.</param>
public static void ShowJocrGrayImage(GrayImage image)
{
    var window = new ImageWindow();
    // Dispose the temporary GDI bitmap once the dialog closes; the window
    // keeps its own converted ImageSource.
    using (var bitmap = JocrGrayImageToBitmap(image))
    {
        window.image.Source = BitmapHelper.BitmapToImageSource(bitmap);
        window.ShowDialog();
    }
}
/// <summary>
/// Extract one kanji letter from a gray image and insert the recognized
/// character into the search box at the saved caret position.
/// </summary>
/// <param name="grayImage">Grayscale image expected to contain a single character.</param>
// NOTE(review): async void — exceptions thrown here are unobservable to callers;
// acceptable only if this is invoked like an event handler. Confirm call sites.
public async void SearchOCROneLetter(GrayImage grayImage)
{
    // Snapshot text and caret before OCR replaces searchTextBox.Text asynchronously.
    var oldText = searchTextBox.Text;
    var caretIndex = searchTextBox.CaretIndex;
    if (MainWindow.UserPrefs.OcrEngine == OcrEngineType.NlpJdict)
    {
        // RecognizeOneWord restricts the OCR pass to a single character/word.
        SearchImage(grayImage, Jocr.Ocr.RecognizeOneWord);
    }
    await Task.Delay(10); //Wait for search textbox to be updated if needed
    // Splice the recognized text (now in searchTextBox.Text) back into the
    // original text at the saved caret position.
    // NOTE(review): the fixed 10 ms delay looks like a race — SearchImage runs
    // OCR on a background task of unbounded duration; confirm the intended
    // synchronization (e.g. OcrFinishedEvent) instead of the delay.
    searchTextBox.Text = oldText.Insert(caretIndex, searchTextBox.Text);
}
/// <summary>
/// Applies a gaussian blur to every frame, processing frames in parallel.
/// </summary>
/// <param name="frames">Source frames (row/col pixel buffers).</param>
/// <param name="blurConfiguration">Kernel size and sigma for the blur.</param>
/// <returns>Blurred frames, materialized as a list.</returns>
private static IEnumerable<Frame> BlurFrames(IEnumerable<Frame> frames, GaussianBlurConfiguration blurConfiguration)
{
    // One shared kernel for all frames; built once up front.
    double[,] gaussianKernel = GaussianBlurKernel.Kernel2D(blurConfiguration.Kernel, blurConfiguration.Sigma);

    return frames
        .AsParallel()
        .Select(source =>
        {
            float[][] pixels = ImageService.RowCols2Image(source.ImageRowCols, source.Rows, source.Cols);
            float[][] blurredPixels = GrayImage.Convolve(pixels, gaussianKernel);
            // Preserve timing/order metadata on the new frame.
            return new Frame(blurredPixels, source.StartsAt, source.SequenceNumber);
        })
        .ToList();
}
/// <summary>
/// Lets the user pick a folder, then scans its files on a background thread,
/// adding each grayscale image's path to <c>GrayImgList</c>. The button is
/// disabled during the scan and re-enabled on the UI thread when it finishes.
/// </summary>
private void btnOpenFolder_Click(object sender, EventArgs e)
{
    if (folderBrowserDialog.ShowDialog() != DialogResult.OK)
    {
        return;
    }

    string folder = folderBrowserDialog.SelectedPath;
    if (!System.IO.Directory.Exists(folder))
    {
        // FIX: corrected broken user-facing message ("Select folder is not exists.").
        SimpleConsole.WriteLine("Selected folder does not exist.");
        return;
    }

    this.SourceImgFolder.Text = folder;

    string[] filenames = System.IO.Directory.GetFiles(folder);
    if (filenames.Length == 0)
    {
        // FIX: corrected broken user-facing message ("Select folder is empty.").
        SimpleConsole.WriteLine("Selected folder is empty.");
        return;
    }

    // Prevent re-entry while the background scan is running.
    this.btnOpenFolder.Enabled = false;

    // FIX: renamed misspelled "thradFindGrayImg".
    System.Threading.Thread threadFindGrayImg = new System.Threading.Thread(
        new System.Threading.ParameterizedThreadStart((object o) =>
        {
            string[] items = o as string[];
            foreach (string item in items)
            {
                try
                {
                    // Unreadable / non-image files throw; log and keep scanning.
                    bool isGray = GrayImage.IsGray(item.Reader());
                    if (isGray)
                    {
                        // Marshal the list update back to the UI thread.
                        this.Invoke(new Action(() => { this.GrayImgList.Items.Add(item); }));
                    }
                }
                catch (Exception exception)
                {
                    SimpleConsole.WriteLine(exception);
                }
            }

            this.Invoke(new Action(() =>
            {
                this.btnOpenFolder.Enabled = true;
                // FIX: corrected broken user-facing message ("Find over.").
                SimpleConsole.WriteLine("Search finished.");
            }));
        }));
    threadFindGrayImg.Start(filenames);
}
/// <summary>
/// Gaussian-blurs each channel of the source image (anti-aliasing low-pass),
/// then nearest-neighbor resizes it to (_newWidth, _newHeight).
/// </summary>
public override void ApplyFilter()
{
    int width = _sourceData[0].GetLength(0);
    int channels = _sourceData.Length;

    // Standard deviation for the pre-decimation blur, proportional to the
    // downscale ratio.
    // FIX: the cast must happen before the division — the original
    // "(double)(length / _newWidth) * 0.5" performed integer division first,
    // truncating the ratio (e.g. 150/100 -> 1 instead of 1.5).
    double std = (double)width / _newWidth * 0.5;

    for (int i = 0; i < channels; i++)
    {
        GrayImage channel = new GrayImage(_sourceData[i]);
        channel = Convolution.Instance.GaussianConv(channel, std);
        _sourceData[i] = channel.ToByteArray2D();
    }

    NNResize resizer = new NNResize();
    _destinationData = resizer.Apply(_sourceData, _newWidth, _newHeight);
}
/// <summary>
/// Thresholds the input image, finds its external contours, and stores the
/// centroid (X, Y) and contour index (PictureNum) of the last contour whose
/// perimeter is in (1000, 4000) and whose polygonal approximation has >= 6 vertices.
/// </summary>
/// <param name="inputImage">Source BGR image to analyse.</param>
public static void AnalisePicture(Image <Bgr, byte> inputImage) { DataController.AddInputImage(inputImage);
    // Smooth, convert to grayscale, and keep an untouched copy before thresholding.
    GrayImage = inputImage.SmoothGaussian(9).Convert <Gray, byte>(); GrayWithoutEffect = GrayImage.Clone();
    // Binary threshold at (darkest pixel value + 18).
    // NOTE(review): the +18 offset looks experimentally tuned — confirm its origin.
    GrayImage = GrayImage.ThresholdBinary(new Gray(FindMinData(GrayImage) + 18), new Gray(255)); var hierarchy = new Mat(); CvInvoke.FindContours(GrayImage, Contours, hierarchy, RetrType.External, ChainApproxMethod.LinkRuns); int numMax = 0, xMax = 0, yMax = 0; for (int i = 0; i < Contours.Size; i++) { double perimeter = CvInvoke.ArcLength(Contours[i], true); VectorOfPoint approximation = new VectorOfPoint(); CvInvoke.ApproxPolyDP(Contours[i], approximation, 0.04 * perimeter, true); Moments moments = CvInvoke.Moments(Contours[i]);
        // Contour centroid from image moments (M10/M00, M01/M00).
        // NOTE(review): moments.M00 can be 0 for degenerate contours, making x/y
        // undefined; the values are only kept for large contours below, but
        // confirm whether a guard is needed.
        var x = (int)(moments.M10 / moments.M00); var y = (int)(moments.M01 / moments.M00);
        // NOTE(review): despite the "Max" names, this keeps the LAST matching
        // contour, not the one with the largest perimeter — confirm intent.
        if (perimeter > 1000 && perimeter < 4000 && approximation.Size >= 6) { numMax = i; xMax = x; yMax = y; } }
    // Publish the result into static state consumed elsewhere.
    X = xMax; Y = yMax; PictureNum = numMax; }
/// <summary>
/// Runs OCR over the given image, forwards the recognized text blocks to
/// SearchJOcrResults, and raises OcrFinishedEvent when done.
/// </summary>
/// <param name="grayImage">Image to recognize.</param>
/// <param name="runOcrHandler">Optional custom OCR routine passed through to JocrWrapper.RunOcr.</param>
private void SearchImage(GrayImage grayImage, Jocr.Ocr.RunOcrHandler runOcrHandler = null) {
    // Show the busy indicator only on the first call, when the one-time
    // OCR parameter initialization still has to run.
    if (!JocrWrapper.IsOcrParametersInit) { progressRing.StartAnimation(); } Task.Run(() => { if (!JocrWrapper.IsOcrParametersInit) { JocrWrapper.InitOcrParameters(); }
        // NOTE(review): RunOcr executes inside Dispatcher.Invoke, i.e. back on the
        // UI thread — only InitOcrParameters actually benefits from Task.Run. If
        // RunOcr is slow the UI will freeze here; confirm whether it must run on
        // the dispatcher (e.g. because it can show error dialogs).
        currentDispatcher.Invoke(() => { var textBlocks = JocrWrapper.RunOcr(grayImage, runOcrHandler); SearchJOcrResults(textBlocks); progressRing.StopAnimation();
            // Null sender/args: subscribers treat this as a bare completion signal.
            OcrFinishedEvent?.Invoke(null, null); }); }); }
/// <summary>
/// Rebuilds <c>GrayImage</c> from the original bitmap and converts it to
/// grayscale with the selected algorithm.
/// </summary>
/// <param name="method">0 = lightness, 1 = average, 2 = luminosity; anything else shows an error box.</param>
private void MakePictureGrayAgain(int method)
{
    // Release the previous buffer before allocating a fresh copy.
    GrayImage?.Dispose();
    GrayImage = new DirectBitmap(OriginalImage.Bitmap);

    if (method == 0)
    {
        GrayImage.LightnessGray();
    }
    else if (method == 1)
    {
        GrayImage.AverageGray();
    }
    else if (method == 2)
    {
        GrayImage.LuminosityGray();
    }
    else
    {
        MessageBox.Show("Akuku");
    }
}
/// <summary>
/// Allocates the destination image for the decoded frame: a gray image for
/// single-component frames, otherwise a YCbCr image whose chroma subsample
/// ratio is derived from the component sampling factors.
/// </summary>
private void MakeImg(int mxx, int myy)
{
    if (nComp == 1)
    {
        // Grayscale: allocate full MCU-aligned buffer, then crop to the frame size.
        var full = new GrayImage(8 * mxx, 8 * myy);
        grayImage = full.subimage(0, 0, width, height);
        return;
    }

    var h0 = comp[0].h;
    var v0 = comp[0].v;
    // Pack the horizontal and vertical luma:chroma sampling ratios into one
    // nibble pair, e.g. 0x22 => 4:2:0.
    var packedRatio = ((h0 / comp[1].h) << 4) | (v0 / comp[1].v);

    YCbCrImage.YCbCrSubsampleRatio ratio;
    switch (packedRatio)
    {
        case 0x12:
            ratio = YCbCrImage.YCbCrSubsampleRatio.YCbCrSubsampleRatio440;
            break;
        case 0x21:
            ratio = YCbCrImage.YCbCrSubsampleRatio.YCbCrSubsampleRatio422;
            break;
        case 0x22:
            ratio = YCbCrImage.YCbCrSubsampleRatio.YCbCrSubsampleRatio420;
            break;
        case 0x41:
            ratio = YCbCrImage.YCbCrSubsampleRatio.YCbCrSubsampleRatio411;
            break;
        case 0x42:
            ratio = YCbCrImage.YCbCrSubsampleRatio.YCbCrSubsampleRatio410;
            break;
        default:
            // 0x11 and any unrecognized combination fall back to 4:4:4,
            // matching the original's pre-initialized value.
            ratio = YCbCrImage.YCbCrSubsampleRatio.YCbCrSubsampleRatio444;
            break;
    }

    // Color: allocate full MCU-aligned buffer, then crop to the frame size.
    var fullColor = new YCbCrImage(8 * h0 * mxx, 8 * v0 * myy, ratio);
    ycbcrImage = fullColor.Subimage(0, 0, width, height);
}
/// <summary>
/// Returns a w×h view of this image anchored at (x, y). The view shares the
/// same pixel buffer and stride — no pixel data is copied; only the offset differs.
/// </summary>
public GrayImage subimage(int x, int y, int w, int h)
{
    var view = new GrayImage();
    view.w = w;
    view.h = h;
    view.pixels = pixels;
    view.stride = stride;
    // Re-anchor the buffer start at the requested top-left corner.
    view.offset = y * stride + x;
    return view;
}
/// <summary>
/// Scales both the color and grayscale images by <paramref name="scale"/>
/// using bilinear interpolation, then refreshes the cached display bitmap.
/// </summary>
/// <param name="scale">Scale factor applied to both images.</param>
public void Resize(double scale)
{
    var interpolation = Inter.Linear;
    BgrImage = BgrImage.Resize(scale, interpolation);
    GrayImage = GrayImage.Resize(scale, interpolation);
    // The display bitmap is derived from the (color) BGR image.
    BitMapImage = BitMapConverter.Convert(BgrImage.ToBitmap());
}