/// <summary>
/// Console entry point: loads "data.png", runs Tesseract OCR over it and
/// prints the recognized text to the console.
/// </summary>
static void Main(string[] args)
{
    Console.WriteLine("Loading image....");
    var sourceImage = new Image<Bgr, byte>("data.png");
    var ocrEngine = new Tesseract("./tessdata", "eng", OcrEngineMode.TesseractCubeCombined);

    Console.WriteLine("Analysing data....");
    ocrEngine.Recognize(sourceImage);

    // GetCharacters is invoked for parity with the original flow even though
    // only the aggregated text is displayed.
    Tesseract.Character[] recognizedChars = ocrEngine.GetCharacters();
    var recognizedText = ocrEngine.GetText();

    Console.WriteLine("\n\n");
    Console.WriteLine(recognizedText);
    Console.WriteLine("\n\nDone");
    Console.ReadLine();
}
/// <summary>
/// Renders "Hello, World" onto a blank BGR image and verifies that Tesseract
/// reads the exact same string back.
/// </summary>
public void TestOCRBgrText()
{
    using (Tesseract ocr = GetTesseract())
    using (Image<Bgr, Byte> canvas = new Image<Bgr, byte>(480, 200))
    {
        // Constrain recognition to letters plus the comma used in the message.
        ocr.SetVariable("tessedit_char_whitelist", "ABCDEFGHJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz,");

        String message = "Hello, World";
        CvInvoke.PutText(canvas, message, new Point(50, 100), CvEnum.FontFace.HersheySimplex, 1.0, new MCvScalar(0, 0, 255));
        //ImageViewer.Show(canvas);
        ocr.Recognize(canvas);

        // Tesseract appends end-of-line characters; strip them before comparing.
        String messageOcr = ocr.GetText().TrimEnd('\n', '\r');
        EmguAssert.AreEqual(message, messageOcr, String.Format("'{0}' is not equal to '{1}'", message, messageOcr));

        Tesseract.Character[] results = ocr.GetCharacters();
    }
}
/// <summary>
/// Window constructor: seeds the parking list with sample rows, wires up the
/// license-plate detector and the OCR engine, then runs a recognition pass
/// over a fixed test image and shows its text.
/// </summary>
public MainWindow()
{
    InitializeComponent();

    // Populate the list view with 14 demo parking entries.
    for (var i = 1; i < 15; i++)
    {
        ListView1.Items.Add(new ParkingEntry
        {
            Name = $"Person #{i}",
            PlateNumber = $"Plate #{i}",
            DateEntry = DateTime.Now,
            DateExit = DateTime.Now.AddHours(i)
        });
    }

    var baseDirectory = System.IO.Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location);
    var tessdataFolder = System.IO.Path.Combine(baseDirectory, "tessdata");
    _licensePlateDetector = new LicensePlateDetector(tessdataFolder);

    // Load the hard-coded test image and show it in the preview box.
    Mat m = new Mat(@"C:\Users\cayent\Desktop\ImageTest\222.jpg");
    UMat um = m.GetUMat(AccessType.ReadWrite);
    this.ImagePlate.Image = um;
    //ProcessImage(m);

    _ocr = new Tesseract(tessdataFolder, "eng", OcrEngineMode.TesseractLstmCombined, "ABCDEFGHIJKLMNOPQRSTUVWXYZ 1234567890");
    _ocr.SetVariable("tessedit_char_whitelist", "ABCDEFGHIJKLMNOPQRSTUVWXYZ-1234567890");

    // Run one recognition pass and exercise the various output formats;
    // only the UTF-8 text is shown in the UI.
    _ocr.SetImage(m);
    _ocr.Recognize();
    var text1 = _ocr.GetBoxText();
    var text2 = _ocr.GetCharacters();
    var text3 = _ocr.GetHOCRText();
    //var text4 = _ocr.GetOsdText(1);
    var text5 = _ocr.GetTSVText();
    var text6 = _ocr.GetUNLVText();
    var text7 = _ocr.GetUTF8Text();
    Plate.Text = text7;
}
/// <summary>
/// Lets the user pick an image file, binarizes it, runs OCR and draws a box
/// around every recognized character before showing the result and text.
/// </summary>
private void button1_Click(object sender, EventArgs e)
{
    OpenFileDialog op = new OpenFileDialog();
    DialogResult res = op.ShowDialog();
    if (res != DialogResult.OK)
    {
        return;
    }
    filename = op.FileName;
    try
    {
        // Dispose the GDI+ bitmap once the Emgu image has been built from it
        // (the original leaked it).
        using (Bitmap bimg = (Bitmap)Image.FromFile(filename))
        {
            gimg = new Image<Bgr, byte>(bimg);
        }
        Bgr color = new Bgr(Color.Red);
        Image<Gray, byte> grayimg = gimg.Convert<Gray, Byte>();
        // Fixed-threshold binarization before handing the image to Tesseract.
        Image<Gray, byte> bimimg = grayimg.ThresholdBinary(new Gray(126), new Gray(255));
        Tocr.Recognize(bimimg);
        Tesseract.Character[] chararcters = Tocr.GetCharacters();
        foreach (Tesseract.Character c in chararcters)
        {
            gimg.Draw(c.Region, color, 1);
        }
        pictureBox1.Image = gimg.Resize(pictureBox1.Width, pictureBox1.Height, Emgu.CV.CvEnum.Inter.Linear).Bitmap;
        string txt = Tocr.GetText();
        textBox1.Text = txt;
    }
    catch (Exception ex)
    {
        // BUG FIX: the original swallowed every exception silently, hiding
        // bad files and missing tessdata. Surface the error to the user.
        MessageBox.Show(ex.Message);
    }
}
/// <summary>
/// Binarizes the currently loaded plate image (Otsu thresholding), runs OCR
/// on the result and shows the concatenated character text.
/// </summary>
private void btnDetectPlate_Click(object sender, EventArgs e)
{
    Image<Bgr, byte> image = (Image<Bgr, byte>)imgPlate.Image;
    Mat thresh = new Mat();
    Mat gray = new Mat();
    CvInvoke.CvtColor(image, gray, ColorConversion.Bgr2Gray);
    // With THRESH_OTSU the threshold is computed automatically; the 120 is
    // only a placeholder (per OpenCV's cv::threshold documentation).
    CvInvoke.Threshold(gray, thresh, 120, 255, ThresholdType.Otsu);
    imgCannyPlate.Image = thresh;
    _ocr.SetImage(thresh);
    _ocr.Recognize();
    var words = _ocr.GetCharacters();
    // Use a StringBuilder instead of repeated string concatenation so text
    // assembly stays linear in the number of characters.
    var textBuilder = new System.Text.StringBuilder();
    foreach (var character in words)
    {
        textBuilder.Append(character.Text);
    }
    richTextBox1.Text = textBuilder.ToString();
}
/// <summary>
/// Entry point: runs Tesseract (uppercase letters only) over the prepared
/// characters/result.png on a worker task and prints the box text.
/// </summary>
static void Main()
{
    // Earlier experiments (line detection, SURF keypoint comparison) were
    // removed from the active path.
    Console.WriteLine("Starting image recognition");
    Task.Factory.StartNew(() =>
    {
        using (Image<Gray, byte> source = new Image<Gray, byte>(Path.GetFullPath("../../../characters/result.png")))
        using (Tesseract engine = new Tesseract("C:\\Program Files\\Tesseract-OCR\\tessdata", "eng", OcrEngineMode.TesseractLstmCombined))
        {
            engine.SetVariable("segment_reward_chartype", "0.9");
            // Only uppercase letters can appear in the grid.
            engine.SetVariable("tessedit_char_whitelist", "ABCDEFGHIJKLMNOPQRSTUVWXYZ");
            engine.SetImage(source);
            engine.Recognize();
            Tesseract.Character[] characters = engine.GetCharacters();
            string text = engine.GetBoxText();
            Console.WriteLine(text);
        }
    }).Wait();
    Console.WriteLine("Called async image recognition");
}
// Button handler: OCRs a horizontal band (top third of the height, right
// two-thirds of the width) of a user-selected image and shows the grayscale
// result plus the recognized text.
private void loadImageButton_Click(object sender, EventArgs e)
{
    if (openImageFileDialog.ShowDialog() == System.Windows.Forms.DialogResult.OK)
    {
        //Bgr drawColor = new Bgr(Color.Blue);
        Gray drawColor = new Gray();
        try
        {
            Image <Bgr, Byte> image = new Image <Bgr, byte>(openImageFileDialog.FileName);
            using (Image <Gray, byte> gray = image.Convert <Gray, Byte>())
            {
                //image.ga
                //CvInvoke.GaussianBlur(gray, gray, new Size(5, 5), 1.5);
                // Light blur to reduce noise before OCR.
                CvInvoke.GaussianBlur(gray, gray, new Size(3, 3), 1.5);
                // Restrict processing to the band; all further coordinates
                // returned by the OCR engine are ROI-relative.
                gray.ROI = new Rectangle(gray.Width / 3, 0, gray.Width - (gray.Width / 3), gray.Height / 3);
                _ocr.Recognize(gray);
                Tesseract.Character[] characters = _ocr.GetCharacters();
                foreach (Tesseract.Character c in characters)
                {
                    // NOTE(review): c2 is offset back to full-image coordinates but
                    // never used afterwards — the draw below uses c.Region, which is
                    // correct while the ROI is active. Looks like dead code; confirm
                    // Tesseract.Character is a value type before removing.
                    Tesseract.Character c2 = c;
                    c2.Region.X = c.Region.X + (gray.Width / 3);
                    gray.Draw(c.Region, drawColor, 1);
                }
                // NOTE(review): gray is handed to the image box but disposed at the
                // end of this using block — verify the control copies the image.
                imageBox1.Image = gray;
                String text = _ocr.GetText();
                ocrTextBox.Text = text;
            }
        }
        catch (Exception exception)
        {
            MessageBox.Show(exception.Message);
        }
    }
}
/// <summary>
/// Runs Tesseract over the supplied bitmap and saves one image per recognized
/// character into the "images" folder. Characters with an empty region
/// (e.g. whitespace) are written as a blank white 32x32 tile.
/// </summary>
/// <param name="imageBitmap">Source image to recognize; also used to crop the per-character tiles.</param>
public void DetectCharacters(Bitmap imageBitmap)
{
    characterRecognizer = new Tesseract();
    // BUG FIX: Init must run before SetVariable — tesseract configuration
    // variables are applied to an initialized engine, so the original order
    // (whitelist first) left the whitelist ineffective.
    characterRecognizer.Init(@"./tessdata", "rus", OcrEngineMode.TesseractLstmCombined);
    characterRecognizer.SetVariable("tessedit_char_whitelist", "ABCDEFGHIJKLMNOPQRSTUVWXYZ-1234567890");
    // (The defensive .Copy() of the converted image was redundant and removed.)
    Image<Bgr, Byte> imageConverted = new Image<Bgr, Byte>(imageBitmap);
    Mat mat = imageConverted.Mat;
    Pix image = new Pix(mat);
    characterRecognizer.SetImage(image);
    var rec = characterRecognizer.Recognize();
    var characters = characterRecognizer.GetCharacters();
    for (int i = 0; i < characters.Length; i++)
    {
        var region = characters[i].Region;
        if (region.Height == 0 || region.Width == 0)
        {
            // No pixel region: emit a white placeholder tile instead.
            using (Bitmap space = new Bitmap(32, 32))
            {
                for (int x = 0; x < space.Width; x++)
                {
                    for (int y = 0; y < space.Height; y++)
                    {
                        space.SetPixel(x, y, Color.White);
                    }
                }
                space.Save("images\\" + i.ToString());
            }
            continue;
        }
        // Crop the character's bounding region out of the source bitmap.
        using (Bitmap letterBitmap = new Bitmap(region.Width, region.Height))
        {
            using (Graphics g = Graphics.FromImage(letterBitmap))
            {
                g.DrawImage(imageBitmap, new Rectangle(0, 0, letterBitmap.Width, letterBitmap.Height), region, GraphicsUnit.Pixel);
            }
            letterBitmap.Save("images\\" + i.ToString());
        }
    }
}
/// <summary>
/// Recognize the license plate number from a given image.
/// </summary>
/// <remarks>
/// Creates a Tesseract OCR engine per call. PageSegMode.SingleChar is the
/// default and suits single-character crops; pass PageSegMode.SingleBlock
/// for whole-plate images. The engine loads the training data configured in
/// _ocrParams, which combines the built-in data set with a special training
/// set for polish license plates.
/// </remarks>
/// <param name="imgWithNumber">Mat containing the image of possible license plate area</param>
/// <param name="pageMode">PageSegMode which should be used when recognizing the character</param>
/// <returns>Recognized plate number</returns>
private string RecognizeNumber(Mat imgWithNumber, PageSegMode pageMode = PageSegMode.SingleChar)
{
    var plateText = new StringBuilder();
    using (var ocr = new Tesseract())
    {
        ocr.Init(
            _ocrParams["TEST_DATA_PATH"],
            _ocrParams["TEST_DATA_LANG"],
            OcrEngineMode.LstmOnly);
        ocr.SetVariable("tessedit_char_whitelist", _ocrParams["WHITE_LIST"]);
        ocr.SetVariable("user_defined_dpi", "70");
        ocr.PageSegMode = pageMode;

        // Work on a clone so the caller's Mat is never touched by the engine.
        using (Mat workingCopy = imgWithNumber.Clone())
        {
            ocr.SetImage(workingCopy);
            ocr.Recognize();
            foreach (Tesseract.Character character in ocr.GetCharacters())
            {
                plateText.Append(character.Text);
            }
        }
        return plateText.ToString();
    }
}
/// <summary>
/// Creates a fresh OCR engine, recognizes the text in SoureceImage, outlines
/// every recognized character on a grayscale copy and publishes the text
/// into the single-column data table shown by the form.
/// </summary>
private void getOcr()
{
    _ocr = new Tesseract("", "eng", OcrEngineMode.TesseractCubeCombined);
    _ocr.SetVariable("tessedit_char_blacklist", "ABCDEFGHIJKLMNOPQRSTUVWXYZ - 1234567890");
    Bgr drawColor = new Bgr(Color.Blue);

    Image<Gray, byte> grayCopy = SoureceImage.Convert<Gray, byte>();
    _ocr.Recognize(grayCopy);

    // Outline every recognized character directly on the grayscale copy.
    foreach (Tesseract.Character character in _ocr.GetCharacters())
    {
        grayCopy.Draw(character.Region, new Gray(0), 1);
    }

    DataSource = new DataTable();
    DataSource.Columns.Add("字符串", typeof(System.String));
    DataRow newLine = DataSource.NewRow();
    newLine["字符串"] = _ocr.GetText();
    DataSource.Rows.Add(newLine);

    SoureceImage.Bitmap = grayCopy.Bitmap;
    ShowFormImage();
}
/// <summary>
/// Runs a full-page OCR pass over <paramref name="image"/> and outlines every
/// recognized character on <paramref name="imageColor"/>.
/// </summary>
/// <param name="ocr">Initialized Tesseract engine.</param>
/// <param name="image">Source image (single-channel or color).</param>
/// <param name="imageColor">Receives a BGR copy of the source with red boxes drawn around recognized characters.</param>
/// <returns>The recognized UTF-8 text, or an empty string when recognition fails.</returns>
static string OcrImage(Tesseract ocr, Mat image, Mat imageColor)
{
    try
    {
        Bgr drawCharColor = new Bgr(Color.Red);

        // Always hand a 3-channel BGR image to the engine / drawing code.
        if (image.NumberOfChannels == 1)
        {
            CvInvoke.CvtColor(image, imageColor, ColorConversion.Gray2Bgr);
        }
        else
        {
            image.CopyTo(imageColor);
        }

        ocr.SetImage(imageColor);
        if (ocr.Recognize() != 0)
        {
            // BUG FIX: corrected the typo in the original message
            // ("Failed to recognizer image") and narrowed the exception type;
            // it is caught by the handler below, so callers are unaffected.
            throw new InvalidOperationException("Failed to recognize image");
        }

        Tesseract.Character[] characters = ocr.GetCharacters();
        foreach (Tesseract.Character c in characters)
        {
            CvInvoke.Rectangle(imageColor, c.Region, drawCharColor.MCvScalar);
        }

        // (A large, fully commented-out experimental block — re-thresholding
        // retries and ERFilter-based text region detection before OCR — was
        // removed; it never executed.)
        return ocr.GetUTF8Text();
    }
    catch (Exception)
    {
        // Best-effort: any recognition problem is reported as an empty result.
        return string.Empty;
    }
}
/// <summary>
/// Entry point: loads assets/1234R.jpg, grayscales and binarizes it (Otsu),
/// deskews it via two detected reference vertices, crops a fixed 700x800
/// region and runs Tesseract over the result, printing the recognized text.
/// </summary>
static void Main(string[] args)
{
    string winName = "test-win";
    string imagePath = Path.Combine(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location), "..", "..", "assets", "1234R.jpg");
    Bitmap image = new Bitmap(imagePath);
    Util.Gray(image);
    byte threshold = Util.Otsu(image);
    //Console.WriteLine("threshold is: " + threshold);
    Util.Thresholding(image, threshold);

    // NOTE(review): per the original author, ToImage rotates the picture 90
    // degrees counter-clockwise at this point (cause unknown).
    Image<Bgr, byte> image1 = image.ToImage<Bgr, byte>();
    //image1 = image1.Rotate(90, new Bgr(0, 0, 0));
    Util.displayImg(winName + "1", image1);

    // Gaussian blur to suppress noise; per the original author the kernel
    // size is critical — too big or too small and FindVertices misses.
    CvInvoke.GaussianBlur(image1, image1, new Size(7, 7), 3);
    Util.displayImg(winName + "2", image1);

    Point left = new Point();
    Point right = new Point();
    image1 = Util.FindVertices(image1, ref left, ref right);
    Util.displayImg(winName + "3", image1);
    // (Optional debug drawing of the detected vertices removed — it was
    // fully commented out.)
    Console.WriteLine("1 left: " + left.X + " " + left.Y);
    Console.WriteLine("1 right: " + right.X + " " + right.Y);

    double angle = Util.getAngleTray(left, right, left, new Point(right.X, left.Y));
    Console.WriteLine("angle is: " + angle);

    // Deskew, then locate the vertices again on the rotated image.
    image1 = image1.Rotate(90 + angle, new Bgr(0, 0, 0));
    Util.displayImg(winName + "4", image1);
    image1 = Util.FindVertices(image1, ref left, ref right);
    Util.displayImg(winName + "5", image1);

    // Crop a fixed-size window offset from the detected left vertex.
    int cutWidth = 700;
    int cutHeight = 800;
    using (Bitmap MasterMap = new Bitmap(cutWidth, cutHeight, PixelFormat.Format32bppRgb))
    {
        // Dispose the Graphics and crop bitmap deterministically; the
        // original leaked both GDI+ objects.
        using (Graphics g = Graphics.FromImage(MasterMap))
        {
            Rectangle dest = new Rectangle(0, 0, cutWidth, cutHeight);
            Rectangle source = new Rectangle(left.X + 300, left.Y + 200, cutWidth, cutHeight);
            g.DrawImage(image1.AsBitmap(), dest, source, GraphicsUnit.Pixel);
        }
        // NOTE(review): unlike the first conversion, this ToImage call does
        // not rotate the picture (cause unknown, per the original author).
        image1 = MasterMap.ToImage<Bgr, byte>();
    }
    Util.displayImg(winName + "6", image1);

    // (An opening-morphology experiment was commented out here; the author
    // noted it had no visible effect.)

    string ocrPath = "./Properties/tessdata";
    //string language = "chi_sim";
    string language = "eng";
    // Dispose the engine when done; the original never released it.
    using (Tesseract ocr = new Tesseract(ocrPath, language, OcrEngineMode.Default))
    {
        ocr.SetImage(image1);
        ocr.Recognize();
        Tesseract.Character[] characters = ocr.GetCharacters();
        string strRsult = string.Empty;
        try
        {
            strRsult = ocr.GetUTF8Text();
            Console.WriteLine(strRsult);
        }
        catch (Exception)
        {
            Console.WriteLine("Error");
            return;
        }
    }
}
/// <summary>
/// Runs OCR over the supplied encoded image bytes and returns the non-blank,
/// centered characters (uppercase letters only) that were recognized.
/// </summary>
/// <param name="imageData">Raw encoded image bytes.</param>
/// <returns>The recognized character strings, in recognition order.</returns>
public IEnumerable<string> GetCharactersFromImage(byte[] imageData)
{
    List<string> recognizedCharacters = new List<string>();
    using (MemoryStream memoryStream = new MemoryStream(imageData))
    // Dispose the intermediate bitmap and grayscale image as well — the
    // original leaked both.
    using (Bitmap bitmapImage = (Bitmap)Image.FromStream(memoryStream))
    using (Image<Gray, byte> image = new Image<Gray, byte>(bitmapImage))
    using (Tesseract tesseract = new Tesseract(_tesseractPath, _language, OcrEngineMode.TesseractOnly))
    {
        // Restrict output to uppercase letters.
        tesseract.SetVariable("tessedit_char_whitelist", "QWERTYUIOPASDFGHJKLZXCVBNM");
        tesseract.SetImage(image);
        tesseract.Recognize();
        IEnumerable<Tesseract.Character> characters = DiscardUncenteredCharacters(tesseract.GetCharacters());
        foreach (Tesseract.Character character in characters)
        {
            if (!string.IsNullOrWhiteSpace(character.Text))
            {
                recognizedCharacters.Add(character.Text);
            }
        }
    }
    return recognizedCharacters;
}
/// <summary>
/// Recursively scans the contour hierarchy for license-plate candidates
/// (enough child contours, plausible area and aspect ratio), deskews and
/// normalizes each candidate, filters it and OCRs the plate text.
/// </summary>
/// <param name="contours">All detected contours.</param>
/// <param name="hierachy">Contour hierarchy ([idx,0]=next sibling, [idx,2]=first child).</param>
/// <param name="idx">Index of the contour to start from.</param>
/// <param name="gray">Grayscale source image.</param>
/// <param name="canny">Canny edge image (currently only forwarded to recursive calls).</param>
/// <param name="licensePlateImagesList">Receives the cropped plate images.</param>
/// <param name="filteredLicensePlateImagesList">Receives the filtered plate images.</param>
/// <param name="detectedLicensePlateRegionList">Receives the detected plate regions.</param>
/// <param name="licenses">Receives the recognized plate strings.</param>
private void FindLicensePlate(
    VectorOfVectorOfPoint contours, int[,] hierachy, int idx, IInputArray gray, IInputArray canny,
    List<IInputOutputArray> licensePlateImagesList, List<IInputOutputArray> filteredLicensePlateImagesList,
    List<RotatedRect> detectedLicensePlateRegionList, List<string> licenses
)
{
    for (; idx >= 0; idx = hierachy[idx, 0])
    {
        int numberOfChildren = GetNumberOfChildren(hierachy, idx);
        if (numberOfChildren == 0)
        {
            continue;
        }
        using (VectorOfPoint contour = contours[idx])
        {
            if (CvInvoke.ContourArea(contour) > 400)
            {
                if (numberOfChildren < 3)
                {
                    // Fewer than 3 inner contours (characters): not a plate itself,
                    // but a plate may be nested inside, so recurse into the children.
                    FindLicensePlate(contours, hierachy, hierachy[idx, 2], gray, canny, licensePlateImagesList, filteredLicensePlateImagesList, detectedLicensePlateRegionList, licenses);
                    continue;
                }
                RotatedRect box = CvInvoke.MinAreaRect(contour);
                // Normalize the box angle into [-45, 45] by swapping width/height.
                if (box.Angle < -45.0)
                {
                    float tmp = box.Size.Width;
                    box.Size.Width = box.Size.Height;
                    box.Size.Height = tmp;
                    box.Angle += 90.0f;
                }
                else if (box.Angle > 45.0)
                {
                    float tmp = box.Size.Width;
                    box.Size.Width = box.Size.Height;
                    box.Size.Height = tmp;
                    box.Angle -= 90.0f;
                }
                double whRatio = (double)box.Size.Width / box.Size.Height;
                if (!(3.0 < whRatio && whRatio < 10.0))
                {
                    // Wrong aspect ratio for a plate; still check the nested
                    // contours — a plate may sit inside this one.
                    if (hierachy[idx, 2] > 0)
                    {
                        FindLicensePlate(contours, hierachy, hierachy[idx, 2], gray, canny, licensePlateImagesList, filteredLicensePlateImagesList, detectedLicensePlateRegionList, licenses);
                    }
                    continue;
                }
                using (UMat tmp1 = new UMat())
                using (UMat tmp2 = new UMat())
                {
                    PointF[] srcCorners = box.GetVertices();
                    PointF[] destCorners = new PointF[] {
                        new PointF(0, box.Size.Height - 1),
                        new PointF(0, 0),
                        new PointF(box.Size.Width - 1, 0),
                        new PointF(box.Size.Width - 1, box.Size.Height - 1)
                    };
                    // Deskew the candidate into an axis-aligned grayscale patch.
                    using (Mat rot = CvInvoke.GetAffineTransform(srcCorners, destCorners))
                    {
                        CvInvoke.WarpAffine(gray, tmp1, rot, Size.Round(box.Size));
                    }
                    // Resize the plate so the font size lands around 10-12,
                    // which gives better OCR accuracy.
                    Size approxSize = new Size(240, 180);
                    double scale = Math.Min(approxSize.Width / box.Size.Width, approxSize.Height / box.Size.Height);
                    Size newSize = new Size((int)Math.Round(box.Size.Width * scale), (int)Math.Round(box.Size.Height * scale));
                    CvInvoke.Resize(tmp1, tmp2, newSize, 0, 0, Inter.Cubic);
                    // Trim a couple of pixels off the edges.
                    int edgePixelSize = 2;
                    Rectangle newRoi = new Rectangle(new Point(edgePixelSize, edgePixelSize), tmp2.Size - new Size(2 * edgePixelSize, 2 * edgePixelSize));
                    UMat plate = new UMat(tmp2, newRoi);
                    UMat filteredPlate = FilterPlate(plate);
                    Tesseract.Character[] words;
                    StringBuilder strBuilder = new StringBuilder();
                    using (UMat tmp = filteredPlate.Clone())
                    {
                        // BUG FIX: the original called _ocr.Recognize() without ever
                        // handing the engine the cloned plate image (tmp was unused),
                        // so recognition ran on whatever image was set previously.
                        _ocr.SetImage(tmp);
                        _ocr.Recognize();
                        words = _ocr.GetCharacters();
                        if (words.Length == 0)
                        {
                            continue;
                        }
                        for (int i = 0; i < words.Length; i++)
                        {
                            strBuilder.Append(words[i].Text);
                        }
                    }
                    licenses.Add(strBuilder.ToString());
                    licensePlateImagesList.Add(plate);
                    filteredLicensePlateImagesList.Add(filteredPlate);
                    detectedLicensePlateRegionList.Add(box);
                }
            }
        }
    }
}
// Button handler: loads a sudoku photo, extracts the 9x9 grid via adaptive
// thresholding + perspective warp, then recognizes each digit either by
// template matching (radioButton2) or by Tesseract OCR (radioButton1) and
// writes the result into the grid field.
private void button2_Click(object sender, EventArgs e) // process the image
{
    c = 0;
    fields = new List <Image <Gray, byte> >();
    grid = new int[9, 9];
    OpenFileDialog opf = new OpenFileDialog();
    if (opf.ShowDialog() != DialogResult.OK)
    {
        return;
    }
    Image <Gray, Byte> gray = new Image <Gray, byte>(opf.FileName);
    imageBox1.Image = gray.Clone();
    Image <Gray, Byte> izhod = new Image <Gray, byte>(gray.Width, gray.Height);
    // binarization (adaptive threshold segmentation)
    izhod = gray.ThresholdAdaptive(new Gray(255), Emgu.CV.CvEnum.AdaptiveThresholdType.MeanC, Emgu.CV.CvEnum.ThresholdType.BinaryInv, 11, new Gray(11));
    izhod._SmoothGaussian(1); // smoothing (not strictly required)
    // imageBox2.Image = izhod.Clone();
    var nova = izhod.Clone();
    // procedure for finding the largest connected object (the grid outline)
    VectorOfVectorOfPoint vvp = new VectorOfVectorOfPoint();
    Mat hierarchy = new Mat();
    CvInvoke.FindContours(izhod, vvp, hierarchy, Emgu.CV.CvEnum.RetrType.Tree, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxNone);
    int largest_contour_index = 0;
    double largest_area = 0;
    VectorOfPoint largestContour;
    for (int i = 0; i < vvp.Size; i++)
    {
        double a = CvInvoke.ContourArea(vvp[i], false);
        if (a > largest_area)
        {
            largest_area = a;
            largest_contour_index = i;
        }
    }
    largestContour = vvp[largest_contour_index];
    Point[] lc = largestContour.ToArray();
    // find the corners for the perspective transformation
    Point topleft = new Point(gray.Width, gray.Height);
    Point topright = new Point(0, gray.Height);
    Point botright = new Point(0, 0);
    Point botleft = new Point(gray.Width, 0);
    foreach (Point p in lc)
    {
        if ((p.X + p.Y) < (topleft.X + topleft.Y))
        {
            topleft = p;
        }
        else if ((p.X - p.Y) > (topright.X - topright.Y))
        {
            topright = p;
        }
        else if ((p.X + p.Y) > (botright.X + botright.Y))
        {
            botright = p;
        }
        else if ((p.Y - p.X) > (botleft.Y - botleft.X))
        {
            botleft = p;
        }
    }
    // paint over the grid lines to get rid of them, so that only the digits
    // remain (easier recognition)
    CvInvoke.DrawContours(nova, vvp, largest_contour_index, new MCvScalar(0, 0, 0), 6, Emgu.CV.CvEnum.LineType.EightConnected, hierarchy, 1);
    Image <Gray, Byte> warp = new Image <Gray, byte>(450, 450);
    PointF[] src = new PointF[] { topleft, topright, botright, botleft };
    PointF[] dst = new PointF[] { new Point(0, 0), new Point(450, 0), new Point(450, 450), new Point(0, 450) };
    Mat warpmat = CvInvoke.GetPerspectiveTransform(src, dst); // compute the transformation matrix
    CvInvoke.WarpPerspective(nova, warp, warpmat, new Size(450, 450)); // apply the transformation
    //imageBox1.Image = nova;
    imageBox2.Image = warp;
    //warp._Erode(1); // erosion or dilation, not needed
    //warp._Dilate(1);
    // digit recognition, 2 options (selected via the radio buttons)
    if (radioButton1.Checked)
    {
        tess = new Tesseract(@"C:/Emgu/emgucv-windows-universal 3.0.0.2157/bin/", null, OcrEngineMode.Default, "123456789 ");
    }
    fields = new List <Image <Gray, byte> >(); // keep the cells for easier debugging
    for (int i = 0; i < 9; i++)
    {
        for (int j = 0; j < 9; j++)
        {
            // crop each 50x50 cell, skipping a small margin along its border
            Image <Gray, Byte> temp = (warp.GetSubRect(new Rectangle(j * 50 + 3, i * 50 + 3, 44, 44))).Clone();
            temp._SmoothGaussian(1);
            Gray sum = temp.GetSum();
            // if there are not enough white pixels (object parts), it is not a digit
            if (sum.Intensity < 30000)
            {
                continue;
            }
            // again look for the largest element in the cell, assumed to be the digit
            VectorOfVectorOfPoint vvptemp = new VectorOfVectorOfPoint();
            Mat hierarchytemp = new Mat();
            CvInvoke.FindContours(temp, vvptemp, hierarchytemp, Emgu.CV.CvEnum.RetrType.Tree, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxNone);
            int ind = 0;
            double area = 100;
            VectorOfPoint contour;
            for (int k = 0; k < vvptemp.Size; k++)
            {
                double ar = CvInvoke.ContourArea(vvptemp[k], false);
                if (ar > area)
                {
                    area = ar;
                    ind = k;
                }
            }
            if (area == 100)
            {
                continue; // if the largest contour area is under 100 (the cell is 44x44), assume it is not a digit
            }
            contour = vvptemp[ind]; // contour of the digit
            var tempimg = new Image <Gray, Byte>(44, 44, new Gray(0));
            // draw the interior of the largest contour into a new image in white
            CvInvoke.DrawContours(tempimg, vvptemp, ind, new MCvScalar(255, 0, 0), -1, Emgu.CV.CvEnum.LineType.EightConnected, hierarchytemp);
            fields.Add(tempimg); // keep for inspection
            if (radioButton2.Checked)
            {
                Rectangle br = CvInvoke.BoundingRectangle(contour);
                int indeks = 0;
                double vrednost = double.MaxValue;
                for (int q = 0; q < 9; q++)
                {
                    // compute the similarity against each digit template
                    var kraj = tempimg.GetSubRect(new Rectangle(br.X, br.Y, vzorci[q].Width, vzorci[q].Height));
                    var pod = vzorci[q].AbsDiff(kraj);
                    var podobnost = pod.GetSum();
                    if (podobnost.Intensity < vrednost)
                    {
                        indeks = q + 1; // +1 because the template index is zero based
                        vrednost = podobnost.Intensity;
                    }
                }
                grid[i, j] = indeks; // the most similar template is the recognized digit
            }
            else
            {
                tess.Recognize(tempimg); // recognize the cell with the Tesseract OCR built into openCV
                var x = tess.GetCharacters();
                if (x.Length == 1)
                {
                    grid[i, j] = Convert.ToInt32(x[0].Text);
                }
            }
        }
    }
    NarisiStevilke(); // write the digits into the UI field
}
/// <summary>
/// Recursively scans the contour hierarchy for plate-shaped regions (enough
/// child contours, plausible area and aspect ratio), deskews and normalizes
/// each candidate, filters it and OCRs the text.
/// </summary>
/// <param name="contours">All detected contours.</param>
/// <param name="hierachy">Contour hierarchy ([idx,0]=next sibling, [idx,2]=first child).</param>
/// <param name="idx">Index of the contour to start from.</param>
/// <param name="gray">Grayscale source image.</param>
/// <param name="canny">Canny edge image (currently only forwarded to recursive calls).</param>
/// <param name="PlateImageList">Receives the cropped plate images.</param>
/// <param name="filteredPlateImageList">Receives the filtered plate images.</param>
/// <param name="detectedPlateRegionList">Receives the detected plate regions.</param>
/// <param name="word">Receives the recognized strings.</param>
private void FindPlate(
    VectorOfVectorOfPoint contours,
    int[,] hierachy,
    int idx,
    IInputArray gray,
    IInputArray canny,
    List<IInputOutputArray> PlateImageList,
    List<IInputOutputArray> filteredPlateImageList,
    List<RotatedRect> detectedPlateRegionList,
    List<String> word)
{
    for (; idx >= 0; idx = hierachy[idx, 0])
    {
        // How many child contours does this contour have?
        int numberOfChildren = GetNumberOfChildren(hierachy, idx);
        if (numberOfChildren == 0)
        {
            continue;
        }
        using (VectorOfPoint contour = contours[idx])
        {
            // The area inside the contour must be significant.
            if (CvInvoke.ContourArea(contour) > 400)
            {
                if (numberOfChildren < 3)
                {
                    // Fewer than 3 children: not a character block itself, but
                    // one may be nested inside, so recurse into the children.
                    FindPlate(contours, hierachy, hierachy[idx, 2], gray, canny, PlateImageList, filteredPlateImageList, detectedPlateRegionList, word);
                    continue;
                }
                // Minimum-area rotated rectangle around the candidate;
                // normalize its angle into [-45, 45] by swapping width/height.
                RotatedRect box = CvInvoke.MinAreaRect(contour);
                if (box.Angle < -45.0)
                {
                    float tmp = box.Size.Width;
                    box.Size.Width = box.Size.Height;
                    box.Size.Height = tmp;
                    box.Angle += 90.0f;
                }
                else if (box.Angle > 45.0)
                {
                    float tmp = box.Size.Width;
                    box.Size.Width = box.Size.Height;
                    box.Size.Height = tmp;
                    box.Angle -= 90.0f;
                }
                // Width/height ratio of the candidate text block.
                double whRatio = (double)box.Size.Width / box.Size.Height;
                if (!(3.0 < whRatio && whRatio < 10.0))
                {
                    // Not plate-shaped; still search any nested contours.
                    if (hierachy[idx, 2] > 0)
                    {
                        FindPlate(contours, hierachy, hierachy[idx, 2], gray, canny, PlateImageList, filteredPlateImageList, detectedPlateRegionList, word);
                    }
                    // BUG FIX: the original placed this continue inside the inner if,
                    // so a candidate with a bad aspect ratio and no children fell
                    // through and was OCR-ed anyway. The parallel implementations of
                    // this routine skip such candidates unconditionally.
                    continue;
                }
                using (Mat tmp1 = new Mat())
                using (Mat tmp2 = new Mat())
                {
                    PointF[] srcCorners = box.GetVertices();
                    PointF[] destConers = new PointF[] {
                        new PointF(0, box.Size.Height - 1),
                        new PointF(0, 0),
                        new PointF(box.Size.Width - 1, 0),
                        new PointF(box.Size.Width - 1, box.Size.Height - 1)
                    };
                    // Deskew the candidate into an axis-aligned grayscale patch.
                    using (Mat rot = CvInvoke.GetAffineTransform(srcCorners, destConers))
                    {
                        CvInvoke.WarpAffine(gray, tmp1, rot, Size.Round(box.Size));
                    }
                    // Rescale the candidate towards a 240x180 envelope.
                    Size approxSize = new Size(240, 180);
                    double scale = Math.Min(approxSize.Width / box.Size.Width, approxSize.Height / box.Size.Height);
                    Size newSize = new Size((int)Math.Round(box.Size.Width * scale), (int)Math.Round(box.Size.Height * scale));
                    CvInvoke.Resize(tmp1, tmp2, newSize, 0, 0, Inter.Cubic);
                    // Remove edge pixels.
                    int edgePixlSize = 2;
                    Rectangle newRoi = new Rectangle(new Point(edgePixlSize, edgePixlSize), tmp2.Size - new Size(2 * edgePixlSize, 2 * edgePixlSize));
                    Mat plate = new Mat(tmp2, newRoi);
                    Mat filteredPlate = FilterPlate(plate);
                    Tesseract.Character[] words;
                    StringBuilder strBuilder = new StringBuilder();
                    using (Mat tmp = filteredPlate.Clone())
                    {
                        // OCR the filtered plate image.
                        _ocr.Recognize(tmp);
                        words = _ocr.GetCharacters();
                        if (words.Length == 0)
                        {
                            continue;
                        }
                        for (int i = 0; i < words.Length; i++)
                        {
                            strBuilder.Append(words[i].Text);
                        }
                    }
                    word.Add(strBuilder.ToString());
                    PlateImageList.Add(plate);
                    filteredPlateImageList.Add(filteredPlate);
                    detectedPlateRegionList.Add(box);
                }
            }
        }
    }
}
// Recursively walks the contour hierarchy looking for license-plate shaped
// regions, OCRs each candidate and post-processes the text into the Turkish
// "city-code letters numbers" plate format (mapping commonly confused
// characters) before adding it to the result lists.
private void FindPlate(
    VectorOfVectorOfPoint cevreler,                            // contours
    int[,] hiyerarsi,                                          // contour hierarchy
    int idx,                                                   // index of the contour to examine
    IInputArray gri,                                           // grayscale source image
    IInputArray cannykenar,                                    // canny edge image
    List <IInputOutputArray> PlateGoruntuLisesi,               // plate images (out)
    List <IInputOutputArray> filtrelenmisPlateGoruntuListesi,  // filtered plate images (out)
    List <RotatedRect> tespitEdilenPlateBolgesiListesi,        // detected plate regions (out)
    List <String> licenses)                                    // recognized plates (out)
{
    for (; idx >= 0; idx = hiyerarsi[idx, 0])
    {
        int karaktersayisi = CharacterCount(hiyerarsi, idx);
        if (karaktersayisi == 0)
        {
            continue;
        }
        using (VectorOfPoint cevre = cevreler[idx])
        {
            if (CvInvoke.ContourArea(cevre) > 400)
            {
                if (karaktersayisi < 3)
                {
                    // Fewer than 3 child contours: not a plate itself, but a plate
                    // may still be nested inside, so recurse into the children.
                    FindPlate(cevreler, hiyerarsi, hiyerarsi[idx, 2], gri, cannykenar, PlateGoruntuLisesi, filtrelenmisPlateGoruntuListesi, tespitEdilenPlateBolgesiListesi, licenses);
                    continue;
                }
                RotatedRect kutu = CvInvoke.MinAreaRect(cevre);
                // Normalize the box angle into [-45, 45] by swapping width/height.
                if (kutu.Angle < -45.0)
                {
                    float tmp = kutu.Size.Width;
                    kutu.Size.Width = kutu.Size.Height;
                    kutu.Size.Height = tmp;
                    kutu.Angle += 90.0f;
                }
                else if (kutu.Angle > 45.0)
                {
                    float tmp = kutu.Size.Width;
                    kutu.Size.Width = kutu.Size.Height;
                    kutu.Size.Height = tmp;
                    kutu.Angle -= 90.0f;
                }
                double enboyoran = (double)kutu.Size.Width / kutu.Size.Height; // aspect ratio
                if (!(3.0 < enboyoran && enboyoran < 10.0))
                {
                    // Aspect ratio does not match a plate; still check nested contours.
                    if (hiyerarsi[idx, 2] > 0)
                    {
                        FindPlate(cevreler, hiyerarsi, hiyerarsi[idx, 2], gri, cannykenar, PlateGoruntuLisesi, filtrelenmisPlateGoruntuListesi, tespitEdilenPlateBolgesiListesi, licenses);
                    }
                    continue;
                }
                using (UMat tmp1 = new UMat())
                using (UMat tmp2 = new UMat())
                {
                    PointF[] kutuKoseNokta = kutu.GetVertices(); // corner points of the box
                    PointF[] destCorners = new PointF[] {
                        new PointF(0, kutu.Size.Height - 1),
                        new PointF(0, 0),
                        new PointF(kutu.Size.Width - 1, 0),
                        new PointF(kutu.Size.Width - 1, kutu.Size.Height - 1)
                    };
                    // Deskew the plate region into an axis-aligned patch.
                    using (Mat rot = CvInvoke.GetAffineTransform(kutuKoseNokta, destCorners))
                    {
                        CvInvoke.WarpAffine(gri, tmp1, rot, Size.Round(kutu.Size));
                    }
                    Size yaklasikBoyut = new Size(240, 180); // approximate target size
                    double olcek = Math.Min(yaklasikBoyut.Width / kutu.Size.Width, yaklasikBoyut.Height / kutu.Size.Height);
                    Size newSize = new Size((int)Math.Round(kutu.Size.Width * olcek), (int)Math.Round(kutu.Size.Height * olcek));
                    CvInvoke.Resize(tmp1, tmp2, newSize, 0, 0, Inter.Cubic);
                    // Trim a couple of pixels off the edges.
                    int edgePixelSize = 2;
                    Rectangle newRoi = new Rectangle(new Point(edgePixelSize, edgePixelSize), tmp2.Size - new Size(2 * edgePixelSize, 2 * edgePixelSize));
                    UMat Plate = new UMat(tmp2, newRoi);
                    UMat filtrelenmisPlate = FilterPlate(Plate);
                    Tesseract.Character[] words;
                    StringBuilder strBuilder = new StringBuilder();
                    using (UMat tmp = filtrelenmisPlate.Clone())
                    {
                        OCR.Recognize(tmp);
                        words = OCR.GetCharacters();
                        if (words.Length == 0)
                        {
                            continue;
                        }
                        for (int i = 0; i < words.Length; i++)
                        {
                            strBuilder.Append(words[i].Text);
                        }
                    }
                    // Count punctuation noise (sayac) and spaces (bosluk) in the OCR output.
                    int sayac = 0;
                    int bosluk = 0;
                    for (int i = 0; i < strBuilder.Length; i++)
                    {
                        if (strBuilder[i].ToString() == "." || strBuilder[i].ToString() == "?" || strBuilder[i].ToString() == "*" || strBuilder[i].ToString() == "-")
                        {
                            sayac++;
                        }
                        if (strBuilder[i].ToString() == " ")
                        {
                            bosluk++;
                        }
                    }
                    // Drop a leading E/F/I/T, which tends to be a misread plate border.
                    if (strBuilder[0].ToString() == "E" || strBuilder[0].ToString() == "F" || strBuilder[0].ToString() == "I" || strBuilder[0].ToString() == "T")
                    {
                        strBuilder.Remove(0, 1);
                    }
                    // Accept only outputs with no punctuation noise, enough length
                    // and at most two spaces (the Turkish plate format has three parts).
                    if (sayac == 0 && strBuilder.Length >= 9 && bosluk <= 2)
                    {
                        string[] parcalar; // the plate segments
                        String asd = strBuilder.ToString();
                        parcalar = asd.Split(' ');
                        if (parcalar.Length == 3)
                        {
                            CityCode = parcalar[0];
                            LetterGruop = parcalar[1];
                            NumberGroup = parcalar[2];
                            if (CityCode.Length == 3)
                            {
                                CityCode = CityCode.Remove(0, 1);
                            }
                            // The city code is numeric: map commonly confused letters to digits.
                            CityCode = CityCode.Replace('B', '8');
                            CityCode = CityCode.Replace('D', '0');
                            CityCode = CityCode.Replace('G', '6');
                            CityCode = CityCode.Replace('I', '1');
                            CityCode = CityCode.Replace('O', '0');
                            CityCode = CityCode.Replace('S', '5');
                            CityCode = CityCode.Replace('Z', '2');
                            CityCode = CityCode.Replace('L', '4');
                            // The middle group is alphabetic: map digits back to letters.
                            LetterGruop = LetterGruop.Replace('2', 'Z');
                            LetterGruop = LetterGruop.Replace('0', 'D');
                            LetterGruop = LetterGruop.Replace('5', 'S');
                            LetterGruop = LetterGruop.Replace('6', 'G');
                            LetterGruop = LetterGruop.Replace('8', 'B');
                            LetterGruop = LetterGruop.Replace('1', 'I');
                            // The trailing group is numeric: map letters to digits again.
                            NumberGroup = NumberGroup.Replace('B', '8');
                            NumberGroup = NumberGroup.Replace('D', '0');
                            NumberGroup = NumberGroup.Replace('G', '6');
                            NumberGroup = NumberGroup.Replace('I', '1');
                            NumberGroup = NumberGroup.Replace('O', '0');
                            NumberGroup = NumberGroup.Replace('S', '5');
                            NumberGroup = NumberGroup.Replace('Z', '2');
                            NumberGroup = NumberGroup.Replace('A', '4');
                            if (NumberGroup.Length == 5)
                            {
                                NumberGroup = NumberGroup.Remove(4, 1);
                            }
                            // Rebuild the plate string from the cleaned segments.
                            strBuilder.Remove(0, strBuilder.Length);
                            strBuilder.Append(CityCode);
                            strBuilder.Append(" ");
                            strBuilder.Append(LetterGruop);
                            strBuilder.Append(" ");
                            strBuilder.Append(NumberGroup);
                        }
                        if (strBuilder.Length >= 7 && strBuilder.Length <= 10)
                        {
                            licenses.Add(strBuilder.ToString());
                            PlateGoruntuLisesi.Add(Plate);
                            filtrelenmisPlateGoruntuListesi.Add(filtrelenmisPlate);
                            tespitEdilenPlateBolgesiListesi.Add(kutu);
                        }
                    }
                }
            }
        }
    }
}
/// <summary>
/// Runs the full plate-detection pipeline on an uploaded image
/// (threshold -> gray -> canny -> contour rectangles -> per-region OCR) and
/// records every intermediate image in a StepData model rendered by the Index view.
/// </summary>
/// <param name="uploadFile">Form post carrying the image in <c>FormFile</c>.</param>
/// <returns>The Index view bound to the populated StepData.</returns>
public IActionResult Steps(StepData uploadFile)
{
    StepData stepData = new StepData();
    ProcessCaptured processCaptured = new ProcessCaptured();

    using (MemoryStream ms = new MemoryStream())
    {
        CvInvoke.UseOpenCL = true;
        uploadFile.FormFile.CopyTo(ms);
        Image<Bgra, Byte> imgg = processCaptured.GetImageFromStream(ms);
        Mat finalImg = imgg.Mat.Clone();
        stepData = AddData(stepData, finalImg, "Orginal");

        using (Mat im = imgg.Mat)
        using (Mat threshold = new Mat())
        using (Mat gray = new Mat())
        using (Mat canny = new Mat())
        {
            CvInvoke.Threshold(im, threshold, 100, 255, ThresholdType.BinaryInv);
            stepData = AddData(stepData, threshold, "Threshold");

            CvInvoke.CvtColor(threshold, gray, ColorConversion.Bgr2Gray);
            stepData = AddGrayData(stepData, gray, "Gray");

            CvInvoke.Canny(gray, canny, 100, 50, 7);
            stepData = AddData(stepData, canny, "Canny");

            // Candidate rectangles from the edge image; draw them on the clone
            // so the original pixels fed to the OCR stay untouched.
            List<RotatedRect> rect = Contours(canny);
            if (rect != null && rect.Count > 0)
            {
                foreach (RotatedRect boxr in rect)
                {
                    CvInvoke.Polylines(finalImg, Array.ConvertAll(boxr.GetVertices(), Point.Round), true,
                        new Bgr(Color.DarkOrange).MCvScalar, 2);
                }
            }
            stepData = AddData(stepData, finalImg, "With Rectangle");

            List<Mat> mat = RoI(rect, gray);
            int i = 0;

            string path = AppContext.BaseDirectory;
            // FIX: the OCR engine wraps native resources and was never disposed.
            using (Tesseract _ocr = new Tesseract(path, "eng", OcrEngineMode.TesseractLstmCombined))
            {
                _ocr.SetVariable("tessedit_char_whitelist", "ABCDEFGHIJKLMNOPQRSTUVWXYZ-1234567890");
                foreach (Mat m in mat)
                {
                    i += 1;
                    _ocr.SetImage(m);
                    _ocr.Recognize();
                    Tesseract.Character[] words = _ocr.GetCharacters();

                    StringBuilder sb = new StringBuilder();
                    foreach (var c in words)
                    {
                        sb.Append(c.Text);
                    }
                    stepData = AddData(stepData, m, i.ToString(), sb.ToString());
                }
            }
        }
    }

    return View("Index", stepData);
}
/// <summary>
/// Detection pipeline for an uploaded image: threshold, grayscale, canny,
/// contour rectangles, then OCR of each region of interest. Each intermediate
/// result is appended to a StepData model and shown via the Index view.
/// </summary>
/// <param name="uploadFile">Form post carrying the image in <c>FormFile</c>.</param>
/// <returns>The Index view bound to the populated StepData.</returns>
public IActionResult Steps(StepData uploadFile)
{
    StepData stepData = new StepData();
    ProcessCaptured processCaptured = new ProcessCaptured();

    using (MemoryStream ms = new MemoryStream())
    {
        CvInvoke.UseOpenCL = true;
        uploadFile.FormFile.CopyTo(ms);
        Image<Bgra, Byte> imgg = processCaptured.GetImageFromStream(ms);
        Mat finalImg = imgg.Mat.Clone();
        stepData = AddData(stepData, finalImg, "Orginal");

        using (Mat im = imgg.Mat)
        using (Mat threshold = new Mat())
        using (Mat gray = new Mat())
        using (Mat canny = new Mat())
        {
            CvInvoke.Threshold(im, threshold, 100, 255, ThresholdType.BinaryInv);
            stepData = AddData(stepData, threshold, "Threshold");

            CvInvoke.CvtColor(threshold, gray, ColorConversion.Bgr2Gray);
            stepData = AddGrayData(stepData, gray, "Gray");

            CvInvoke.Canny(gray, canny, 100, 50, 7);
            stepData = AddData(stepData, canny, "Canny");

            // Candidate rectangles; drawn on the clone so OCR input stays clean.
            List<RotatedRect> rect = Contours(canny);
            if (rect != null && rect.Count > 0)
            {
                foreach (RotatedRect boxr in rect)
                {
                    CvInvoke.Polylines(finalImg, Array.ConvertAll(boxr.GetVertices(), Point.Round), true,
                        new Bgr(Color.DarkOrange).MCvScalar, 2);
                }
            }
            stepData = AddData(stepData, finalImg, "With Rectangle");

            List<Mat> mat = RoI(rect, gray);
            int i = 0;

            string path = AppContext.BaseDirectory;
            // FIX: dispose the native OCR engine (previously leaked).
            using (Tesseract _ocr = new Tesseract(path, "eng", OcrEngineMode.TesseractLstmCombined))
            {
                _ocr.SetVariable("tessedit_char_whitelist", "ABCDEFGHIJKLMNOPQRSTUVWXYZ-1234567890");
                foreach (Mat m in mat)
                {
                    i += 1;
                    _ocr.SetImage(m);
                    _ocr.Recognize();
                    Tesseract.Character[] words = _ocr.GetCharacters();

                    StringBuilder sb = new StringBuilder();
                    foreach (var c in words)
                    {
                        sb.Append(c.Text);
                    }
                    stepData = AddData(stepData, m, i.ToString(), sb.ToString());
                }
            }
        }
    }

    return View("Index", stepData);
}
/// <summary>
/// Warps every candidate rectangle out of the gray image into an upright crop,
/// resizes and filters it, then OCRs it. Returns all recognized strings
/// concatenated, one line per rectangle.
/// </summary>
/// <param name="rectangles">Candidate (possibly rotated) text regions.</param>
/// <param name="grayRectImg">Grayscale source image; smoothed in place by the pyramid pass.</param>
/// <returns>Newline-separated OCR results for all rectangles.</returns>
private static string GetTextFromRect(List<RotatedRect> rectangles, UMat grayRectImg)
{
    // Pyramid down/up acts as a cheap edge-preserving denoise of the source.
    // FIX: the temporary UMat was never disposed.
    using (UMat pyr = new UMat())
    {
        CvInvoke.PyrDown(grayRectImg, pyr);
        CvInvoke.PyrUp(pyr, grayRectImg);
    }

    // FIX: initialize the OCR engine once instead of once per rectangle.
    InitOcr("", "eng", OcrEngineMode.TesseractOnly);

    string tag = String.Empty;
    for (int i = 0; i < rectangles.Count; i++)
    {
        using (UMat tmp1 = new UMat())
        using (UMat tmp2 = new UMat())
        {
            // Map the rotated rectangle's corners onto an upright rectangle.
            PointF[] srcCorners = rectangles[i].GetVertices();
            PointF[] destCorners = new PointF[] {
                new PointF(0, rectangles[i].Size.Height - 1),
                new PointF(0, 0),
                new PointF(rectangles[i].Size.Width - 1, 0),
                new PointF(rectangles[i].Size.Width - 1, rectangles[i].Size.Height - 1) };
            using (Mat rot = CvInvoke.GetAffineTransform(srcCorners, destCorners))
            {
                // Applies an affine transformation to the image.
                CvInvoke.WarpAffine(grayRectImg, tmp1, rot, Size.Round(rectangles[i].Size));
            }

            // Resize so the font ends up around 10-12 px high; tesseract is more
            // accurate at that size.
            Size approxSize = new Size(240, 180);
            double scale = Math.Min((double)approxSize.Width / tmp1.Size.Width, (double)approxSize.Height / tmp1.Size.Height);
            Size newSize = new Size((int)Math.Round(tmp1.Size.Width * scale), (int)Math.Round(tmp1.Size.Height * scale));
            CvInvoke.Resize(tmp1, tmp2, newSize, 0, 0, Inter.Cubic);

            // Trim a 2 px border, then filter the *cropped* region.
            // FIX: the original built the ROI crop, immediately overwrote the
            // variable with FilterRegion(tmp2) (uncropped input) and leaked the
            // crop UMat, so the edge trim never took effect.
            int edgePixelSize = 2;
            Rectangle newRoi = new Rectangle(new Point(edgePixelSize, edgePixelSize),
                tmp2.Size - new Size(2 * edgePixelSize, 2 * edgePixelSize));
            using (UMat cropped = new UMat(tmp2, newRoi))
            using (UMat tagRegion = FilterRegion(cropped))
            {
                ResizeAndShow("TagRegion", tagRegion);

                using (UMat textRegion = tagRegion.Clone())
                {
                    _ocr.Recognize(textRegion);
                    Tesseract.Character[] ch = _ocr.GetCharacters();
                    for (int j = 0; j < ch.Length; j++)
                    {
                        tag += ch[j].Text;
                    }
                    tag += "\n";
                }
            }
        }
    }
    return tag;
}
/// <summary>
/// Runs OCR over <paramref name="image"/> and returns the recognized text.
/// In <see cref="OCRMode.FullPage"/> mode the whole page is recognized, with two
/// threshold-based retries when nothing is found; otherwise candidate text
/// regions are first located with the extremal-region (ER) filter cascade and
/// each region is OCR-ed individually. Recognized characters/regions are drawn
/// onto <paramref name="imageColor"/> for display.
/// </summary>
/// <param name="ocr">Initialized Tesseract engine.</param>
/// <param name="image">Input image (1 or 3 channels).</param>
/// <param name="mode">Full-page vs. text-region OCR.</param>
/// <param name="imageColor">Output/display image; receives a color copy of the input plus overlays.</param>
/// <returns>The recognized text (all regions concatenated in region mode).</returns>
private string OcrImage(Tesseract ocr, Mat image, OCRMode mode, Mat imageColor)
{
    Bgr drawCharColor = new Bgr(Color.Red);

    if (image.NumberOfChannels == 1)
    {
        CvInvoke.CvtColor(image, imageColor, ColorConversion.Gray2Bgr);
    }
    else
    {
        image.CopyTo(imageColor);
    }

    if (mode == OCRMode.FullPage)
    {
        ocr.SetImage(imageColor);
        // FIX: message said "recognizer"; now matches the region-mode message.
        if (ocr.Recognize() != 0)
        {
            throw new Exception("Failed to recognize image");
        }
        Tesseract.Character[] characters = ocr.GetCharacters();
        if (characters.Length == 0)
        {
            // Nothing found on the raw page: retry on a binarized version.
            // NOTE(review): this CvtColor assumes `image` is BGR here; a
            // single-channel input would make Bgr2Gray fail — confirm upstream.
            Mat imgGrey = new Mat();
            CvInvoke.CvtColor(image, imgGrey, ColorConversion.Bgr2Gray);
            Mat imgThresholded = new Mat();
            CvInvoke.Threshold(imgGrey, imgThresholded, 65, 255, ThresholdType.Binary);
            ocr.SetImage(imgThresholded);
            // FIX: Recognize() was missing after SetImage(), so GetCharacters()
            // returned the previous (empty) result.
            ocr.Recognize();
            characters = ocr.GetCharacters();
            imageColor = imgThresholded;
            if (characters.Length == 0)
            {
                // Second retry with a higher threshold on the original image.
                CvInvoke.Threshold(image, imgThresholded, 190, 255, ThresholdType.Binary);
                ocr.SetImage(imgThresholded);
                // FIX: same missing Recognize() as above.
                ocr.Recognize();
                characters = ocr.GetCharacters();
                imageColor = imgThresholded;
            }
        }
        foreach (Tesseract.Character c in characters)
        {
            CvInvoke.Rectangle(imageColor, c.Region, drawCharColor.MCvScalar);
        }
        return ocr.GetUTF8Text();
    }
    else
    {
        // Text-region mode: find candidate regions with the two-stage ER filter,
        // group them, then OCR each region separately.
        bool checkInvert = true;
        Rectangle[] regions;

        using (ERFilterNM1 er1 = new ERFilterNM1("trained_classifierNM1.xml", 8, 0.00025f, 0.13f, 0.4f, true, 0.1f))
        using (ERFilterNM2 er2 = new ERFilterNM2("trained_classifierNM2.xml", 0.3f))
        {
            int channelCount = image.NumberOfChannels;
            // When checkInvert is set, also search the inverted channels so
            // light-on-dark text is detected too.
            UMat[] channels = new UMat[checkInvert ? channelCount * 2 : channelCount];
            for (int i = 0; i < channelCount; i++)
            {
                UMat c = new UMat();
                CvInvoke.ExtractChannel(image, c, i);
                channels[i] = c;
            }
            if (checkInvert)
            {
                for (int i = 0; i < channelCount; i++)
                {
                    UMat c = new UMat();
                    CvInvoke.BitwiseNot(channels[i], c);
                    channels[i + channelCount] = c;
                }
            }

            VectorOfERStat[] regionVecs = new VectorOfERStat[channels.Length];
            for (int i = 0; i < regionVecs.Length; i++)
            {
                regionVecs[i] = new VectorOfERStat();
            }
            try
            {
                for (int i = 0; i < channels.Length; i++)
                {
                    er1.Run(channels[i], regionVecs[i]);
                    er2.Run(channels[i], regionVecs[i]);
                }
                using (VectorOfUMat vm = new VectorOfUMat(channels))
                {
                    regions = ERFilter.ERGrouping(image, vm, regionVecs, ERFilter.GroupingMethod.OrientationHoriz,
                        "trained_classifier_erGrouping.xml", 0.5f);
                }
            }
            finally
            {
                // Native vectors/mats must be released even if ER filtering throws.
                foreach (UMat tmp in channels)
                {
                    if (tmp != null)
                    {
                        tmp.Dispose();
                    }
                }
                foreach (VectorOfERStat tmp in regionVecs)
                {
                    if (tmp != null)
                    {
                        tmp.Dispose();
                    }
                }
            }

            // Grow each region by 10% and clamp it to the image bounds.
            Rectangle imageRegion = new Rectangle(Point.Empty, imageColor.Size);
            for (int i = 0; i < regions.Length; i++)
            {
                Rectangle r = ScaleRectangle(regions[i], 1.1);
                r.Intersect(imageRegion);
                regions[i] = r;
            }
        }

        List<Tesseract.Character> allChars = new List<Tesseract.Character>();
        String allText = String.Empty;
        foreach (Rectangle rect in regions)
        {
            using (Mat region = new Mat(image, rect))
            {
                ocr.SetImage(region);
                if (ocr.Recognize() != 0)
                {
                    throw new Exception("Failed to recognize image");
                }
                Tesseract.Character[] characters = ocr.GetCharacters();

                // Convert the character coordinates from the local region to global.
                for (int i = 0; i < characters.Length; i++)
                {
                    Rectangle charRegion = characters[i].Region;
                    charRegion.Offset(rect.Location);
                    characters[i].Region = charRegion;
                }
                allChars.AddRange(characters);
                allText += ocr.GetUTF8Text() + Environment.NewLine;
            }
        }

        Bgr drawRegionColor = new Bgr(Color.Red);
        foreach (Rectangle rect in regions)
        {
            CvInvoke.Rectangle(imageColor, rect, drawRegionColor.MCvScalar);
        }
        foreach (Tesseract.Character c in allChars)
        {
            CvInvoke.Rectangle(imageColor, c.Region, drawCharColor.MCvScalar);
        }
        return allText;
    }
}
/// <summary>
/// Video-frame variant of the detection pipeline: decodes a base64-encoded
/// frame, finds rectangular contours and OCRs each region of interest. Only
/// plausible plate strings (4-10 characters) are recorded in the StepData.
/// </summary>
/// <param name="uploadFile">Base64-encoded image bytes; ignored when null/blank.</param>
/// <returns>StepData holding the accepted OCR results (empty when none).</returns>
public StepData StepsVideo(string uploadFile)
{
    StepData stepData = new StepData();
    ProcessCaptured processCaptured = new ProcessCaptured();
    if (!string.IsNullOrWhiteSpace(uploadFile))
    {
        byte[] fileBytes = Convert.FromBase64String(uploadFile);
        using (MemoryStream ms = new MemoryStream(fileBytes))
        {
            Image<Bgra, Byte> imgg = processCaptured.GetImageFromStream(ms);
            Mat finalImg = imgg.Mat.Clone();
            using (Mat im = imgg.Mat.Clone())
            using (Mat threshold = new Mat())
            using (Mat gray = new Mat())
            using (Mat canny = new Mat())
            {
                CvInvoke.Threshold(im, threshold, 100, 255, ThresholdType.BinaryInv);
                CvInvoke.CvtColor(threshold, gray, ColorConversion.Bgr2Gray);
                CvInvoke.Canny(gray, canny, 100, 50, 7);

                // Candidate rectangles; drawn on the clone, OCR reads the gray image.
                List<RotatedRect> rect = Contours(canny);
                if (rect != null && rect.Count > 0)
                {
                    foreach (RotatedRect boxr in rect)
                    {
                        CvInvoke.Polylines(finalImg, Array.ConvertAll(boxr.GetVertices(), Point.Round), true,
                            new Bgr(Color.DarkOrange).MCvScalar, 2);
                    }
                }

                List<Mat> mat = RoI(rect, gray);
                int i = 0;
                string path = AppContext.BaseDirectory;
                // FIX: dispose the native OCR engine (it was leaked on every frame).
                using (Tesseract _ocr = new Tesseract(path, "eng", OcrEngineMode.TesseractLstmCombined))
                {
                    _ocr.SetVariable("tessedit_char_whitelist", "ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890");
                    foreach (Mat m in mat)
                    {
                        i += 1;
                        _ocr.SetImage(m);
                        _ocr.Recognize();
                        Tesseract.Character[] words = _ocr.GetCharacters();

                        StringBuilder sb = new StringBuilder();
                        foreach (var c in words)
                        {
                            sb.Append(c.Text);
                        }
                        // Keep only strings of plausible plate length.
                        if (sb.Length > 3 && sb.Length <= 10)
                        {
                            stepData = AddData(stepData, m, i.ToString(), sb.ToString());
                        }
                    }
                }
            }
        }
    }
    return stepData;
}
/// <summary>
/// Per-frame capture callback: shows the raw frame in imageBox1 and, depending
/// on which mode flag is set, a processed version (Otsu / gray / Sobel / Canny /
/// median blur / OCR) in imageBox2.
/// </summary>
private void ProcessFrame(object sender, EventArgs arg)
{
    Mat src = capture.QueryFrame();
    // FIX: QueryFrame can return null (end of stream / device loss); the
    // original dereferenced it unconditionally.
    if (src == null)
    {
        return;
    }
    imageBox1.Image = src;
    // FIX: every branch allocated `Mat[] mat = src.Split();` without using or
    // disposing it — a native memory leak on every frame. Removed throughout.
    if (isOstu)
    {
        Mat dst = new Mat(src.Size, DepthType.Cv8U, 1);
        CvInvoke.CvtColor(src, dst, ColorConversion.Bgr2Gray);
        CvInvoke.Threshold(dst, dst, 0, 255, ThresholdType.Binary | ThresholdType.Otsu);
        imageBox2.Image = dst;
    }
    else if (isGray)
    {
        Mat dst = new Mat(src.Size, DepthType.Cv8U, 1);
        CvInvoke.CvtColor(src, dst, ColorConversion.Bgr2Gray);
        imageBox2.Image = dst;
    }
    else if (isSobel)
    {
        int scale = 1;
        int delta = 0;
        Size sz = src.Size;
        Mat gray = new Mat(sz, DepthType.Cv8U, 1);
        Mat grad = new Mat(sz, DepthType.Cv8U, 1);
        Mat grad_x = new Mat(sz, DepthType.Cv8U, 1), grad_y = new Mat(sz, DepthType.Cv8U, 1);
        Mat abs_grad_x = new Mat(sz, DepthType.Cv8U, 1), abs_grad_y = new Mat(sz, DepthType.Cv8U, 1);
        CvInvoke.CvtColor(src, gray, ColorConversion.Bgr2Gray);
        // 16-bit signed gradients, then absolute/scaled back to 8-bit before blending.
        CvInvoke.Sobel(gray, grad_x, DepthType.Cv16S, 1, 0, 3, scale, delta, Emgu.CV.CvEnum.BorderType.Default);
        CvInvoke.ConvertScaleAbs(grad_x, abs_grad_x, 1.0, 0.0);
        CvInvoke.Sobel(gray, grad_y, DepthType.Cv16S, 0, 1, 3, scale, delta, Emgu.CV.CvEnum.BorderType.Default);
        CvInvoke.ConvertScaleAbs(grad_y, abs_grad_y, 1.0, 0.0);
        CvInvoke.AddWeighted(abs_grad_x, 0.5, abs_grad_y, 0.5, 0, grad);
        imageBox2.Image = grad;
    }
    else if (isCanny)
    {
        Mat dst = new Mat(src.Size, DepthType.Cv8U, 1);
        CvInvoke.CvtColor(src, dst, ColorConversion.Bgr2Gray);
        CvInvoke.Canny(dst, dst, 50, 150, 3);
        imageBox2.Image = dst;
    }
    else if (isMedianBlur)
    {
        Mat dst = new Mat(src.Size, DepthType.Cv8U, 3);
        CvInvoke.MedianBlur(src, dst, 5);
        imageBox2.Image = dst;
    }
    else if (isOcr)
    {
        // Crop the user-selected rectangle and OCR digits out of it.
        Image<Bgr, byte> img = new Image<Bgr, byte>(imageBox1.Image.Bitmap);
        img.ROI = Rect;
        // FIX: the engine was created per frame and never disposed; at minimum
        // release it when the frame is done.
        using (Tesseract tess = new Tesseract(@"tessdata\", "eng", OcrEngineMode.TesseractLstmCombined))
        {
            tess.SetVariable("tessedit_char_whitelist", "1234567890");
            Mat dst = new Mat(img.Size, DepthType.Cv8U, 1);
            CvInvoke.CvtColor(img.Mat, dst, ColorConversion.Bgr2Gray);
            CvInvoke.Canny(dst, dst, 50, 150, 3);
            CvInvoke.Threshold(dst, dst, 128, 255, ThresholdType.BinaryInv);
            tess.SetImage(dst);
            tess.Recognize();
            imageBox2.Image = dst;
            txtOcr.Text = tess.GetUTF8Text();
        }
    }
    else
    {
        imageBox2.Image = src;
    }
}
/// <summary>
/// Recursively walks the contour hierarchy looking for license-plate
/// candidates; each accepted candidate is warped upright, resized, filtered
/// and OCR-ed, and the results are appended to the four output lists.
/// </summary>
/// <param name="contours">All contours found on the edge image.</param>
/// <param name="hierachy">Contour hierarchy; this code reads [i,0] as the next sibling and [i,2] as the first child.</param>
/// <param name="idx">Index of the first contour to examine on this level.</param>
/// <param name="gray">Grayscale source image the plate pixels are warped out of.</param>
/// <param name="canny">Edge image; only forwarded to recursive calls.</param>
/// <param name="licensePlateImagesList">Output: cropped plate images (ownership transferred to caller).</param>
/// <param name="filteredLicensePlateImagesList">Output: filtered plate images (ownership transferred to caller).</param>
/// <param name="detectedLicensePlateRegionList">Output: rotated rectangles of accepted plates.</param>
/// <param name="licenses">Output: OCR-ed plate strings.</param>
private void FindLicensePlate(
    VectorOfVectorOfPoint contours, int[,] hierachy, int idx, IInputArray gray, IInputArray canny,
    List<IInputOutputArray> licensePlateImagesList, List<IInputOutputArray> filteredLicensePlateImagesList,
    List<RotatedRect> detectedLicensePlateRegionList, List<String> licenses)
{
    // Iterate over all siblings on this hierarchy level.
    for (; idx >= 0; idx = hierachy[idx, 0])
    {
        int numberOfChildren = GetNumberOfChildren(hierachy, idx);
        //if it does not contains any children (charactor), it is not a license plate region
        if (numberOfChildren == 0)
        {
            continue;
        }

        using (VectorOfPoint contour = contours[idx])
        {
            // Ignore tiny contours outright.
            if (CvInvoke.ContourArea(contour) > 400)
            {
                if (numberOfChildren < 3)
                {
                    //If the contour has less than 3 children, it is not a license plate (assuming license plate has at least 3 charactor)
                    //However we should search the children of this contour to see if any of them is a license plate
                    FindLicensePlate(contours, hierachy, hierachy[idx, 2], gray, canny, licensePlateImagesList,
                        filteredLicensePlateImagesList, detectedLicensePlateRegionList, licenses);
                    continue;
                }

                RotatedRect box = CvInvoke.MinAreaRect(contour);
                // Normalize the box so width is the long side and the angle is
                // within [-45, 45] degrees (swap sides, rotate by 90).
                if (box.Angle < -45.0)
                {
                    float tmp = box.Size.Width;
                    box.Size.Width = box.Size.Height;
                    box.Size.Height = tmp;
                    box.Angle += 90.0f;
                }
                else if (box.Angle > 45.0)
                {
                    float tmp = box.Size.Width;
                    box.Size.Width = box.Size.Height;
                    box.Size.Height = tmp;
                    box.Angle -= 90.0f;
                }

                double whRatio = (double)box.Size.Width / box.Size.Height;
                if (!(3.0 < whRatio && whRatio < 10.0))
                //if (!(1.0 < whRatio && whRatio < 2.0))
                {
                    //if the width height ratio is not in the specific range,it is not a license plate
                    //However we should search the children of this contour to see if any of them is a license plate
                    //Contour<Point> child = contours.VNext;
                    if (hierachy[idx, 2] > 0)
                    {
                        FindLicensePlate(contours, hierachy, hierachy[idx, 2], gray, canny, licensePlateImagesList,
                            filteredLicensePlateImagesList, detectedLicensePlateRegionList, licenses);
                    }
                    continue;
                }

                using (UMat tmp1 = new UMat())
                using (UMat tmp2 = new UMat())
                {
                    // Warp the rotated box into an upright rectangle.
                    PointF[] srcCorners = box.GetVertices();
                    PointF[] destCorners = new PointF[] {
                        new PointF(0, box.Size.Height - 1),
                        new PointF(0, 0),
                        new PointF(box.Size.Width - 1, 0),
                        new PointF(box.Size.Width - 1, box.Size.Height - 1) };
                    using (Mat rot = CvInvoke.GetAffineTransform(srcCorners, destCorners))
                    {
                        CvInvoke.WarpAffine(gray, tmp1, rot, Size.Round(box.Size));
                    }

                    //resize the license plate such that the front is ~ 10-12. This size of front results in better accuracy from tesseract
                    Size approxSize = new Size(240, 180);
                    double scale = Math.Min(approxSize.Width / box.Size.Width, approxSize.Height / box.Size.Height);
                    Size newSize = new Size((int)Math.Round(box.Size.Width * scale), (int)Math.Round(box.Size.Height * scale));
                    CvInvoke.Resize(tmp1, tmp2, newSize, 0, 0, Inter.Cubic);

                    //removes some pixels from the edge
                    int edgePixelSize = 2;
                    Rectangle newRoi = new Rectangle(new Point(edgePixelSize, edgePixelSize),
                        tmp2.Size - new Size(2 * edgePixelSize, 2 * edgePixelSize));
                    // NOTE: plate/filteredPlate are intentionally not disposed here;
                    // they are handed to the output lists below.
                    UMat plate = new UMat(tmp2, newRoi);
                    UMat filteredPlate = FilterPlate(plate);

                    Tesseract.Character[] words;
                    StringBuilder strBuilder = new StringBuilder();
                    using (UMat tmp = filteredPlate.Clone())
                    {
                        _ocr.Recognize(tmp);
                        words = _ocr.GetCharacters();
                        // No characters recognized: skip this candidate entirely.
                        if (words.Length == 0)
                        {
                            continue;
                        }
                        for (int i = 0; i < words.Length; i++)
                        {
                            strBuilder.Append(words[i].Text);
                        }
                    }
                    licenses.Add(strBuilder.ToString());
                    licensePlateImagesList.Add(plate);
                    filteredLicensePlateImagesList.Add(filteredPlate);
                    detectedLicensePlateRegionList.Add(box);
                }
            }
        }
    }
}
/// <summary>
/// Image&lt;Gray,Byte&gt;-based variant of the recursive plate search: walks the
/// contour hierarchy, accepts contours whose child count and aspect ratio look
/// like a plate, crops/resizes/filters the region and OCRs it, appending the
/// results to the output lists.
/// </summary>
/// <param name="contours">All contours found on the edge image.</param>
/// <param name="hierachy">Contour hierarchy; this code reads [i,0] as the next sibling and [i,2] as the first child.</param>
/// <param name="idx">Index of the first contour to examine on this level.</param>
/// <param name="gray">Grayscale source image the plate is cropped from.</param>
/// <param name="canny">Edge image; only forwarded to recursive calls.</param>
/// <param name="licensePlateImagesList">Output: cropped plate images (ownership transferred to caller).</param>
/// <param name="filteredLicensePlateImagesList">Output: filtered plate images (ownership transferred to caller).</param>
/// <param name="detectedLicensePlateRegionList">Output: rotated rectangles of accepted plates.</param>
/// <param name="licenses">Output: OCR-ed plate strings.</param>
private void FindLicensePlate(
    VectorOfVectorOfPoint contours, int[,] hierachy, int idx, Image<Gray, Byte> gray, Mat canny,
    List<Image<Gray, Byte>> licensePlateImagesList, List<Image<Gray, Byte>> filteredLicensePlateImagesList,
    List<RotatedRect> detectedLicensePlateRegionList, List<String> licenses)
{
    // Iterate over all siblings on this hierarchy level.
    for (; idx >= 0; idx = hierachy[idx, 0])
    {
        int numberOfChildren = GetNumberOfChildren(hierachy, idx);
        //if it does not contains any children (charactor), it is not a license plate region
        if (numberOfChildren == 0)
        {
            continue;
        }

        using (VectorOfPoint contour = contours[idx])
        {
            // Ignore tiny contours outright.
            if (CvInvoke.ContourArea(contour) > 400)
            {
                if (numberOfChildren < 3)
                {
                    //If the contour has less than 3 children, it is not a license plate (assuming license plate has at least 3 charactor)
                    //However we should search the children of this contour to see if any of them is a license plate
                    FindLicensePlate(contours, hierachy, hierachy[idx, 2], gray, canny, licensePlateImagesList,
                        filteredLicensePlateImagesList, detectedLicensePlateRegionList, licenses);
                    continue;
                }

                RotatedRect box = CvInvoke.MinAreaRect(contour);
                // Normalize the box so width is the long side and the angle is
                // within [-45, 45] degrees (swap sides, rotate by 90).
                if (box.Angle < -45.0)
                {
                    float tmp = box.Size.Width;
                    box.Size.Width = box.Size.Height;
                    box.Size.Height = tmp;
                    box.Angle += 90.0f;
                }
                else if (box.Angle > 45.0)
                {
                    float tmp = box.Size.Width;
                    box.Size.Width = box.Size.Height;
                    box.Size.Height = tmp;
                    box.Angle -= 90.0f;
                }

                double whRatio = (double)box.Size.Width / box.Size.Height;
                if (!(3.0 < whRatio && whRatio < 10.0))
                //if (!(1.0 < whRatio && whRatio < 2.0))
                {
                    //if the width height ratio is not in the specific range,it is not a license plate
                    //However we should search the children of this contour to see if any of them is a license plate
                    //Contour<Point> child = contours.VNext;
                    if (hierachy[idx, 2] > 0)
                    {
                        FindLicensePlate(contours, hierachy, hierachy[idx, 2], gray, canny, licensePlateImagesList,
                            filteredLicensePlateImagesList, detectedLicensePlateRegionList, licenses);
                    }
                    continue;
                }

                using (Image<Gray, Byte> tmp1 = gray.Copy(box))
                //resize the license plate such that the front is ~ 10-12. This size of front results in better accuracy from tesseract
                using (Image<Gray, Byte> tmp2 = tmp1.Resize(240, 180, Emgu.CV.CvEnum.Inter.Cubic, true))
                {
                    //removes some pixels from the edge
                    int edgePixelSize = 2;
                    tmp2.ROI = new Rectangle(new Point(edgePixelSize, edgePixelSize),
                        tmp2.Size - new Size(2 * edgePixelSize, 2 * edgePixelSize));
                    // NOTE: plate/filteredPlate are intentionally not disposed here;
                    // they are handed to the output lists below.
                    Image<Gray, Byte> plate = tmp2.Copy();
                    Image<Gray, Byte> filteredPlate = FilterPlate(plate);

                    Tesseract.Character[] words;
                    StringBuilder strBuilder = new StringBuilder();
                    using (Image<Gray, Byte> tmp = filteredPlate.Clone())
                    {
                        _ocr.Recognize(tmp);
                        words = _ocr.GetCharacters();
                        // No characters recognized: skip this candidate entirely.
                        if (words.Length == 0)
                        {
                            continue;
                        }
                        for (int i = 0; i < words.Length; i++)
                        {
                            strBuilder.Append(words[i].Text);
                        }
                    }
                    licenses.Add(strBuilder.ToString());
                    licensePlateImagesList.Add(plate);
                    filteredLicensePlateImagesList.Add(filteredPlate);
                    detectedLicensePlateRegionList.Add(box);
                }
            }
        }
    }
}
public Tesseract.Character[] recognize(IInputArray image) { ocr.SetImage(image); ocr.Recognize(); return(ocr.GetCharacters()); }
public StepData StepsVideo(string uploadFile) { StepData stepData = new StepData(); ProcessCaptured processCaptured = new ProcessCaptured(); Image <Bgra, Byte> imgg = null; if (!string.IsNullOrWhiteSpace(uploadFile)) { var element = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(22, 3), new Point(1, 1)); byte[] fileBytes = Convert.FromBase64String(uploadFile); using (MemoryStream ms = new MemoryStream(fileBytes)) { imgg = processCaptured.GetImageFromStream(ms); Mat finalImg = new Mat(); using (Mat im = imgg.Mat.Clone()) { using (Mat blur = new Mat()) { using (Mat gray = new Mat()) { using (Mat sobal = new Mat()) { using (Mat threshold = new Mat()) { using (Mat morpology = new Mat()) { using (Mat canny = new Mat()) { Size size = new Size(7, 7); CvInvoke.Threshold(im, threshold, 100, 255, ThresholdType.Binary); //CvInvoke.GaussianBlur(im, blur, size, 0); CvInvoke.CvtColor(im, gray, ColorConversion.Bgr2Gray); //CvInvoke.Sobel(gray, sobal, DepthType.Cv8U, 1, 0, 3); //CvInvoke.Threshold(sobal, threshold, 0, 255, ThresholdType.Otsu); //CvInvoke.MorphologyEx(gray, morpology, MorphOp.Close, element, new Point(0, 0), 1, BorderType.Default, new MCvScalar()); CvInvoke.Canny(gray, canny, 100, 50, 7); finalImg = im.Clone(); var box = Contours(canny); //Mat mat=DrawRect(finalImg, box); List <Mat> roi = RoI(box, finalImg); string path = AppContext.BaseDirectory; using (Tesseract _ocr = new Tesseract(path, "eng", OcrEngineMode.TesseractLstmCombined)) { _ocr.SetVariable("tessedit_char_whitelist", "ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890"); foreach (Mat m in roi) { _ocr.SetImage(m); _ocr.Recognize(); Tesseract.Character[] words = _ocr.GetCharacters(); //string wor=words. StringBuilder sb = new StringBuilder(); foreach (var c in words) { sb.Append(c.Text); } string strTemp = sb.ToString().Replace(" ", ""); if (strTemp.ToString().Length > 3 && strTemp.Length <= 10) { stepData = AddData(stepData, m, "", strTemp); } } } } } } } } } } } } imgg = null; return(stepData); }