private void GetVideo(object sender, EventArgs e)
{
    // Grab one frame from the capture device
    var frame = myCapture.QueryFrame();

    // Show it in the ImageBox (the original queried a second frame here and discarded the first)
    imageBox1.Image = frame;
}
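Not shown in the snippet above is how the handler gets called. A minimal, hypothetical wiring (class name is illustrative; it assumes the Emgu.CV and Emgu.CV.UI packages and hooks the handler to Application.Idle):

using System;
using System.Windows.Forms;
using Emgu.CV;
using Emgu.CV.UI;

public class CameraForm : Form
{
    private readonly VideoCapture myCapture = new VideoCapture(0);                // default webcam (index assumed)
    private readonly ImageBox imageBox1 = new ImageBox { Dock = DockStyle.Fill };

    public CameraForm()
    {
        Controls.Add(imageBox1);
        Application.Idle += GetVideo;   // grab and display a frame whenever the UI is idle
    }

    private void GetVideo(object sender, EventArgs e)
    {
        // Same body as the handler above
        imageBox1.Image = myCapture.QueryFrame();
    }
}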
Image<Bgr, byte> QueryFrame(int cameraNum, int width, int height)
{
    using (VideoCapture videoCapture = new VideoCapture(cameraNum))
    {
        // Disable autofocus and auto-exposure so repeated captures are comparable
        videoCapture.SetCaptureProperty(CapProp.Autofocus, 0);
        videoCapture.SetCaptureProperty(CapProp.AutoExposure, 0);

        ICapture capture = videoCapture;

        // Query twice: many drivers hand back a stale buffer on the first frame
        capture.QueryFrame();
        Mat frameMat = capture.QueryFrame();

        // Convert to a BGR image and resize to the requested dimensions
        Image<Bgr, byte> image = frameMat.ToImage<Bgr, byte>();
        return image.Resize(width, height, Inter.Linear);
    }
}
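A hypothetical call site for this helper; the camera index and target size are illustrative, and pictureBox1 stands for any WinForms PictureBox:

// Grab one resized frame from camera 0 and hand it to a PictureBox
Image<Bgr, byte> snapshot = QueryFrame(0, 640, 480);
pictureBox1.Image = snapshot.ToBitmap();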
void Capture(object sender, EventArgs e)
{
    try
    {
        _ImgMat = _Capture.QueryFrame();
    }
    catch
    {
        // Ignore frames that fail to arrive (e.g. device busy or shutting down)
    }
}
Result[] ScanBarCode()
{
    Result[] resultArray = null;

    // Grab a frame and wrap it as an HSV image
    ICapture capture = (ICapture)video;
    Mat img = capture.QueryFrame();
    Image<Hsv, byte> image1 = new Image<Hsv, byte>(img.Bitmap);

    // Outline the ROI in white and save the frame to disk
    int width = integerVariable3 - integerVariable1;
    int height = integerVariable4 - integerVariable2;
    Rectangle rect = new Rectangle(integerVariable1, integerVariable2, width, height);
    image1.Draw(rect, new Hsv(0.0, 0.0, byte.MaxValue), 10, Emgu.CV.CvEnum.LineType.AntiAlias, 0);
    image1.Save(@"C:\Images\Labe.jpg");
    image1.Dispose();

    // Reload the saved frame and decode all barcodes in it with ZXing
    Image Image1 = Bitmap.FromFile(@"C:\Images\Labe.jpg");
    Image<Bgr, Byte> myImage1 = new Image<Bgr, Byte>((Bitmap)Image1);
    try
    {
        resultArray = new BarcodeReader().DecodeMultiple(myImage1.Bitmap);
        if (resultArray != null && resultArray.Length > 0)
        {
            // Collect the text and format of every decoded barcode
            string[] texts = new string[resultArray.Length];
            string[] formats = new string[resultArray.Length];
            for (int index = 0; index < resultArray.Length; ++index)
            {
                texts[index] = resultArray[index].Text;
                formats[index] = resultArray[index].BarcodeFormat.ToString();
            }
        }
    }
    catch (Exception)
    {
        // Decoding failures are ignored; the method simply returns null
    }
    Image1.Dispose();
    return resultArray;
}
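A possible caller that logs what was decoded (ZXing's Result exposes Text and BarcodeFormat):

Result[] results = ScanBarCode();
if (results != null)
{
    foreach (Result result in results)
    {
        Console.WriteLine("{0} ({1})", result.Text, result.BarcodeFormat);
    }
}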
private void timer1_Tick(object sender, EventArgs e)
{
    // ROI corners from the text boxes: (X1, Y1) upper-left, (X2, Y2) lower-right
    integerVariable1 = Convert.ToInt32(txtWidthX0.Text);   // X1
    integerVariable2 = Convert.ToInt32(txtHeightY0.Text);  // Y1
    integerVariable3 = Convert.ToInt32(txtWidthX1.Text);   // X2
    integerVariable4 = Convert.ToInt32(txtHeightY1.Text);  // Y2

    // Disable autofocus so the live preview stays stable
    video.SetCaptureProperty(Emgu.CV.CvEnum.CapProp.Autofocus, 0);
    //video.SetCaptureProperty(CapProp.Focus, 75);

    ICapture capture = (ICapture)video;
    Mat img = capture.QueryFrame();
    _imgRealTime = new Image<Hsv, byte>(img.Bitmap);

    // Draw the ROI and overlay the latest barcode / OCR results
    int width = integerVariable3 - integerVariable1;
    int height = integerVariable4 - integerVariable2;
    Rectangle rect = new Rectangle(integerVariable1, integerVariable2, width, height);
    _imgRealTime.Draw(rect, new Hsv(0.0, 0.0, byte.MaxValue), 5, Emgu.CV.CvEnum.LineType.AntiAlias, 0);
    CvInvoke.PutText(_imgRealTime, "BarCode: " + _SerialNumberFromBarcode, new Point(100, 60),
                     FontFace.HersheyComplex, 1.0, new Rgb(0, 0, 0).MCvScalar, 2);
    CvInvoke.PutText(_imgRealTime, "OCR: " + _SerialNumberFromOCR, new Point(100, 100),
                     FontFace.HersheyComplex, 1.0, new Rgb(0, 0, 0).MCvScalar, 2);

    pictureBox1.Image = _imgRealTime.Bitmap;
}
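The handler above is driven by a Windows Forms timer. A possible setup, assuming timer1 is a System.Windows.Forms.Timer created alongside the form:

// Assumed wiring, e.g. in the form's constructor or Load handler
timer1.Interval = 100;        // redraw the preview roughly ten times per second
timer1.Tick += timer1_Tick;
timer1.Start();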
string TesseractAPI()
{
    using (var objOcr = OcrApi.Create())
    {
        objOcr.Init(Patagames.Ocr.Enums.Languages.English);
        // Restrict recognition to digits, '-' and the characters of "Model:"
        objOcr.SetVariable("tessedit_char_whitelist", "0123456789-Model:");

        // Grab a frame, outline the ROI and save the image to disk
        ICapture capture = (ICapture)video;
        Mat img = capture.QueryFrame();
        Image<Hsv, byte> image1 = new Image<Hsv, byte>(img.Bitmap);
        int width = integerVariable3 - integerVariable1;
        int height = integerVariable4 - integerVariable2;
        Rectangle rect = new Rectangle(integerVariable1, integerVariable2, width, height);
        image1.Draw(rect, new Hsv(0.0, 0.0, byte.MaxValue), 10, Emgu.CV.CvEnum.LineType.AntiAlias, 0);
        image1.Save(@"C:\Images\Labe.jpg");
        image1.Dispose();

        // Reload the saved frame and run OCR on it
        var _bmp = Bitmap.FromFile(@"C:\Images\Labe.jpg");
        string _OCR = objOcr.GetTextFromImage((Bitmap)_bmp);
        _bmp.Dispose();
        return _OCR;
    }
}
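A possible way to feed the overlay drawn in timer1_Tick, assuming _SerialNumberFromBarcode and _SerialNumberFromOCR are the fields used there:

// Refresh the values that timer1_Tick overlays on the preview (assumed wiring)
Result[] barcodes = ScanBarCode();
if (barcodes != null && barcodes.Length > 0)
{
    _SerialNumberFromBarcode = barcodes[0].Text;
}
_SerialNumberFromOCR = TesseractAPI();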
private void ProcessFrame(object sender, EventArgs e)
{
    //String str = String.Format("withBall.jpg");
    //originalImg = CvInvoke.Imread(str);
    originalImg = capture.QueryFrame();
    Image<Bgr, Byte> outputImg = originalImg.ToImage<Bgr, Byte>();
    int imgWidth = originalImg.Width;
    int imgHeight = originalImg.Height;

    // Convert BGR to gray
    UMat grayImg = new UMat();
    CvInvoke.CvtColor(originalImg, grayImg, ColorConversion.Bgr2Gray);

    // Use an image pyramid (down then up) to remove noise
    UMat pyrDown = new UMat();
    CvInvoke.PyrDown(grayImg, pyrDown);
    CvInvoke.PyrUp(pyrDown, grayImg);

    // Find potential plate regions
    UMat binaryImg = new UMat();
    CvInvoke.Threshold(grayImg, binaryImg, 200, 255, ThresholdType.BinaryInv);
    Image<Gray, Byte> binaryImgG = binaryImg.ToImage<Gray, Byte>();
    VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
    int[,] hierarchy = CvInvoke.FindContourTree(binaryImgG, contours, ChainApproxMethod.ChainApproxNone);

    // Keep the largest contour that has more than three child contours
    double maxArea = 0;
    int maxAreaContourIndex = 0;
    for (int idx = 0; idx < contours.Size; idx++)
    {
        //bool isChild = isChildContour(hierarchy, idx);
        int numberOfChildren = GetNumberOfChildren(hierarchy, idx);
        using (VectorOfPoint contour = contours[idx])
        {
            if (numberOfChildren > 3)
            {
                double area = CvInvoke.ContourArea(contour);
                if (area > maxArea)
                {
                    maxArea = area;   // the original never updated maxArea
                    maxAreaContourIndex = idx;
                }
            }
        }
    }

    // Fill the selected contour and open (erode + dilate) it to get the plate mask
    Image<Gray, Byte> mask1 = new Image<Gray, Byte>(imgWidth, imgHeight);
    CvInvoke.DrawContours(mask1, contours, maxAreaContourIndex, new MCvScalar(255), -1);
    int openingFactor1 = 100;
    Image<Gray, Byte> plateMask = mask1.Erode(openingFactor1);
    plateMask = plateMask.Dilate(openingFactor1);

    // Outline every blob found inside the plate mask
    CvBlobs blobs = new CvBlobs();
    CvBlobDetector blob_detector = new CvBlobDetector();
    //blobs.FilterByArea(10000, 1000000);
    blob_detector.Detect(plateMask, blobs);
    foreach (CvBlob blob in blobs.Values)
    {
        Rectangle r = blob.BoundingBox;
        outputImg.Draw(r, new Bgr(0, 255, 255), 4);
    }

    // Restrict the inverted binary image to the plate area and detect blobs again
    Image<Gray, Byte> invBinaryImgG = binaryImg.ToImage<Gray, Byte>();
    CvInvoke.BitwiseNot(invBinaryImgG, invBinaryImgG);
    Image<Gray, Byte> mask3 = plateMask.Clone();
    CvInvoke.BitwiseAnd(plateMask, invBinaryImgG, mask3);
    blob_detector.Detect(mask3, blobs);

    // Classify blobs by size: small squares are pattern points, the large one is the ball
    int patternSize = 20;
    int ballSize = 60;
    int tolerance = 10;
    int patternHigh = patternSize + tolerance;
    int patternLow = patternSize - tolerance;
    int ballHigh = ballSize + tolerance * 2;
    int ballLow = ballSize - tolerance * 2;
    blobs.FilterByArea(patternLow * patternLow, ballHigh * ballHigh);

    List<PointF> patternPoints = new List<PointF>();
    PointF ballPoint = new PointF();
    int numberOfPatternPointFound = 0;
    foreach (CvBlob blob in blobs.Values)
    {
        Rectangle r = blob.BoundingBox;
        if ((r.Height > patternLow) && (r.Height < patternHigh) && (r.Width > patternLow) && (r.Width < patternHigh))
        {
            outputImg.Draw(new CircleF(blob.Centroid, 2), new Bgr(0, 0, 255), 2);
            patternPoints.Add(blob.Centroid);
            numberOfPatternPointFound++;
        }
        if ((r.Height > ballLow) && (r.Height < ballHigh) && (r.Width > ballLow) && (r.Width < ballHigh))
        {
            outputImg.Draw(new CircleF(blob.Centroid, 5), new Bgr(0, 0, 255), 5);
            ballPoint = blob.Centroid;
        }
    }
    label14.Text = String.Format("{0}", numberOfPatternPointFound);

    // Classify the pattern points by quadrant around their centroid:
    // 1 = top-left, 2 = top-right, 3 = bottom-right, 4 = bottom-left
    List<PointF> sortedPatternPoints = new List<PointF>();
    List<int> pointType = new List<int>();
    PointF centerPoint = new PointF();
    foreach (PointF patternPoint in patternPoints)
    {
        centerPoint.X += patternPoint.X;
        centerPoint.Y += patternPoint.Y;
    }
    centerPoint.X /= numberOfPatternPointFound;
    centerPoint.Y /= numberOfPatternPointFound;

    x_position.Text = ballPoint.X.ToString();
    y_position.Text = ballPoint.Y.ToString();

    foreach (PointF patternPoint in patternPoints)
    {
        if ((patternPoint.X < centerPoint.X) && (patternPoint.Y < centerPoint.Y))
        {
            sortedPatternPoints.Add(patternPoint);
            pointType.Add(1);
        }
        else if ((patternPoint.X > centerPoint.X) && (patternPoint.Y < centerPoint.Y))
        {
            sortedPatternPoints.Add(patternPoint);
            pointType.Add(2);
        }
        else if ((patternPoint.X > centerPoint.X) && (patternPoint.Y > centerPoint.Y))
        {
            sortedPatternPoints.Add(patternPoint);
            pointType.Add(3);
        }
        else if ((patternPoint.X < centerPoint.X) && (patternPoint.Y > centerPoint.Y))
        {
            sortedPatternPoints.Add(patternPoint);
            pointType.Add(4);
        }
    }

    // Label each pattern point with its quadrant number
    int id = 0;
    foreach (PointF patternPoint in sortedPatternPoints)
    {
        CvInvoke.PutText(outputImg, String.Format("{0}", pointType[id++]),
                         new System.Drawing.Point((int)patternPoint.X, (int)patternPoint.Y),
                         FontFace.HersheyComplex, 1.0, new Bgr(0, 255, 0).MCvScalar);
    }

    imageBox1.Image = outputImg;
}
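GetNumberOfChildren is not included in the listing. A plausible implementation, assuming the hierarchy returned by CvInvoke.FindContourTree follows OpenCV's usual [next, previous, firstChild, parent] column layout:

static int GetNumberOfChildren(int[,] hierarchy, int idx)
{
    // Index of the first child contour, or a negative value if there is none
    int child = hierarchy[idx, 2];
    if (child < 0)
    {
        return 0;
    }

    // Walk the "next sibling" links starting from the first child
    int count = 1;
    while (hierarchy[child, 0] >= 0)
    {
        child = hierarchy[child, 0];
        count++;
    }
    return count;
}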
int PixelCountExample()
{
    // Hue limits arrive in degrees (0-360) and are halved to OpenCV's 0-179 hue scale;
    // saturation and value limits arrive in percent and are scaled to 0-255
    double hueLow = floatVariable1 / 2.0;
    double hueHigh = floatVariable4 / 2.0;
    double satLow = floatVariable2 / 100.0 * byte.MaxValue;
    double satHigh = floatVariable5 / 100.0 * byte.MaxValue;
    double valLow = floatVariable3 / 100.0 * byte.MaxValue;
    double valHigh = floatVariable6 / 100.0 * byte.MaxValue;

    //VideoCapture video = new VideoCapture(0);
    //video.SetCaptureProperty(CapProp.Focus, 150);
    ICapture capture = (ICapture)video;

    // Give the camera time to settle before grabbing the frame
    System.Threading.Thread.Sleep(2000);
    Mat imageMat = capture.QueryFrame();
    //imageMat.Save(@"C:\Images\example.jpeg");
    Image<Hsv, byte> image2 = new Image<Hsv, byte>(imageMat.Bitmap);
    Image<Hsv, byte> image1 = image2.Copy();   // copy used for the debug overlay

    int matchCount = 0;
    Hsv highlight = new Hsv(120.0, byte.MaxValue, byte.MaxValue);

    // Count the pixels inside the ROI whose H, S and V fall within the limits
    for (int row = integerVariable2; row < integerVariable4; ++row)
    {
        for (int col = integerVariable1; col < integerVariable3; ++col)
        {
            Hsv pixel = image2[row, col];
            // "Satuation" (sic) is the property name used by Emgu CV
            if (pixel.Value >= valLow && pixel.Value <= valHigh &&
                pixel.Satuation >= satLow && pixel.Satuation <= satHigh)
            {
                bool hueMatches = hueLow > hueHigh
                    ? pixel.Hue >= hueLow || pixel.Hue <= hueHigh   // hue range wraps around 0
                    : pixel.Hue >= hueLow && pixel.Hue <= hueHigh;
                if (hueMatches)
                {
                    ++matchCount;
                    if (booleanVariable)
                    {
                        image1[row, col] = highlight;   // paint matching pixels for debugging
                    }
                }
            }
        }
    }

    if (booleanVariable)
    {
        // Outline the ROI and show the debug image
        int width = integerVariable3 - integerVariable1;
        int height = integerVariable4 - integerVariable2;
        Rectangle rect = new Rectangle(integerVariable1, integerVariable2, width, height);
        image1.Draw(rect, new Hsv(0.0, 0.0, byte.MaxValue), 1, Emgu.CV.CvEnum.LineType.EightConnected, 0);
        //ImageViewer imageViewer = new ImageViewer(image1.Bitmap, matchCount);
        //imageViewer.Show();
        pictureBox2.Image = image1.Bitmap;
    }

    return matchCount;
}
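As an aside, when the hue range does not wrap around zero, the per-pixel loop can be replaced by Emgu's InRange and CountNonZero. A sketch, not part of the original example:

using System.Drawing;
using Emgu.CV;
using Emgu.CV.Structure;

static int CountHsvMatches(Image<Hsv, byte> hsvImage, Rectangle roi, Hsv lower, Hsv upper)
{
    // Crop the ROI, build a binary mask of in-range pixels and count them
    using (Image<Hsv, byte> cropped = hsvImage.Copy(roi))
    using (Image<Gray, byte> mask = cropped.InRange(lower, upper))
    {
        return CvInvoke.CountNonZero(mask);
    }
}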
void PerformOCV()
{
    string whitelist = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz +-/1234567890{}()_<>\"\\;:*#%$&";
    string language = "eng";                  // OCR language

    // Grab a frame after letting the camera settle
    ICapture capture = (ICapture)video;
    System.Threading.Thread.Sleep(2000);
    Mat ImageMat = capture.QueryFrame();
    IImage image1 = (IImage)ImageMat;

    int integerVariable1 = 70;                // upper-left corner X coordinate of the ROI
    int integerVariable2 = 70;                // upper-left corner Y coordinate of the ROI
    int integerVariable3 = 70;                // ROI width (pixels)
    int integerVariable4 = 70;                // ROI height (pixels)
    bool booleanVariable = false;             // display debug window

    // Crop the frame to the ROI
    Image<Bgr, byte> image2 = new Image<Bgr, byte>(image1.Bitmap);
    image2.ROI = new Rectangle(integerVariable1, integerVariable2, integerVariable3, integerVariable4);

    // Make sure the language data is available next to the executable
    string tessdataDir = @".\tessdata";
    if (!Directory.Exists(tessdataDir))
    {
        Directory.CreateDirectory(tessdataDir);
    }
    HelperMethods.LoadLanguage(tessdataDir, language);
    HelperMethods.LoadLanguage(tessdataDir, "osd");

    // The Tesseract constructor expects the data path to end with a directory separator
    string tessdataPath = Path.GetFullPath(
        tessdataDir.EndsWith(Path.DirectorySeparatorChar.ToString())
            ? tessdataDir
            : tessdataDir + Path.DirectorySeparatorChar);
    var tesseract = new Tesseract(tessdataPath, language, OcrEngineMode.Default);
    tesseract.SetVariable("tessedit_char_whitelist", whitelist);

    // Tesseract expects a 3-channel image; image3 is already a BGR copy of the ROI,
    // so only a grayscale source needs conversion
    Image<Bgr, byte> image3 = new Image<Bgr, byte>(image2.Bitmap);
    if (image1.NumberOfChannels == 1)
    {
        CvInvoke.CvtColor(image2, image3, ColorConversion.Gray2Bgr, 0);
    }

    tesseract.SetImage(image3);
    int recognizeResult = tesseract.Recognize();
    if (recognizeResult != 0)
    {
        MessageBox.Show(string.Format("OpenCV_PerformOCR: OCR system failed to recognize image. OCR returned: {0}", recognizeResult));
    }
    string utF8Text = tesseract.GetUTF8Text();
    char[] separator = new char[] { ' ', '\r', '\n' };
    int wordCount = utF8Text.Split(separator, StringSplitOptions.RemoveEmptyEntries).Length;

    if (booleanVariable)
    {
        //ImageViewer imageViewer = new ImageViewer((IImage)image2);
        //imageViewer.ShowDialog();
        //imageViewer.Dispose();
    }

    image2.Dispose();
    image3.Dispose();
    tesseract.Dispose();
}