/// <summary>
/// Runs one recognition pass over the current source frame (<c>_originFrame</c>):
/// pushes the UI tuning values into the contour processor, detects contours /
/// matching templates, draws the results onto a bitmap, and publishes it via
/// the <c>Frame</c> property.
/// </summary>
private void Recognize()
{
    // Push the current UI-bound tuning values into the processor before each pass.
    processor.minContourLength = _contourLength;
    processor.minContourArea = _contourArea;
    processor.finder.maxACFDescriptorDeviation = _maxACFDesc;
    processor.finder.minACF = _minACF;
    processor.finder.minICF = _minICF;

    if (_originFrame == null)
    {
        return;
    }

    Bitmap bmpFrame;
    // Image<Bgr, byte> is IDisposable; the original code leaked it (and all the
    // GDI+ objects below) on every frame. ToBitmap() produces an independent
    // Bitmap, so the Emgu image can be released immediately afterwards.
    using (Image<Bgr, byte> frame = new Image<Bgr, byte>(BitmapConvert.BitmapImageToBitmap(_originFrame)))
    {
        // Run contour detection / template matching on this frame.
        processor.ProcessImage(frame);
        bmpFrame = frame.ToBitmap();
    }

    using (Graphics g = Graphics.FromImage(bmpFrame))
    using (Font font = new Font("Times New Roman", 24)) // was 16 at some point
    using (Brush bgBrush = new SolidBrush(Color.Blue))
    using (Brush foreBrush = new SolidBrush(Color.Red))
    using (Pen borderPen = new Pen(Color.FromArgb(150, 0, 255, 0)))
    {
        if (_showContours)
        {
            foreach (var contour in processor.contours)
            {
                if (contour.Size > 1)
                {
                    g.DrawLines(Pens.Red, contour.ToArray());
                }
            }
        }

        // foundTemplates may be written by the capture/processing thread;
        // lock it for the duration of the iteration.
        lock (processor.foundTemplates)
        {
            foreach (FoundTemplateDesc found in processor.foundTemplates)
            {
                // Templates named after image files (*.png / *.jpg) are rendered
                // as an augmented-reality overlay instead of a labelled rectangle.
                if (found.template.name.EndsWith(".png") || found.template.name.EndsWith(".jpg"))
                {
                    DrawAugmentedReality(found, g);
                    continue;
                }

                Rectangle foundRect = found.sample.contour.SourceBoundingRect;
                // Anchor the label at the top-centre of the bounding rectangle.
                System.Drawing.Point p1 = new System.Drawing.Point((foundRect.Left + foundRect.Right) / 2, foundRect.Top);
                string text = found.template.name;
                g.DrawRectangle(borderPen, foundRect);
                // Draw the label twice with a 1px offset: blue behind, red in
                // front, giving a cheap drop-shadow effect.
                g.DrawString(text, font, bgBrush, new PointF(p1.X + 1 - font.Height / 3, p1.Y + 1 - font.Height));
                g.DrawString(text, font, foreBrush, new PointF(p1.X - font.Height / 3, p1.Y - font.Height));
            }
        }
    }

    _frame = BitmapConvert.toBitmapImage(bmpFrame);
    base.OnPropertyChanged("Frame");
}
/// <summary>
/// Produces an edge image from <c>_image</c> and publishes it via <c>CannyImage</c>.
/// Two modes:
///  - <c>_isChecked</c>: runs the custom <c>Canny</c> implementation and also
///    publishes every intermediate stage (Gaussian filter, non-maximum
///    suppression, strong/weak edges) for visualization.
///  - otherwise: EmguCV pipeline — grayscale, optional histogram equalization,
///    optional pyramid smoothing, Canny, adaptive threshold, and an OR-merge of
///    the two edge maps.
/// Any failure is reported to the user via a message box.
/// </summary>
private void EdgeDetect()
{
    if (_image == null)
    {
        return;
    }

    try
    {
        if (_isChecked)
        {
            // Custom Canny pipeline: compute the final edge image plus every
            // intermediate stage so the UI can display them side by side.
            _progressValue = 0;
            base.OnPropertyChanged("ProgressValue");

            Bitmap bmpImage = BitmapConvert.BitmapImageToBitmap(_image);

            Bitmap edge;
            Canny.DetectCannyEdges(bmpImage, out edge, _kernelSize, _sigma, _thrHigh, _thrLow);
            _cannyImage = BitmapConvert.toBitmapImage(edge);
            base.OnPropertyChanged("CannyImage");

            Bitmap gf, np, se, we;

            // Stage 1: Gaussian filter.
            Canny.GaussianFilter(bmpImage, out gf, _kernelSize, _sigma);
            _GFImage = BitmapConvert.toBitmapImage(gf);
            base.OnPropertyChanged("GFImage");

            // Stage 2: non-maximum suppression.
            Canny.Suppression(bmpImage, out np, _kernelSize, _sigma);
            _NMSImage = BitmapConvert.toBitmapImage(np);
            base.OnPropertyChanged("NMSImage");

            // Stage 3: hysteresis thresholding — strong and weak edge maps.
            Canny.Threshold(bmpImage, out se, out we, _kernelSize, _sigma, _thrHigh, _thrLow);
            _WEImage = BitmapConvert.toBitmapImage(we);
            base.OnPropertyChanged("WEImage");
            _SEImage = BitmapConvert.toBitmapImage(se);
            base.OnPropertyChanged("SEImage");
        }
        else
        {
            Bitmap bmpImage = BitmapConvert.BitmapImageToBitmap(_image);

            // Convert to grayscale.
            Image<Gray, byte> grayFrame = new Image<Gray, byte>(bmpImage);
            if (_equalizeHist)
            {
                grayFrame._EqualizeHist(); // auto-contrast
            }

            // Gaussian smoothing via pyramid down + up. The original code
            // leaked the PyrDown() intermediate; release it here.
            Image<Gray, byte> smoothedGrayFrame;
            using (Image<Gray, byte> down = grayFrame.PyrDown())
            {
                smoothedGrayFrame = down.PyrUp();
            }

            // Canny edge detection. When smoothing is enabled, the smoothed
            // frame becomes the working frame and the Canny result is kept
            // separately to be OR-merged after thresholding.
            Image<Gray, byte> cannyFrame = null;
            if (_smooth)
            {
                cannyFrame = smoothedGrayFrame.Canny(_thresh, _threshLinking);
                grayFrame.Dispose(); // was leaked when reassigned below
                grayFrame = smoothedGrayFrame;
            }
            else
            {
                Image<Gray, byte> edges = grayFrame.Canny(_thresh, _threshLinking);
                grayFrame.Dispose();        // was leaked when reassigned
                smoothedGrayFrame.Dispose(); // unused in this branch; was leaked
                grayFrame = edges;
            }

            // Locally adaptive threshold binarization: treating the threshold
            // itself as a variable makes detection more effective.
            // (_blockSize + _blockSize % 2 + 1 forces an odd block size.)
            CvInvoke.AdaptiveThreshold(grayFrame, grayFrame, 255,
                Emgu.CV.CvEnum.AdaptiveThresholdType.MeanC,
                Emgu.CV.CvEnum.ThresholdType.Binary,
                _blockSize + _blockSize % 2 + 1, _parameter);

            grayFrame._Not(); // invert so edges are white on black

            if (cannyFrame != null)
            {
                // Experimentally, OR-merging with the Canny edges makes the
                // contours more distinct.
                grayFrame._Or(cannyFrame);
            }

            _cannyImage = BitmapConvert.toBitmapImage(grayFrame.ToBitmap());
            base.OnPropertyChanged("CannyImage");

            // Release the Emgu images (original code leaked both).
            // When _smooth is true, grayFrame aliases smoothedGrayFrame,
            // so it must only be disposed once.
            grayFrame.Dispose();
            cannyFrame?.Dispose();
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
}