public virtual void ProcessImage(Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte> image) { Emgu.CV.Image<Gray, byte> gray = image.Convert<Gray, byte>(); gray._ThresholdBinary(new Gray(_threshold), new Gray(255.0)); gray._Not(); Parsley.Core.EllipseDetector ed = new Parsley.Core.EllipseDetector(); ed.MinimumContourCount = _min_contour_count; List < Parsley.Core.DetectedEllipse > ellipses = new List<Parsley.Core.DetectedEllipse>(ed.DetectEllipses(gray)); List < Parsley.Core.DetectedEllipse > finals = new List<Parsley.Core.DetectedEllipse>( ellipses.Where(e => { return e.Rating < _distance_threshold; }) ); finals.Sort( (a, b) => { double dista = a.Ellipse.MCvBox2D.center.X * a.Ellipse.MCvBox2D.center.X + a.Ellipse.MCvBox2D.center.Y * a.Ellipse.MCvBox2D.center.Y; double distb = b.Ellipse.MCvBox2D.center.X * b.Ellipse.MCvBox2D.center.X + b.Ellipse.MCvBox2D.center.Y * b.Ellipse.MCvBox2D.center.Y; return dista.CompareTo(distb); } ); Bgr bgr = new Bgr(0, 255, 0); MCvFont f = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_PLAIN, 0.8, 0.8); int count = 1; foreach (Parsley.Core.DetectedEllipse e in finals) { image.Draw(e.Ellipse, bgr, 2); image.Draw(count.ToString(), ref f, new System.Drawing.Point((int)e.Ellipse.MCvBox2D.center.X, (int)e.Ellipse.MCvBox2D.center.Y), bgr); count++; } }
public static Image<Bgr, Byte> DrawContours(Image<Gray, Byte> image) { double something = 30; List<VectorOfPoint> convertedContours = GetContours(image, ChainApproxMethod.ChainApproxSimple, RetrType.List, 0.001, something); Image<Bgr, Byte> result = new Image<Bgr, Byte>(image.Width, image.Height, new Bgr(0, 0, 0)); int counter = 1; Random randomGen = new Random(); KnownColor[] names = (KnownColor[])Enum.GetValues(typeof(KnownColor)); foreach (VectorOfPoint points in convertedContours) { KnownColor randomColorName = names[randomGen.Next(names.Length)]; Color randomColor = Color.FromKnownColor(randomColorName); Bgr color = new Bgr(randomColor); var temp = points.ToArray(); result.Draw(temp, color, 2); counter++; } return result; }
// Rotation; theta is in degrees.
public void Rotate(double theta, Bgr color)
{
    Mat Tmat = new Mat();
    // CvInvoke considers rotation differently to the Emgu CV image wrapper, so theta is actually -theta.
    CvInvoke.GetRotationMatrix2D(new PointF(matImage.Width / 2, matImage.Height / 2), -theta, 1, Tmat);
    CvInvoke.WarpAffine(matImage, destImage, Tmat, new Size(matImage.Width, matImage.Height), borderValue: new MCvScalar(color.Blue, color.Green, color.Red));
}
static void Main(string[] args)
{
    //select color
    Bgr<byte>[,] image = new Bgr<byte>[480, 640];
    Hsv<byte> color = UI.PickColor(Bgr<byte>.Red).ToHsv();

    //select mask
    Gray<byte>[,] mask = image.GetMask();
    if (mask.AsEnumerable().Sum(x => x.Intensity) == 0) //if the mask is empty
        mask.SetValue<Gray<byte>>(Byte.MaxValue);

    //increase saturation incrementally
    for (int s = 0; s <= Byte.MaxValue; s++)
    {
        color.S = (byte)s;
        image.SetValue<Bgr<byte>>(color.ToBgr(), mask);
        image.Show(scaleForm: true);
        ((double)s / Byte.MaxValue).Progress(message: "Changing saturation");
        Thread.Sleep(50);
    }

    //save last image
    string fileName = UI.SaveImage();
    if (fileName != null) image.Save(fileName);

    //close all
    UI.CloseAll();
}
public object Convert(object value, Type targetType, object parameter, CultureInfo culture) { if (!String.IsNullOrWhiteSpace(parameter.ToString())) { var brush = (SolidColorBrush)value; var color = new Bgr(brush.Color.B,brush.Color.G,brush.Color.R); buffer.SetValue(color); var data = buffer.Convert<Hsv,byte>().Data; string param = parameter.ToString(); switch(param){ case "h": case "H": return data[0, 0, 0]; case "s": case "S": return data[0, 0, 1]; case "v": case "V": return data[0, 0, 2]; default: break; } } throw new ArgumentException("Wrong Parameter. It can be only one from H,h,S,s,V,v."); }
static void Main(string[] args)
{
    UI.OpenImage();
    Bgr<byte>[,] image = new Bgr<byte>[480, 640];
    Hsv<byte> color = UI.PickColor(Bgr<byte>.Red).ToHsv();

    for (int s = 0; s <= Byte.MaxValue; s++)
    {
        color.S = (byte)s;
        image.SetValue<Bgr<byte>>(color.ToBgr());
        image.Show(scaleForm: true);
        ((double)s / Byte.MaxValue).Progress(message: "Changing saturation");
        Thread.Sleep(50);
    }

    //save last image
    string fileName = UI.SaveImage();
    if (fileName != null) image.Save(fileName);

    //close all
    UI.CloseAll();
}
private void DrawEvents(Image<Bgr, Byte> image, List<Event> events, Bgr color) { foreach(var ev in events) { image.Draw(ev.EventCoords, color, 1); } }
private void DrawPolyline(Image<Bgr, Byte> image, List<MarkovState> way, Bgr color) { for(int i = 1; i < way.Count; i++) { image.Draw(new LineSegment2D(Geometry.GetCenter(way[i].Coords), Geometry.GetCenter(way[i-1].Coords)), color, 3); } }
/// <summary>
/// Loading of the form.
/// </summary>
public Form1()
{
    try
    {
        IC = new IntrinsicCameraParameters();
    }
    catch (Exception ex)
    {
        MessageBox.Show("Error: " + ex.Message);
    }
    InitializeComponent();

    //fill line colour array
    Random R = new Random();
    for (int i = 0; i < line_colour_array.Length; i++)
    {
        line_colour_array[i] = new Bgr(R.Next(0, 255), R.Next(0, 255), R.Next(0, 255));
    }

    //set up capture as normal
    try
    {
        _Capture = new Capture();
        _Capture.ImageGrabbed += new Emgu.CV.Capture.GrabEventHandler(_Capture_ImageGrabbed);
        _Capture.Start();
    }
    catch (Exception ex)
    {
        MessageBox.Show("Error: " + ex.Message);
    }
}
private CardColor ClassifyColor(Bgr avgBgr, Hsv avgHsv) { if (avgHsv.Satuation < 30) { return CardColor.White; } else if (avgHsv.Satuation < 45) { return CardColor.Other; } else if (avgBgr.Red > avgBgr.Blue && avgBgr.Red > avgBgr.Green) { return CardColor.Red; } else if (avgBgr.Green > avgBgr.Blue && avgBgr.Green > avgBgr.Red) { return CardColor.Green; } else if (avgBgr.Green < avgBgr.Blue && avgBgr.Green < avgBgr.Red) { return CardColor.Purple; } else { return CardColor.White; } }
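A hedged sketch of how the classifier above might be fed: Emgu CV's Image<,>.GetAverage() returns the mean color of an image (or ROI), and converting the same region to HSV gives the matching average saturation. The frame, the card rectangle, and the variable names are assumptions, not part of the original code.

// Sketch only: assumes this runs inside the class that defines ClassifyColor,
// and that 'frame' (Image<Bgr, byte>) and 'cardRectangle' (Rectangle) exist.
Image<Bgr, byte> card = frame.Copy(cardRectangle);
Bgr avgBgr = card.GetAverage();
Hsv avgHsv = card.Convert<Hsv, byte>().GetAverage();
CardColor cardColor = ClassifyColor(avgBgr, avgHsv);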
public ImageFinder() { rectangles = new List<Rectangle>(); stopwatch = new Stopwatch(); Threshold = 0.85; fillColor = new Bgr(Color.Magenta); }
/// <summary>
/// Sets the specified image.
/// </summary>
/// <param name="image">Image to display.</param>
public void SetImage(Bgr<byte>[,] image)
{
    if (bmp == null || bmp.Width != image.Width() || bmp.Height != image.Height())
    {
        bmp = new Bitmap(image.Width(), image.Height(), PixelFormat.Format24bppRgb);
    }

    BitmapData bmpData = bmp.Lock();
    if (bmpData.BytesPerPixel != image.ColorInfo().Size)
    {
        bmpData.Dispose();
        bmpData = null;
        bmp = new Bitmap(image.Width(), image.Height(), PixelFormat.Format24bppRgb);
    }

    bmpData = bmpData ?? bmp.Lock();

    using (var uIm = image.Lock())
    {
        Copy.UnsafeCopy2D(uIm.ImageData, bmpData.Data, uIm.Stride, bmpData.ScanWidth, uIm.Height);
    }

    bmpData.Dispose();
    imageView.Image = bmp;

    if (ScaleForm)
        ClientSize = new Size(image.Width(), image.Height());
}
static void Main(string[] args)
{
    var img = new Bgr<byte>[480, 640];

    //**********************************************************************************
    Console.ForegroundColor = ConsoleColor.Red;
    Console.WriteLine("********* TColor[,] <=> Image<> conversions (built-in) ****************");
    Console.ResetColor();

    //to Image<>
    Image<Bgr<byte>> lockedImg = img.Lock();
    //from Image<>
    var arr = lockedImg.Clone();

    //**********************************************************************************
    Console.WriteLine();
    Console.ForegroundColor = ConsoleColor.Red;
    Console.WriteLine("********* Image<,> <=> OpenCV conversions (built-in) ****************");
    Console.ResetColor();

    //to IplImage
    IplImage iplImage;
    using (var uImg = img.Lock())
    {
        iplImage = uImg.AsOpenCvImage(); //data is shared
    }
    //from IplImage
    var imgFromIpl = iplImage.AsImage();

    //**********************************************************************************
    Console.WriteLine();
    Console.ForegroundColor = ConsoleColor.Red;
    Console.WriteLine("*********** Image<,> <=> Bitmap conversions (BitmapInterop) ****************");
    Console.ResetColor();

    //to Bitmap
    var bmp = img.ToBitmap();
    //from Bitmap
    var imgFromBmp = bmp.ToArray();
}
private double s = 0; // superpixel interval
#endregion Fields

#region Constructors
// =============== Constructor ===============
public Superpixels(Image<Bgr, Byte> imageBgr, int superpixelCount)
{
    // set values
    k = superpixelCount;
    n = imageBgr.Width * imageBgr.Height;
    s = Math.Sqrt((double)n / (double)k);
    area = Convert.ToInt32(2 * s * 2 * s);
    m = 12;

    // convert BGR to LAB and build the vector matrix
    imageLab = imageBgr.Convert<Lab, Byte>();
    pixels = new Pixel[imageBgr.Width, imageBgr.Height];
    for (int r = 0; r < imageLab.Height; r++)
    {
        for (int c = 0; c < imageLab.Width; c++)
        {
            double l = (double)imageLab.Data[r, c, 0] * 100 / 255;
            double a = (double)imageLab.Data[r, c, 1] - 128;
            double b = (double)imageLab.Data[r, c, 2] - 128;
            Bgr bgr = new Bgr(imageBgr.Data[r, c, 0], imageBgr.Data[r, c, 1], imageBgr.Data[r, c, 2]);
            pixels[c, r] = new Pixel(new Vector5(l, a, b, c, r), bgr);
            //Console.WriteLine("BGR = " + imageBgr.Data[r, c, 0] + " " + imageBgr.Data[r, c, 1] + " " + imageBgr.Data[r, c, 2]);
            //Console.WriteLine("RGB = " + imageBgr.Data[r, c, 2] + " " + imageBgr.Data[r, c, 1] + " " + imageBgr.Data[r, c, 0]);
            //Console.WriteLine("LAB = " + labValues[r, c].X + " " + labValues[r, c].Y + " " + labValues[r, c].Z);
        }
    }
}
private void rotatePhotos(object parameters) { object[] paramsArray = (object[])parameters; List<string> fileNames = (List<string>)paramsArray[0]; PointF rotationCenter = (PointF)paramsArray[1]; Bitmap referencePic = new Bitmap(fileNames.First()); Image<Bgr, Byte> referenceImage = new Image<Bgr, Byte>(referencePic); byte[] timeTakenRaw = referencePic.GetPropertyItem(36867).Value; string timeTaken = System.Text.Encoding.ASCII.GetString(timeTakenRaw, 0, timeTakenRaw.Length - 1); DateTime referenceTime = DateTime.ParseExact(timeTaken, "yyyy:MM:d H:m:s", System.Globalization.CultureInfo.InvariantCulture); referencePic.Dispose(); Bgr background = new Bgr(0, 0, 0); foreach (string filename in fileNames) { Bitmap currentPic = new Bitmap(filename); timeTakenRaw = currentPic.GetPropertyItem(36867).Value; timeTaken = System.Text.Encoding.ASCII.GetString(timeTakenRaw, 0, timeTakenRaw.Length - 1); DateTime date = DateTime.ParseExact(timeTaken, "yyyy:MM:d H:m:s", System.Globalization.CultureInfo.InvariantCulture); double secondsShift = (date - referenceTime).TotalSeconds; double rotationAngle = secondsShift / stellarDay * 360; RotationMatrix2D<double> rotationMatrix = new RotationMatrix2D<double>(rotationCenter, -rotationAngle, 1); using (Image<Bgr, Byte> rotatedImage = new Image<Bgr, Byte>(currentPic)) { referenceImage = referenceImage.Max(rotatedImage.WarpAffine<double>(rotationMatrix, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC, Emgu.CV.CvEnum.WARP.CV_WARP_FILL_OUTLIERS, background)); } pictureProcessed(this, new PictureProcessedEventArgs(referenceImage)); currentPic.Dispose(); } pictureProcessingComplete(this, new EventArgs()); }
public Filter(string name, int minCount, Bgr minBgr, Bgr maxBgr) { _name = name; _minCount = minCount; _minBgr = minBgr; _maxBgr = maxBgr; }
private HoughResult ProcessHoughTest(Image<Bgr, byte> image) { const int leftMargin = 0; const int upMargin = 300; const int downMargin = 200; var size = image.Size; image.ROI = new Rectangle(leftMargin, upMargin, size.Width - leftMargin * 2, size.Height - upMargin - downMargin); HoughLines.PreprocessImage(image); var result = HoughLines.Compute(image); lineCache_.AddResult(result); result = lineCache_.GetCachedResult(); result.MoveRoiResult(leftMargin, upMargin); image.ROI = Rectangle.Empty; var red = new Bgr(Color.Red); foreach (var line in result.SolidLines) { image.Draw(line, red, 3); } return result; }
public VShape(Point[] shape, Bgr newID) { _shape = shape; _isSquare = false; _isTriangle = false; health = 10; ID = newID; int len = _shape.Length; sides = new double[len]; for (int i = 0; i < len - 1; i++) { sides[i] = Distance(_shape[i], _shape[i + 1]); } sides[len - 1] = Distance(_shape[len - 1], _shape[0]); scale = Math.Round(FindScale(), 2); if (IsRegular() && IsEquilateral()) { if (_shape.Length == 3) _isTriangle = true; else if (_shape.Length == 4) _isSquare = true; } }
public void ClassifyTest() { BgrClassifier target = new BgrClassifier(); Bgr value = new Bgr(145, 110, 197); //Purple CardColor actual = target.Classify(value); Assert.AreEqual(CardColor.Purple, actual); }
private void Form1_Load(object sender, EventArgs e)
{
    if (openImageFileDialog.ShowDialog() == System.Windows.Forms.DialogResult.OK)
    {
        Bgr drawColor = new Bgr(Color.Blue);
        try
        {
            Image<Bgr, Byte> image = new Image<Bgr, byte>(openImageFileDialog.FileName);
            original.Image = image.ToBitmap();
            original.SizeMode = PictureBoxSizeMode.Zoom;

            using (Image<Gray, byte> gray = image.Convert<Gray, Byte>())
            {
                _ocr.Recognize(gray);
                Tesseract.Charactor[] charactors = _ocr.GetCharactors();
                foreach (Tesseract.Charactor c in charactors)
                {
                    image.Draw(c.Region, drawColor, 1);
                }
                processed.Image = image.ToBitmap();
                processed.SizeMode = PictureBoxSizeMode.Zoom;

                //String text = String.Concat( Array.ConvertAll(charactors, delegate(Tesseract.Charactor t) { return t.Text; }) );
                String text = _ocr.GetText();
                ocrTextBox.Text = text;
            }
        }
        catch (Exception exception)
        {
            MessageBox.Show(exception.Message);
        }
    }
}
public Filter() { _name = ""; _minCount = int.MaxValue; _minBgr = new Bgr(0, 0, 0); _maxBgr = new Bgr(255, 255, 255); }
private void drawDetections(IEnumerable<Rectangle> detections, Bgr<byte> color, int thickness) { foreach (var detection in detections) { debugImage.Draw(detection, color, thickness); } }
public Image<Bgr, byte> GetBigPicture() { MCvFont font = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_PLAIN, 1, 1); font.thickness = 2; Image<Bgr, byte> newImage = MostRecentImage.Image.Clone(); foreach (Note note in _logic.CurrentNotes.Where(n => n.DetectedInFrames > 1)) { Point playAreaOffset = new Point(218, 262); Point trackOffset; switch (note.TrackColor) { case NoteType.Red: trackOffset = new Point(0, 0); break; case NoteType.Yellow: trackOffset = new Point(66, 0); break; case NoteType.Blue: trackOffset = new Point(134, 0); break; case NoteType.Green: trackOffset = new Point(176, 0); break; default: trackOffset = new Point(0, 0); break; } Rectangle drawRectangle = new Rectangle(playAreaOffset.X + trackOffset.X + note.Rectangle.X, playAreaOffset.Y + trackOffset.Y + note.Rectangle.Y, note.Rectangle.Width, note.Rectangle.Height); Bgr color = new Bgr(0, 0, 0); switch (note.Color) { case NoteType.Red: color = new Bgr(0, 0, 255); break; case NoteType.Yellow: color = new Bgr(0, 255, 255); break; case NoteType.Blue: color = new Bgr(255, 0, 0); break; case NoteType.Green: color = new Bgr(0, 255, 0); break; case NoteType.Orange: color = new Bgr(255, 255, 255); break; } newImage.Draw(drawRectangle, color, 2); newImage.Draw(note.FramesUntilHit.ToString("##.##"), ref font, new Point(drawRectangle.Left, drawRectangle.Bottom), new Bgr(0, 0, 0)); //newImage.Draw("X" + note.PerFrameVelocityX + ";Y" + note.PerFrameVelocityY, ref font, // new Point(drawRectangle.Left, drawRectangle.Bottom), new Bgr(0,0,0)); //newImage.Draw(((int) note.DistanceToTarget).ToString(), ref font, // new Point(drawRectangle.Left, drawRectangle.Bottom), new Bgr(128, 128, 128)); } return newImage; }
/// <summary>
/// Draws text on the provided image.
/// </summary>
/// <param name="image">Input image.</param>
/// <param name="text">User text.</param>
/// <param name="font">Font.</param>
/// <param name="botomLeftPoint">Bottom-left point.</param>
/// <param name="color">Text color.</param>
/// <param name="opacity">Sets the alpha channel, where 0 is fully transparent and 255 is fully opaque.</param>
public unsafe static void Draw(this Bgr<byte>[,] image, string text, Font font, Point botomLeftPoint, Bgr<byte> color, byte opacity = Byte.MaxValue)
{
    using (var img = image.Lock())
    {
        var iplImage = img.AsCvIplImage();
        CvCoreInvoke.cvPutText(&iplImage, text, botomLeftPoint, ref font, color.ToCvScalar());
    }
}
// Constructor
public Pixel(Vector5 _vector, Bgr _bgr)
{
    vector = _vector;
    distance = 9999;
    clusterNr = -1;
    bgr = _bgr;
    scanned = false;
}
public unsafe void ValueType() { var ptr = stackalloc Bgr[1]; *ptr = new Bgr { B = 1, G = 2, R = 3 }; Unsafe.Write(ptr, new Bgr { B = 11, G = 22, R = 33 }); Assert.Equal(11, ptr[0].B); Assert.Equal(22, ptr[0].G); Assert.Equal(33, ptr[0].R); }
private double ColorDistance(Bgr a, Bgr b)
{
    // Euclidean distance over the B, G and R channels
    double blue = Math.Abs(a.Blue - b.Blue) * Math.Abs(a.Blue - b.Blue);
    double green = Math.Abs(a.Green - b.Green) * Math.Abs(a.Green - b.Green);
    double red = Math.Abs(a.Red - b.Red) * Math.Abs(a.Red - b.Red);
    double result = Math.Sqrt(blue + green + red);
    return result;
}
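A minimal usage sketch for the distance function above, assuming a small hypothetical palette: it simply keeps the palette entry with the smallest Euclidean distance to a sample color.

// Sketch only: the sample color and the palette are illustrative assumptions,
// and this is assumed to run inside the class that defines ColorDistance.
Bgr sample = new Bgr(30, 40, 200);
Bgr[] palette = { new Bgr(0, 0, 255), new Bgr(0, 255, 0), new Bgr(255, 0, 0) };

Bgr nearest = palette[0];
double best = double.MaxValue;
foreach (Bgr candidate in palette)
{
    double d = ColorDistance(sample, candidate);
    if (d < best) { best = d; nearest = candidate; }
}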
private MCvPoint3D32f[] _points; //Computer3DPointsFromStereoPair
#endregion

public StereoCameraCalibrationService()
{
    //set up chessboard drawing array
    Random R = new Random();
    for (int i = 0; i < _lineColourArray.Length; i++)
    {
        _lineColourArray[i] = new Bgr(R.Next(0, 255), R.Next(0, 255), R.Next(0, 255));
    }
}
public StereoImageControl() { InitializeComponent(); ViewModel = new StereoImageViewModel(); DataContext = ViewModel; _bgrBlack = new Bgr(Color.Black); ViewModel.PropertyChanged += ViewModelOnPropertyChanged; }
protected override void OnDraw(ref Image<Bgr, Byte> src)
{
    double maxDistMul = (double)GetVar("MaxDistMul");
    double minDist = (double)GetVar("MinDist");
    double verticalMul = (double)GetVar("VerticalMul");

    GloveHand lastSides = HandRecord.LastRightWithBothSides();
    if (lastSides == null)
    {
        return;
    }

    Bgr col = new Bgr(0, 255, 0);
    Bgr col2 = new Bgr(0, 200, 0);
    if (GeometryExt.Distance(lastSides.SideFingers[0].LightCenter, lastSides.SideFingers[1].LightCenter) > minDist * maxDistMul)
    {
        col = new Bgr(0, 0, 255);
        col2 = new Bgr(0, 0, 200);
    }
    if (GeometryExt.Distance(lastSides.SideFingers[0].LightCenter, lastSides.SideFingers[1].LightCenter) < minDist)
    {
        col = new Bgr(200, 200, 200);
        col2 = new Bgr(150, 150, 150);
    }
    if (Math.Abs(lastSides.SideFingers[0].LightCenter.Y - lastSides.SideFingers[1].LightCenter.Y) >= Math.Abs(lastSides.SideFingers[0].LightCenter.X - lastSides.SideFingers[1].LightCenter.X) * verticalMul) // the side fingers are arranged vertically
    {
        col2 = new Bgr(255, 0, 0);
    }

    if (lastSides.SideFingers.Length == 2)
    {
        System.Drawing.Point p1 = lastSides.SideFingers[0].LightCenter;
        System.Drawing.Point p2 = lastSides.SideFingers[1].LightCenter;
        float smallC = ((float)GeometryExt.Distance(p1, p2) / 2) - 8;
        float bigC = ((float)GeometryExt.Distance(p1, p2) / 2) + 8;
        if (smallC < 0) { smallC = 0; }
        if (bigC < 0) { bigC = 0; }
        src.Draw(new CircleF(lastSides.SideMiddlePoint, smallC), col2, 3);
        src.Draw(new CircleF(lastSides.SideMiddlePoint, bigC), col, 3);
        src.Draw(new CircleF(lastSides.SideMiddlePoint, 5), new Bgr(255, 0, 255), -1);
    }
}
public Image<Bgr, byte> ReturnRotated(Image<Bgr, byte> image, double angle, int centerX = 0, int centerY = 0)
{
    angle = angle / 57.2956; // degrees to radians
    var resultImage = image.CopyBlank();
    for (int x = 0; x < resultImage.Width - 1; x++)
    {
        for (int y = 0; y < resultImage.Height - 1; y++)
        {
            int newX = Convert.ToInt32(Math.Cos(angle) * (x - centerX) - Math.Sin(angle) * (y - centerY)) + centerX;
            int newY = Convert.ToInt32(Math.Sin(angle) * (x - centerX) + Math.Cos(angle) * (y - centerY)) + centerY;
            if (newX < resultImage.Width && newY < resultImage.Height && newX >= 0 && newY >= 0)
            {
                double X = newX;
                double Y = newY;
                double baseX = Math.Floor(X);
                double baseY = Math.Floor(Y);
                if (baseX >= image.Width - 1 || baseY >= image.Height - 1)
                {
                    continue;
                }
                double rX = X - baseX;
                double rY = Y - baseY;
                double irX = 1 - rX;
                double irY = 1 - rY;

                // bilinear interpolation between the four neighbouring source pixels
                Bgr c = new Bgr();
                Bgr c1 = new Bgr();
                Bgr c2 = new Bgr();
                c1.Blue = image.Data[(int)baseY, (int)baseX, 0] * irX + image.Data[(int)baseY, (int)baseX + 1, 0] * rX;
                c1.Green = image.Data[(int)baseY, (int)baseX, 1] * irX + image.Data[(int)baseY, (int)baseX + 1, 1] * rX;
                c1.Red = image.Data[(int)baseY, (int)baseX, 2] * irX + image.Data[(int)baseY, (int)baseX + 1, 2] * rX;
                c2.Blue = image.Data[(int)baseY + 1, (int)baseX, 0] * irX + image.Data[(int)baseY + 1, (int)baseX + 1, 0] * rX;
                c2.Green = image.Data[(int)baseY + 1, (int)baseX, 1] * irX + image.Data[(int)baseY + 1, (int)baseX + 1, 1] * rX;
                c2.Red = image.Data[(int)baseY + 1, (int)baseX, 2] * irX + image.Data[(int)baseY + 1, (int)baseX + 1, 2] * rX;
                c.Blue = c1.Blue * irY + c2.Blue * rY;
                c.Green = c1.Green * irY + c2.Green * rY;
                c.Red = c1.Red * irY + c2.Red * rY;
                resultImage[y, x] = c;
            }
        }
    }
    return resultImage;
}
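For comparison, a hedged sketch of the same degree-based rotation done with Emgu CV's built-in Image.Rotate, which rotates around the image centre and fills the uncovered area with a background color; the angle and background here are illustrative assumptions.

// Sketch: rotate an Image<Bgr, byte> named 'image' by 30 degrees, black background.
Image<Bgr, byte> rotated = image.Rotate(30, new Bgr(0, 0, 0));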
public void DetectConnected()
{
    var classificator = new SkyCloudClassification(processingImage, defaultProperties);
    classificator.Classify();
    DenseMatrix dmSkyIndexDataBinary = classificator.dmSkyIndexDataBinary();
    Image<Gray, Byte> imgSkyIndexDataBinary = ImageProcessing.grayscaleImageFromDenseMatrixWithFixedValuesBounds(dmSkyIndexDataBinary, 0.0d, 1.0d, true);
    imgSkyIndexDataBinary = imgSkyIndexDataBinary.Mul(classificator.maskImage);
    Image<Bgr, Byte> previewImage = imgSkyIndexDataBinary.CopyBlank().Convert<Bgr, Byte>();

    // Contour<Point> contoursDetected = imgSkyIndexDataBinary.FindContours(Emgu.CV.CvEnum.CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE, Emgu.CV.CvEnum.RETR_TYPE.CV_RETR_LIST);

    #region // EmguCV 3.0
    //VectorOfVectorOfPoint contoursDetected = new VectorOfVectorOfPoint();
    //CvInvoke.FindContours(imgSkyIndexDataBinary, contoursDetected, null, Emgu.CV.CvEnum.RetrType.List,
    //    Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
    //contoursArray = new List<VectorOfPoint>();
    //int count = contoursDetected.Size;
    //var colorGen = new RandomPastelColorGenerator();
    //for (int i = 0; i < count; i++)
    //{
    //    Color currentColor = colorGen.GetNext();
    //    var currentColorBgr = new Bgr(currentColor.B, currentColor.G, currentColor.R);
    //    using (VectorOfPoint currContour = contoursDetected[i])
    //    {
    //        contoursArray.Add(currContour);
    //        previewImage.Draw(currContour.ToArray(), currentColorBgr, -1); //.Draw(currContour, currentColorBgr, -1);
    //    }
    //}
    #endregion // EmguCV 3.0

    List<Contour<Point>> contoursDetected = imgSkyIndexDataBinary.DetectContours();
    RandomPastelColorGenerator colorGen = new RandomPastelColorGenerator();
    foreach (Contour<Point> currContour in contoursDetected)
    {
        Color currentColor = colorGen.GetNext();
        Bgr currentColorBgr = new Bgr(currentColor.B, currentColor.G, currentColor.R);
        previewImage.Draw(currContour, currentColorBgr, -1);
    }

    ThreadSafeOperations.SetTextTB(tbLog, "Number of detected objects: " + contoursArray.Count + Environment.NewLine, true);

    //ShowImageForm ImgShow = new ShowImageForm(localPreviewBitmap, ParentForm, this);
    var imgShow = new SimpleShowImageForm(previewImage);
    imgShow.Show();
}
public static Image <Bgr, Byte> ColorLevel(this Image <Gray, Byte> src, ConnectLevel conn) { var dst = src.Convert <Bgr, Byte>(); var color = new Bgr(0, 0, 255); foreach (var domain in conn.Domains) { foreach (var p in domain.Points) { dst[p.Y, p.X] = color; } } return(dst); }
public static void GetSquareSuperpixelImages(Image <Gray, Byte> image, string dir, string imageName, int regionSize = 20) { var numOfPixels = (image.Width / regionSize) * (image.Height / regionSize); var segmentedImage = new Image <Bgr, Byte>(image.Width, image.Height); CvInvoke.CvtColor(image, segmentedImage, Emgu.CV.CvEnum.ColorConversion.Gray2Bgr); var meanImage = new Image <Gray, Byte>(image.Width, image.Height); var labels = new int[image.Width, image.Height]; var superpixelColors = new int[numOfPixels]; var pixelCount = new int[numOfPixels]; for (int i = 0; i < image.Width; i++) { for (int j = 0; j < image.Height; j++) { labels[i, j] = 1 + i / regionSize + (image.Width / regionSize) * (j / regionSize); if ((i % regionSize == 0 || j % regionSize == 0) && (i < image.Width - 1 && j < image.Height - 1)) { segmentedImage[i, j] = new Bgr(Color.Red); } } } for (int i = 0; i < image.Width; i++) { for (int j = 0; j < image.Height; j++) { var label = labels[i, j]; superpixelColors[label - 1] += (int)image[i, j].Intensity; pixelCount[label - 1] += 1; } } for (int i = 0; i < superpixelColors.Length; i++) { if (pixelCount[i] != 0) { superpixelColors[i] /= pixelCount[i]; } } for (int i = 0; i < image.Width; i++) { for (int j = 0; j < image.Height; j++) { var pixelNum = labels[i, j]; meanImage[i, j] = new Gray(superpixelColors[pixelNum - 1]); } } meanImage.Save(dir + @"\" + imageName + "_meanColors.png"); segmentedImage.Save(dir + @"\" + imageName + "_segmented.png"); }
private void splineToolStripMenuItem_Click(object sender, EventArgs e)
{
    img1_gp = gaussianPyr(img1);
    img2_gp = gaussianPyr(img2);
    img1_lp = laplacianPyr(img1, img1_gp);
    img2_lp = laplacianPyr(img2, img2_gp);
    lap = new Image<Bgr, byte>[dim];

    Image<Bgr, byte> iml, imr, imf;
    for (int k = 0; k < dim - 1; k++)
    {
        iml = img1_lp[k];
        imr = img2_lp[k];
        imf = new Image<Bgr, byte>(iml.Height, iml.Width);
        for (int i = 0; i < imf.Height; i++)
        {
            for (int j = 0; j < imf.Width; j++)
            {
                //if (i < Math.Pow(2, Math.Log - 1))
                if (j < imf.Height / 2)
                {
                    imf[i, j] = iml[i, j];
                }
                else if (j == imf.Height / 2)
                {
                    imf[i, j] = new Bgr((iml[i, j].Blue + imr[i, j].Blue) / 2, (iml[i, j].Green + imr[i, j].Green) / 2, (iml[i, j].Red + imr[i, j].Red) / 2);
                }
                else
                {
                    imf[i, j] = imr[i, j];
                }
            }
        }
        lap[k] = imf;
    }

    Image<Bgr, byte>[] fimg = lap;
    for (int k = dim - 2; k > 0; k--)
    {
        Image<Bgr, byte> temp = new Image<Bgr, byte>(fimg[k - 1].Height, fimg[k - 1].Width);
        //fimg[k - 1] = addImage(fimg[k - 1], expand(fimg[k]));
        CvInvoke.cvPyrUp(fimg[k], temp, FILTER_TYPE.CV_GAUSSIAN_5x5);
        fimg[k - 1] = fimg[k - 1] + temp;
    }

    fimg[0]._EqualizeHist();
    imageBox3.Image = fimg[0];
}
public static void TestImageToArray() { Image <Bgr, byte> img = new Image <Bgr, byte>(640, 480); img[5, 5] = new Bgr(128, 64, 32); var arr = img.ToArray(); Console.WriteLine("Color image to array: " + arr[0, 5, 5] + " " + arr[1, 5, 5] + " " + arr[2, 5, 5]); /*Debug.Assert(arr[5, 5, 0] == 128 && * arr[5, 5, 1] == 64 && * arr[5, 5, 2] == 32);*/ }
/// <summary>
/// Swaps the channels for an image provided by a url.
/// </summary>
/// <param name="order">Channel ordering. Each value has to be in the [0..2] range.</param>
/// <param name="imgUrl">Image url.</param>
/// <returns>Processed image.</returns>
public Bgr<byte>[,] SwapImageChannels(Uri imgUrl, int[] order)
{
    if (order.Any(x => x < 0 || x > CHANNEL_COUNT - 1))
    {
        throw new ArgumentException(String.Format("Each element of the channel order must be in: [{0}..{1}] range.", 0, CHANNEL_COUNT - 1));
    }

    Bgr<byte>[,] image = null;
    try
    {
        image = imgUrl.GetBytes().DecodeAsColorImage();
    }
    catch (Exception ex)
    {
        throw new Exception("The specified url does not point to a valid image.", ex);
    }

    image.Apply(c => swapChannels(c, order), inPlace: true);
    return image;
}
private void ProcessHeadPose(string[] p, Image <Bgr, byte> curFrame) { MCvFont f = Font; Features.RotationMatrix = new float[9]; Features.TranslationVector = new float[3]; for (int i = 4; i < 12; i++) { Features.RotationMatrix[i - 4] = Convert.ToSingle(p[i], new CultureInfo("en-GB")); } for (int i = 13; i < 16; i++) { Features.TranslationVector[i - 13] = Convert.ToSingle(p[i], new CultureInfo("en-GB")); } Features.SetModelPoints(); Bgr headPoseColor = new Bgr(Color.DodgerBlue); curFrame.Draw("O(" + Convert.ToInt32(Features.ModelPoints[0].X) + ", " + Convert.ToInt32(Features.ModelPoints[0].Y) + ")", ref f, PointFToPoint(Features.ModelPoints[0]), headPoseColor); curFrame.Draw("X(" + Convert.ToInt32(Features.ModelPoints[1].X) + ", " + Convert.ToInt32(Features.ModelPoints[1].Y) + ")", ref f, PointFToPoint(Features.ModelPoints[1]), headPoseColor); curFrame.Draw("Y(" + Convert.ToInt32(Features.ModelPoints[2].X) + ", " + Convert.ToInt32(Features.ModelPoints[2].Y) + ")", ref f, PointFToPoint(Features.ModelPoints[2]), headPoseColor); curFrame.Draw("Z(" + Convert.ToInt32(Features.ModelPoints[3].X) + ", " + Convert.ToInt32(Features.ModelPoints[3].Y) + ")", ref f, PointFToPoint(Features.ModelPoints[3]), headPoseColor); foreach (PointF mp in Features.ModelPoints) { curFrame.Draw(new CircleF(mp, 3), headPoseColor, -1); } curFrame.Draw(new LineSegment2DF(Features.ModelPoints[0], Features.ModelPoints[1]), headPoseColor, 2); curFrame.Draw(new LineSegment2DF(Features.ModelPoints[0], Features.ModelPoints[2]), headPoseColor, 2); curFrame.Draw(new LineSegment2DF(Features.ModelPoints[0], Features.ModelPoints[3]), headPoseColor, 2); Features.Distance = Convert.ToSingle(p[16], new CultureInfo("en-GB")); Rectangle r1 = new Rectangle(0, curFrame.Height - 30, curFrame.Width, curFrame.Height); Rectangle r2 = new Rectangle(0, curFrame.Height - 30, Convert.ToInt32(Features.Distance * curFrame.Width), curFrame.Height); curFrame.Draw(r1, new Bgr(Color.LightGray), -1); curFrame.Draw(r2, new Bgr(Color.Red), -1); curFrame.Draw("Distance: " + Convert.ToInt32(Features.Distance * 100.0) + "%", ref f, new Point(10, curFrame.Height - 7), new Bgr(0, 0, 0)); MainWindow.MapUC.MyMap.ZoomLevel = Math.Round(((1.0 - Features.Distance) * MainWindow.MapUC.SliderZoom.Maximum) / 2.0, 0) * 2; }
public Image <Bgr, byte> editBiScale(Image <Bgr, byte> sourceImage, double k) { Image <Bgr, byte> scaleImg = new Image <Bgr, byte>((int)(sourceImage.Width * k), (int)(sourceImage.Height * k)); for (int i = 0; i < scaleImg.Width - 1; i++) { for (int j = 0; j < scaleImg.Height - 1; j++) { double I = (i / k); double J = (j / k); double baseI = Math.Floor(I); double baseJ = Math.Floor(J); if (baseI >= sourceImage.Width - 1) { continue; } if (baseJ >= sourceImage.Height - 1) { continue; } double rI = I - baseI; double rJ = J - baseJ; double irI = 1 - rI; double irJ = 1 - rJ; Bgr c1 = new Bgr(); c1.Blue = sourceImage.Data[(int)baseJ, (int)baseI, 0] * irI + sourceImage.Data[(int)baseJ, (int)baseI + 1, 0] * rI; c1.Green = sourceImage.Data[(int)baseJ, (int)baseI, 1] * irI + sourceImage.Data[(int)baseJ, (int)baseI + 1, 1] * rI; c1.Red = sourceImage.Data[(int)baseJ, (int)baseI, 2] * irI + sourceImage.Data[(int)baseJ, (int)baseI + 1, 2] * rI; Bgr c2 = new Bgr(); c2.Blue = sourceImage.Data[(int)(baseJ + 1), (int)baseI, 0] * irI + sourceImage.Data[(int)(baseJ + 1), (int)baseI + 1, 0] * rI; c2.Green = sourceImage.Data[(int)(baseJ + 1), (int)baseI, 1] * irI + sourceImage.Data[(int)(baseJ + 1), (int)baseI + 1, 1] * rI; c2.Red = sourceImage.Data[(int)(baseJ + 1), (int)baseI, 2] * irI + sourceImage.Data[(int)(baseJ + 1), (int)baseI + 1, 2] * rI; Bgr c = new Bgr(); c.Blue = c1.Blue * irJ + c2.Blue * rJ; c.Green = c1.Green * irJ + c2.Green * rJ; c.Red = c1.Red * irJ + c2.Red * rJ; scaleImg[j, i] = c; } } return(scaleImg); }
public void DetectBackgroundTest()
{
    //var path1 = "PicassoUnitTest/DetectBackgroundTest/14-211-222.png";
    //var path1 = "PicassoUnitTest/DetectBackgroundTest/107-183-51.png";
    var path1 = "PicassoUnitTest/DetectBackgroundTest/249-238-32.png";
    var filepath1 = Path.Combine(Drive.GetDriveRoot(), path1);
    Bitmap image1 = new Bitmap(filepath1);

    Bgr expectedBackGround = new Bgr(249, 238, 32);
    Bgr actualBackGround = Picasso.Heuristics.DetectBackground(image1, 10);
    Assert.IsTrue(Picasso.Utility.IsEqual(expectedBackGround, actualBackGround));
}
public static void ApplyLut(ref double[,] source, out Emgu.CV.Image <Bgr, Byte> destination) { destination = new Image <Bgr, Byte>(source.GetLength(0), source.GetLength(1)); for (int y = 0; y < source.GetLength(1); y++) { for (int x = 0; x < source.GetLength(0); x++) { var cId = Convert.ToInt32(source[y, x]); destination[y, x] = new Bgr(Colors[cId, 0], Colors[cId, 1], Colors[cId, 2]); } } }
public static void Main()
{
    var sourceName = String.Empty;

    //video over pipe (direct link and Youtube) (do not support seek)
    //var pipeName = new Uri("http://trailers.divx.com/divx_prod/divx_plus_hd_showcase/BigBuckBunny_DivX_HD720p_ASP.divx").NamedPipeFromVideoUri(); //web-video
    var pipeName = new Uri("https://www.youtube.com/watch?v=Vpg9yizPP_g").NamedPipeFromYoutubeUri(); //Youtube
    sourceName = String.Format(@"\\.\pipe\{0}", pipeName);

    //video http link (Supports seek)
    //sourceName = "http://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4";

    //---------------------------------------------
    ImageStreamReader reader = new FileCapture(sourceName);
    reader.Open();

    //seek if you can
    if (reader.CanSeek)
    {
        reader.Seek((int)(reader.Length * 0.25), System.IO.SeekOrigin.Begin);
    }

    //read video frames
    Bgr<byte>[,] frame = null;
    while (true)
    {
        reader.ReadTo(ref frame);
        if (frame == null)
        {
            break;
        }
        frame.Show(scaleForm: true);
        ((double)reader.Position / reader.Length).Progress();
    }
    Console.WriteLine("The end.");

    //---------------------------------------------------------------------------
    UI.CloseAll();

    Console.WriteLine("Downloading video...");
    string fileExtension;
    var downloadPipeName = new Uri("https://www.youtube.com/watch?v=Vpg9yizPP_g").NamedPipeFromYoutubeUri(out fileExtension); //Youtube
    downloadPipeName.SaveNamedPipeStream("out" + fileExtension);
    Console.WriteLine("Video saved.");

    Process.Start("out" + fileExtension); //open local file
}
private void button2_Click(object sender, EventArgs e) { if ((textBox1.Text.Length == 0) || (textBox2.Text.Length == 0)) { MessageBox.Show("You must enter player or team name", "Error!", MessageBoxButtons.OK, MessageBoxIcon.Asterisk); return; } if (textBox1.Text == textBox2.Text) { MessageBox.Show("Player names can't be same", "Error!", MessageBoxButtons.OK, MessageBoxIcon.Asterisk); return; } String fnames = @"\^"; if (Regex.IsMatch(textBox1.Text, fnames) || Regex.IsMatch(textBox2.Text, fnames)) { MessageBox.Show("Player names can't have symbol \"^\" !", "Error!", MessageBoxButtons.OK, MessageBoxIcon.Asterisk); return; } Global.text = AddNames.Ret_names(textBox1.Text, textBox2.Text); Image <Bgr, byte> lll = new Image <Bgr, byte>(50, 50, new Bgr(trackBar1.Value, trackBar2.Value, trackBar3.Value)); Mat ll = lll.Mat; CvInvoke.CvtColor(ll, ll, Emgu.CV.CvEnum.ColorConversion.Bgr2Hsv); Bgr color = (ll.ToImage <Bgr, byte>())[20, 20]; Global.colors = new Col(color.Blue, color.Green, color.Red); OpenFileDialog opf = new OpenFileDialog { Filter = "Video files | *.avi; *.mp4; *.mov" }; if (opf.ShowDialog() == DialogResult.OK) { Global.name = opf; } else { return; } Global.videoFromFile = true; Global.cancel = false; this.Close(); }
private void Config() { var bgrImage = new Image <Bgr, byte>(HueColor.Size); for (int y = 0; y < bgrImage.Height; y++) { for (int x = 0; x < bgrImage.Width; x++) { bgrImage[y, x] = new Bgr(Color.White); } } hueImage = bgrImage.Convert <Hsv, byte>(); HueColor.Image = hueImage; }
Image <Bgr, Byte> yCbCrThreshold(Image <Bgr, Byte> originalImage) { Image <Bgr, Byte> newImage = new Image <Bgr, byte>(originalImage.Width, originalImage.Height); for (int Y = 0; Y < originalImage.Height; Y++) { for (int X = 0; X < originalImage.Width; X++) { double rRaw = originalImage[Y, X].Red; double gRaw = originalImage[Y, X].Green; double bRaw = originalImage[Y, X].Blue; double r = rRaw / 255; double g = gRaw / 255; double b = bRaw / 255; double y = 0.299 * r + 0.587 * g + 0.114 * b; double cB = -0.168736 * r + -0.331264 * g + 0.500 * b; double cR = 0.500 * r + -0.418688 * g + -0.081312 * b; bool isFire = false; if (y >= cR && cR >= cB) { double crcb = cR - cB; double ycb = y - cB; if (!((crcb >= -0.1 && ycb >= -0.1 && ycb <= 0.3) || (crcb >= 0 && crcb <= 0.4 && ycb >= 0 && ycb <= 0.8))) { isFire = true; } } if (isFire) { isFire = !(cR - cB > -0.1 && y - cB > -0.1 && y - cB <= 0.6); } if (isFire) { newImage[Y, X] = originalImage[Y, X]; } else { newImage[Y, X] = new Bgr(0, 0, 0); } } } return(newImage); }
public void IsEqualTest()
{
    // Create equal and not-equal colors and ensure the IsEqual method returns values as expected
    Console.WriteLine("Starting Picasso.Utility.IsEqual method testing");

    var color_a = new Bgr(0, 0, 255);
    var color_b = new Bgr(Color.Red);
    var color_c = new Bgr(Color.Green);

    Assert.IsTrue(Utility.IsEqual(color_a, color_b));
    Assert.IsFalse(Utility.IsEqual(color_a, color_c));
    Assert.IsFalse(Utility.IsEqual(color_b, color_c));

    Console.WriteLine("Successfully completed Picasso.Utility.IsEqual Test");
}
/// <summary>
/// Image negative using the EmguCV library.
/// Slower method.
/// </summary>
/// <param name="img">Image</param>
internal static void Negative(Image<Bgr, byte> img)
{
    Bgr aux;
    for (int y = 0; y < img.Height; y++)
    {
        for (int x = 0; x < img.Width; x++)
        {
            // emguCV access: slower
            aux = img[y, x];
            img[y, x] = new Bgr(255 - aux.Blue, 255 - aux.Green, 255 - aux.Red);
        }
    }
}
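Since the summary above flags per-pixel managed indexing as the slow path, here is a hedged alternative sketch: Emgu CV's Image<,> exposes Not() and _Not(), which compute the same 255-minus-value negative in native code (assuming an Image<Bgr, byte> named img).

// Sketch: whole-image negative without per-pixel indexing.
Image<Bgr, byte> negative = img.Not(); // returns a new inverted image
img._Not();                            // or invert in place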
private void pictureBox2_MouseDown(object sender, MouseEventArgs e)
{
    Image<Bgr, byte> img = new Image<Bgr, byte>(bgr_img.Bitmap).Resize(pictureBox2.Width, pictureBox2.Height, Emgu.CV.CvEnum.Inter.Area); // take the picture from the control and resize it to the control size
    Bgr color = img[e.Y, e.X];       // sample the color at the mouse-click position
    label1.Text = color.ToString();  // display the color value
    Image<Bgr, byte> img1 = new Image<Bgr, byte>(imageBox2.Width, imageBox2.Height, color); // create an image filled with that color, shown to double-check the sampled color
    pictureBox1.Image = img1.Bitmap;
}
public TrackingControl()
{
    InitializeComponent();

    //The following detector types are supported: "MIL" – TrackerMIL; "BOOSTING" – TrackerBoosting
    _trackingDetector = new TrackingDetector("MIL");
    _camshiftDetector = new CamshiftDetector();

    _bgrRed = new Bgr(Color.Red);
    _bgrBlue = new Bgr(Color.Blue);

    _imageBoxSelector = new ImageBoxSelector();
    this.Load += TrackingControl_Load;
}
/// <summary>
/// Flood fill in a BFS manner, so as not to overwhelm the stack
/// </summary>
/// <param name="image">the image we wish to fill on</param>
/// <param name="xpixel">the x pixel to sample from</param>
/// <param name="ypixel">the y pixel to sample from</param>
/// <param name="threshold">the threshold of difference</param>
/// <returns>the background which can be subtracted</returns>
public static Bitmap FloodFill(Bitmap image, int xpixel, int ypixel, double threshold)
{
    //create an identically sized "background" image and fill it white
    Emgu.CV.Image<Bgr, Byte> imBackground = new Image<Bgr, byte>(image.Width, image.Height);
    Emgu.CV.Image<Bgr, Byte> imImage = new Image<Bgr, byte>(image);
    Bgr bgrTarget = imImage[ypixel, xpixel]; // the Emgu indexer is [row, column]
    Bgr color = new Bgr(255, 255, 255);
    Bgr white = new Bgr(255, 255, 255);
    for (int ii = 0; ii < image.Width; ii++)
    {
        for (int jj = 0; jj < image.Height; jj++)
        {
            imBackground[jj, ii] = white;
        }
    }

    Queue<System.Drawing.Point> pointQueue = new Queue<System.Drawing.Point>();
    pointQueue.Enqueue(new System.Drawing.Point(xpixel, ypixel));
    Bgr gray = new Bgr(Color.Gray);
    Bgr mask_color = new Bgr(MASK_COLOR);
    System.Drawing.Point[] pList = new System.Drawing.Point[4];

    log.Info("Begin iterative flood fill");
    while (!(pointQueue.Count == 0)) //make sure queue isn't empty
    {
        System.Drawing.Point p = pointQueue.Dequeue();
        //add all neighboring points to a list
        pList[0] = (new System.Drawing.Point(p.X, p.Y - 1)); //above
        pList[1] = (new System.Drawing.Point(p.X, p.Y + 1)); //below
        pList[2] = (new System.Drawing.Point(p.X - 1, p.Y)); //left
        pList[3] = (new System.Drawing.Point(p.X + 1, p.Y)); //right
        foreach (System.Drawing.Point neighbor in pList)
        {
            if (!(Utility.IsBound(image, neighbor.X, neighbor.Y)))
            {
                continue;
            }
            color = imBackground[neighbor.Y, neighbor.X];
            if (Utility.IsEqual(white, color) && (Utility.Distance(imImage[neighbor.Y, neighbor.X], bgrTarget) < threshold)) //and hasn't been seen before
            {
                imBackground[neighbor.Y, neighbor.X] = gray; //set as added to the queue
                pointQueue.Enqueue(neighbor); //and add to the queue
            }
        }
        imBackground[p.Y, p.X] = mask_color; //set the pixel to hot pink
    }
    return imBackground.ToBitmap();
}
static void Scan(Image<Bgr, byte> img, Image<Bgr, byte> result, int COLOR_DIFF, int DIST)
{
    Bgr c1, c2;
    for (int i = 0; i < img.Rows; i++)
    {
        int flagj = 0;
        for (int j = 0; j < img.Cols && j < img.Cols - 1; j++)
        {
            c1 = img[i, j];
            c2 = img[i, j + 1];
            if (Math.Pow(Math.Pow(c1.Blue - c2.Blue, 2) + Math.Pow(c1.Green - c2.Green, 2) + Math.Pow(c1.Red - c2.Red, 2), 0.5) <= COLOR_DIFF)
            {
                flagj++;
            }
            else if (flagj >= DIST)
            {
                //cout << "find edge " << j << "," << i << endl;
                //result.at<Vec3b>(i, j).val[0] = 0;
                //result.at<Vec3b>(i, j).val[1] = 0;
                //result.at<Vec3b>(i, j).val[2] = 255;
                result[i, j] = new Bgr(0, 0, 255);
                flagj = 0;
            }
        }
    }

    for (int j = 0; j < img.Cols; j++)
    {
        int flagi = 0;
        for (int i = 0; i < img.Rows && i < img.Rows - 1; i++)
        {
            c1 = img[i, j];
            c2 = img[i + 1, j];
            if (Math.Pow(3 * Math.Pow(c1.Blue - c2.Blue, 2) + 4 * Math.Pow(c1.Green - c2.Green, 2) + 2 * Math.Pow(c1.Red - c2.Red, 2), 0.5) <= COLOR_DIFF)
            {
                flagi++;
            }
            else if (flagi >= DIST)
            {
                //result.at<Vec3b>(i, j).val[0] = 0;
                //result.at<Vec3b>(i, j).val[1] = 0;
                //result.at<Vec3b>(i, j).val[2] = 255;
                result[i, j] = new Bgr(0, 0, 255);
                flagi = 0;
            }
        }
    }
    //CvInvoke.Imshow("scan", result);
}
private Image<Bgr, byte> CreateMargedImage(
    Image<Bgr, byte> firstImage,
    Image<Bgr, byte> secondImage,
    Dictionary<KeyPoint, KeyPoint> pairs,
    KeyPairsTriangle ransacTriangle)
{
    // Merge the two images side by side.
    var margedImageWidth = firstImage.Width + secondImage.Width;
    var margedImageHeight = Math.Max(firstImage.Height, secondImage.Height);
    var imageResult = new Image<Bgr, byte>(margedImageWidth, margedImageHeight)
    {
        ROI = new Rectangle(0, 0, firstImage.Width, firstImage.Height)
    };
    firstImage.CopyTo(imageResult);
    imageResult.ROI = new Rectangle(firstImage.Width, 0, secondImage.Width, secondImage.Height);
    secondImage.CopyTo(imageResult);
    imageResult.ROI = Rectangle.Empty;

    // Use RANSAC.
    if (ransacTriangle != null)
    {
        var firstTriangle = ransacTriangle.GetFirstTriangle();
        var secondTriangle = ransacTriangle.GetSecondTriangle();
        var triangleColor = new Bgr(Color.Black).MCvScalar;
        CvInvoke.Line(imageResult, firstTriangle[0], firstTriangle[1], triangleColor, 2);
        CvInvoke.Line(imageResult, firstTriangle[1], firstTriangle[2], triangleColor, 2);
        CvInvoke.Line(imageResult, firstTriangle[2], firstTriangle[0], triangleColor, 2);
        CvInvoke.Line(imageResult, secondTriangle[0].ChangeOffset(firstImage.Width), secondTriangle[1].ChangeOffset(firstImage.Width), triangleColor, 2);
        CvInvoke.Line(imageResult, secondTriangle[1].ChangeOffset(firstImage.Width), secondTriangle[2].ChangeOffset(firstImage.Width), triangleColor, 2);
        CvInvoke.Line(imageResult, secondTriangle[2].ChangeOffset(firstImage.Width), secondTriangle[0].ChangeOffset(firstImage.Width), triangleColor, 2);
    }

    // Draw lines between matched key points.
    foreach (var _pair in pairs)
    {
        var start = new Point((int)_pair.Key.X, (int)_pair.Key.Y);
        var second = new Point((int)_pair.Value.X + firstImage.Width, (int)_pair.Value.Y);
        CvInvoke.Line(imageResult, start, second, new Bgr(_pair.Key.Color).MCvScalar, 1);
    }

    return imageResult;
}
public Image <Bgr, byte> resizeimage(Image <Bgr, byte> image, double k1, double k2) { Image <Bgr, byte> resizedimage = new Image <Bgr, byte>((int)(image.Width * k1), (int)(image.Height * k2)); for (int i = 0; i < resizedimage.Width - 1; i++) { for (int j = 0; j < resizedimage.Height - 1; j++) { double I = (i / k1); double J = (j / k2); double baseI = Math.Floor(I); double baseJ = Math.Floor(J); if (baseI >= image.Width - 1) { continue; } if (baseJ >= image.Height - 1) { continue; } double rI = I - baseI; double rJ = J - baseJ; double irI = 1 - rI; double irJ = 1 - rJ; Bgr c1 = new Bgr(); c1.Blue = image.Data[(int)baseJ, (int)baseI, 0] * irI + image.Data[(int)baseJ, (int)(baseI + 1), 0] * rI; c1.Green = image.Data[(int)baseJ, (int)baseI, 1] * irI + image.Data[(int)baseJ, (int)(baseI + 1), 1] * rI; c1.Red = image.Data[(int)baseJ, (int)baseI, 2] * irI + image.Data[(int)baseJ, (int)(baseI + 1), 2] * rI; Bgr c2 = new Bgr(); c2.Blue = image.Data[(int)(baseJ + 1), (int)baseI, 0] * irI + image.Data[(int)(baseJ + 1), (int)(baseI + 1), 0] * rI; c2.Green = image.Data[(int)(baseJ + 1), (int)baseI, 1] * irI + image.Data[(int)(baseJ + 1), (int)(baseI + 1), 1] * rI; c2.Red = image.Data[(int)(baseJ + 1), (int)baseI, 2] * irI + image.Data[(int)(baseJ + 1), (int)(baseI + 1), 2] * rI; Bgr c = new Bgr(); c.Blue = c1.Blue * irJ + c2.Blue * rJ; c.Green = c1.Green * irJ + c2.Green * rJ; c.Red = c1.Red * irJ + c2.Red * rJ; resizedimage[j, i] = c; } } return(resizedimage); }
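For reference, a hedged one-liner using Emgu CV's built-in Resize, which performs the same bilinear scaling as the hand-rolled loop above; the Inter.Linear enum name assumes Emgu CV 3.x naming.

// Sketch: scale an Image<Bgr, byte> named 'image' by factors k1/k2 with built-in bilinear interpolation.
Image<Bgr, byte> resized = image.Resize((int)(image.Width * k1), (int)(image.Height * k2), Emgu.CV.CvEnum.Inter.Linear);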
private void Application_Idle(object sender, EventArgs e)
{
    Mat a = cap.QueryFrame();
    if (a != null)
    {
        System.Threading.Thread.Sleep((int)(1000.0 / fps - 5));
        imageBox1.Image = a;
        GC.Collect();
    }
    if (a != null)
    {
        textBox1.Text = "";
        pictureBox1.Image = imageBox1.Image.Bitmap;
        Bgr drawColor = new Bgr(Color.Blue);
        try
        {
            Image<Bgr, Byte> image = new Image<Bgr, byte>(new Bitmap(pictureBox1.Image));
            Image<Gray, byte> gray = image.Convert<Gray, Byte>();
            CvInvoke.GaussianBlur(gray, gray, new Size(3, 3), 1);
            gray._EqualizeHist(); // histogram equalization
            using (gray)
            {
                _ocr.SetImage(gray);
                _ocr.Recognize();
                Tesseract.Character[] charactors = _ocr.GetCharacters();
                foreach (Tesseract.Character c in charactors)
                {
                    image.Draw(c.Region, drawColor, 1);
                }
                imageBox1.Image = image;
                String text = _ocr.GetUTF8Text();
                label1.Text = text;
                for (int i = 0; i < charactors.Length; i++)
                {
                    this.textBox1.Text += charactors[i].Text;
                }
            }
        }
        catch (Exception exception)
        {
            MessageBox.Show(exception.Message);
        }
    }
}
public void TestKMeans()
{
    int clustersCount = 5;
    int sampleCount = 300;
    int imageSize = 500;
    Bgr[] colors = new Bgr[]
    {
        new Bgr(0, 0, 255),
        new Bgr(0, 255, 0),
        new Bgr(255, 100, 100),
        new Bgr(255, 0, 255),
        new Bgr(0, 255, 255)
    };

    Image<Bgr, Byte> image = new Image<Bgr, byte>(imageSize, imageSize);

    #region generate random samples
    Matrix<float> points = new Matrix<float>(sampleCount, 1, 2);
    Matrix<int> clusters = new Matrix<int>(sampleCount, 1);
    Random r = new Random();
    for (int i = 0; i < clustersCount; i++)
    {
        Matrix<float> row = points.GetRows(i * (sampleCount / clustersCount), (i + 1) * (sampleCount / clustersCount), 1);
        row.SetRandNormal(new MCvScalar(r.Next() % imageSize, r.Next() % imageSize), new MCvScalar((r.Next() % imageSize) / 6, (r.Next() % imageSize) / 6));
    }
    using (ScalarArray ia = new ScalarArray(new MCvScalar()))
    {
        CvInvoke.AbsDiff(points, ia, points);
    }
    CvInvoke.RandShuffle(points, 1.0, 0);
    #endregion

    CvInvoke.Kmeans(
        points,
        2,
        clusters,
        new MCvTermCriteria(10, 1.0),
        5,
        CvEnum.KMeansInitType.PPCenters);

    for (int i = 0; i < sampleCount; i++)
    {
        PointF p = new PointF(points.Data[i, 0], points.Data[i, 1]);
        image.Draw(new CircleF(p, 1.0f), colors[clusters[i, 0]], 1);
    }

    //Emgu.CV.UI.ImageViewer.Show(image);
}
static void DoKmeans(string imagePath) { Bgr[] clusterColors = new Bgr[] { new Bgr(0, 0, 255), new Bgr(0, 255, 0), new Bgr(255, 100, 100), new Bgr(255, 0, 255), new Bgr(133, 0, 99), new Bgr(130, 12, 49), new Bgr(0, 255, 255) }; var srcImage = new Image <Bgr, float>(imagePath); Matrix <float> samples = new Matrix <float>(srcImage.Rows * srcImage.Cols, 1, 3); Matrix <int> finalClusters = new Matrix <int>(srcImage.Rows * srcImage.Cols, 1); for (int y = 0; y < srcImage.Rows; y++) { for (int x = 0; x < srcImage.Cols; x++) { samples.Data[y + x * srcImage.Rows, 0] = (float)srcImage[y, x].Blue; samples.Data[y + x * srcImage.Rows, 1] = (float)srcImage[y, x].Green; samples.Data[y + x * srcImage.Rows, 2] = (float)srcImage[y, x].Red; } } MCvTermCriteria term = new MCvTermCriteria(100, 0.5) { Type = TermCritType.Eps | TermCritType.Iter }; int clusterCount = 5; int attempts = 5; CvInvoke.Kmeans(samples, clusterCount, finalClusters, term, attempts, KMeansInitType.PPCenters); Image <Bgr, float> outputImage = new Image <Bgr, float>(srcImage.Size); for (int y = 0; y < srcImage.Rows; y++) { for (int x = 0; x < srcImage.Cols; x++) { PointF p = new PointF(x, y); outputImage.Draw(new CircleF(p, 1.0f), clusterColors[finalClusters[y + x * srcImage.Rows, 0]], 1); } } outputImage.Save($"kmeans-{imagePath}"); }
public FormGroupMatch() { InitializeComponent(); debugImage = new Bgr <byte> [pictureBox.Height, pictureBox.Width]; var detections = getDetections(); drawDetections(detections, Bgr <byte> .Red, 1); groupMatching = new RectangleClustering(); var clusters = groupMatching.Group(detections); drawDetections(clusters.Select(x => x.Representative), Bgr <byte> .Green, 3); pictureBox.Image = debugImage.ToBitmap(); }
private void button2_Click(object sender, EventArgs e)
{
    double blue_min = double.Parse(txt_BlueMin.Text);
    double blue_max = double.Parse(txt_BlueMax.Text);
    double green_min = double.Parse(txt_GreenMin.Text);
    double green_max = double.Parse(txt_GreenMax.Text);
    double red_min = double.Parse(txt_RedMin.Text);
    double red_max = double.Parse(txt_RedMax.Text);

    Bgr min = new Bgr(blue_min, green_min, red_min); // lower bound of the yellow range, with some tolerance
    Bgr max = new Bgr(blue_max, green_max, red_max); // upper bound of the yellow range, with some tolerance

    Image<Gray, byte> result = bgr_img.InRange(min, max); // extract the matching color region
    pictureBox2.Image = bgr_img.Bitmap;                   // show the input image
    imageBox2.Image = result;                             // show the extracted color region
}
// compare two Emgu CV colors using ColorMine, an API for C#
public static int colorDifference(Bgr c1, Bgr c2)
{
    var myRGB1 = new ColorMine.ColorSpaces.Rgb { R = c1.Red, G = c1.Green, B = c1.Blue };
    var myRGB2 = new ColorMine.ColorSpaces.Rgb { R = c2.Red, G = c2.Green, B = c2.Blue };
    return (int)myRGB1.Compare(myRGB2, new Cie1976Comparison());
}
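A small hedged usage sketch for the helper above; the two colors and the acceptance threshold are illustrative assumptions, not values from the original code.

// Sketch: treat two Bgr colors as "the same" when their CIE76 delta-E is below a small threshold.
Bgr detected = new Bgr(40, 190, 230);
Bgr reference = new Bgr(35, 200, 235);
bool looksTheSame = colorDifference(detected, reference) < 5; // threshold chosen only for illustration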