/// <summary>
/// Maps an average BGR/HSV colour pair to a <see cref="CardColor"/> bucket.
/// Low-saturation samples are treated as white/other; otherwise the dominant
/// BGR channel decides between red, green and purple.
/// </summary>
/// <param name="avgBgr">Average colour of the region in BGR space.</param>
/// <param name="avgHsv">Average colour of the region in HSV space.</param>
/// <returns>The classified card colour; white on channel ties.</returns>
private CardColor ClassifyColor(Bgr avgBgr, Hsv avgHsv)
{
    // "Satuation" is Emgu CV's (misspelled) saturation property.
    if (avgHsv.Satuation < 30)
    {
        return CardColor.White;
    }

    if (avgHsv.Satuation < 45)
    {
        return CardColor.Other;
    }

    double blue = avgBgr.Blue;
    double green = avgBgr.Green;
    double red = avgBgr.Red;

    if (red > blue && red > green)
    {
        return CardColor.Red;
    }

    if (green > blue && green > red)
    {
        return CardColor.Green;
    }

    // Green being the weakest channel implies a red+blue mix, i.e. purple.
    if (green < blue && green < red)
    {
        return CardColor.Purple;
    }

    return CardColor.White; // equal channels: fall back to white
}
/// <summary>
/// Initialises the hand-gesture form: opens the test video, primes the first
/// frame so the capture dimensions are valid, sets empirical skin-detection
/// thresholds, allocates the contour-analysis buffers, and hooks frame
/// processing to the application idle loop.
/// NOTE(review): capture and cascade paths are hard-coded absolute paths and
/// will only resolve on the original author's machine.
/// </summary>
public Form1()
{
    InitializeComponent();
    // Hard-coded test video; QueryFrame() primes the capture so Width/Height are populated.
    grabber = new Emgu.CV.Capture("C:/Users/L33549.CITI/Desktop/a.avi");
    grabber.QueryFrame();
    frameWidth = grabber.Width;
    frameHeight = grabber.Height;
    //detector = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);
    // Empirical skin ranges in HSV and YCrCb colour space.
    hsv_min = new Hsv(0, 45, 0);
    hsv_max = new Hsv(20, 255, 255);
    YCrCb_min = new Ycc(0, 129, 40);
    YCrCb_max = new Ycc(255, 185, 135);
    box = new MCvBox2D();
    ellip = new Ellipse();
    // Separate OpenCV MemStorage pools for each structure family.
    contourStorage = new MemStorage();
    approxStorage = new MemStorage();
    hullStorage = new MemStorage();
    defectsStorage = new MemStorage();
    tipPts = new Point[MAX_POINTS]; // coords of the finger tips
    foldPts = new Point[MAX_POINTS]; // coords of the skin folds between fingers
    depths = new float[MAX_POINTS]; // distances from tips to folds
    cogPt = new Point();
    fingerTips = new List<Point>();
    face = new CascadeClassifier("C:/Users/L33549.CITI/Desktop/AbuseAnalysis/HandGestureRecognition/HandGestureRecognition/HandGestureRecognition/haar/Original/haarcascade_hand.xml");
    // Process a frame whenever the UI message queue is idle.
    Application.Idle += new EventHandler(FrameGrabber);
    // Disabled Kinect colour-stream setup, kept for reference.
    /*foreach (var potentialSensor in KinectSensor.KinectSensors) { if (potentialSensor.Status == KinectStatus.Connected) { this.sensor = potentialSensor; break; } } if (null != this.sensor) { // Turn on the color stream to receive color frames this.sensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30); // Allocate space to put the pixels we'll receive this.colorPixels = new byte[this.sensor.ColorStream.FramePixelDataLength]; // This is the bitmap we'll display on-screen this.colorBitmap = new WriteableBitmap(this.sensor.ColorStream.FrameWidth, this.sensor.ColorStream.FrameHeight, 96.0, 96.0, PixelFormats.Bgr32, null); // Set the image we display to point to the bitmap where we'll put the image data //this.Image.Source = this.colorBitmap; // Add an event handler to be called whenever there is new color frame data this.sensor.ColorFrameReady += this.SensorColorFrameReady; // Start the sensor!
    this.sensor.Start(); }*/
}
/// <summary>
/// Converts an RGB colour to HSB by delegating to the HSV converter and
/// copying the components across (HSB "brightness" maps onto HSV "value").
/// </summary>
/// <param name="color">Source RGB colour.</param>
/// <param name="item">Destination HSB colour; its components are overwritten.</param>
internal static void ToColorSpace(IRgb color, IHsb item)
{
    var intermediate = new Hsv();
    HsvConverter.ToColorSpace(color, intermediate);

    item.H = intermediate.H;
    item.S = intermediate.S;
    item.B = intermediate.V;
}
/// <summary>
/// Initialises the form, the default skin-colour segmentation thresholds
/// (HSV and YCrCb ranges chosen empirically), and an empty moments structure
/// used later for blob tracking.
/// </summary>
public Form1()
{
    InitializeComponent();
    hsv_min = new Hsv(0, 45, 0);
    hsv_max = new Hsv(20, 255, 255);
    YCrCb_min = new Ycc(0, 131, 80);
    YCrCb_max = new Ycc(255, 185, 135);
    mv = new MCvMoments();
}
/// <summary>
/// Converts an HSB colour to RGB via the HSV converter
/// (HSB "brightness" maps onto HSV "value").
/// </summary>
/// <param name="item">Source HSB colour.</param>
/// <returns>The equivalent RGB colour.</returns>
internal static IRgb ToColor(IHsb item)
{
    var asHsv = new Hsv();
    asHsv.H = item.H;
    asHsv.S = item.S;
    asHsv.V = item.B;
    return HsvConverter.ToColor(asHsv);
}
/// <summary>
/// Initialises the form, the skin-colour thresholds and the moments
/// structure, then centres the tracking point on the loaded image.
/// NOTE(review): assumes the 'loaded' image field is already populated before
/// this constructor runs — confirm, otherwise the last two lines throw a
/// NullReferenceException.
/// </summary>
public Form1()
{
    InitializeComponent();
    // Empirical skin ranges in HSV and YCrCb colour space.
    hsv_min = new Hsv(0, 45, 0);
    hsv_max = new Hsv(20, 255, 255);
    YCrCb_min = new Ycc(0, 131, 80);
    YCrCb_max = new Ycc(255, 185, 135);
    mv = new MCvMoments();
    // Start tracking from the image centre.
    pt.X = loaded.Width / 2;
    pt.Y = loaded.Height / 2;
}
/// <summary>
/// Classifies an HSV sample with the trained k-nearest-neighbour model
/// (k = 5) and maps the numeric class label back to a <see cref="CardColor"/>.
/// </summary>
/// <param name="hsv">The HSV colour to classify.</param>
/// <returns>The predicted card colour.</returns>
public CardColor Classify(Hsv hsv)
{
    // Fix: Matrix<T> wraps native memory (IDisposable); dispose the 1x3
    // sample and 1x1 result deterministically instead of leaking them to
    // the finaliser, as this method runs per classified region.
    using (Matrix<float> toClassify = new Matrix<float>(1, 3))
    using (Matrix<float> classification = new Matrix<float>(1, 1))
    {
        toClassify[0, 0] = (float)hsv.Hue;
        toClassify[0, 1] = (float)hsv.Satuation; // Emgu's misspelled saturation property
        toClassify[0, 2] = (float)hsv.Value;
        hsvClassifier.FindNearest(toClassify, 5, classification, null, null, null);
        return (CardColor)classification[0, 0];
    }
}
/// <summary>
/// Initialises the form: opens the test video, primes the first frame so the
/// capture dimensions are valid, creates the adaptive skin detector, sets
/// empirical HSV/YCrCb skin thresholds, and hooks frame processing to the
/// application idle loop.
/// NOTE(review): the video path is relative to the build output directory.
/// </summary>
public Form1()
{
    InitializeComponent();
    grabber = new Emgu.CV.Capture(@".\..\..\..\M2U00253.MPG");
    grabber.QueryFrame(); // prime the capture so Width/Height are populated
    frameWidth = grabber.Width;
    frameHeight = grabber.Height;
    detector = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);
    // Empirical skin ranges in HSV and YCrCb colour space.
    hsv_min = new Hsv(0, 45, 0);
    hsv_max = new Hsv(20, 255, 255);
    YCrCb_min = new Ycc(0, 131, 80);
    YCrCb_max = new Ycc(255, 185, 135);
    box = new MCvBox2D();
    ellip = new Ellipse();
    // Process a frame whenever the UI message queue is idle.
    Application.Idle += new EventHandler(FrameGrabber);
}
/// <summary>
/// Initialises the form: opens the default webcam, primes the first frame,
/// creates the adaptive skin detector, sets skin thresholds, and loads the
/// Haar cascades used for face and eye detection.
/// NOTE(review): cascade XML files are expected next to the executable; also
/// note 'reye' loads the left-eye cascade and 'leye' the right-eye one —
/// possibly mirrored-image convention, confirm before "fixing".
/// </summary>
public Form1()
{
    InitializeComponent();
    grabber = new Emgu.CV.Capture(); // default camera
    grabber.QueryFrame(); // prime the capture so Width/Height are populated
    frameWidth = grabber.Width;
    frameHeight = grabber.Height;
    detector = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);
    // Empirical skin ranges in HSV and YCrCb colour space.
    hsv_min = new Hsv(0, 45, 0);
    hsv_max = new Hsv(20, 255, 255);
    YCrCb_min = new Ycc(0, 131, 80);
    YCrCb_max = new Ycc(255, 185, 135);
    box = new MCvBox2D();
    // ellip = new Ellipse();
    _face = new HaarCascade("haarcascade_frontalface_alt_tree.xml");
    eyes = new HaarCascade("haarcascade_mcs_eyepair_big.xml");
    reye = new HaarCascade("haarcascade_mcs_lefteye.xml");
    leye = new HaarCascade("haarcascade_mcs_righteye.xml");
    label1.Hide();
}
/// <summary>
/// Parses the ground-truth colour values encoded in a training-image
/// filename, e.g. "B146, G159, R136=Green - H123, S80, V178=Purple VERDICT=Purple.png".
/// On any parse failure both outputs fall back to default-constructed colours
/// (deliberate best-effort behaviour).
/// </summary>
/// <param name="filename">Filename carrying the BGR and HSV values.</param>
/// <param name="bgr">Parsed BGR colour (default on failure).</param>
/// <param name="hsv">Parsed HSV colour (default on failure).</param>
private void fileNameToColors(string filename, out Bgr bgr, out Hsv hsv)
{
    //This is a filename:
    //B146, G159, R136=Green - H123, S80, V178=Purple VERDICT=Purple.png
    string[] parts = filename.Split(' ', '=');//, 'B', 'G', 'R', 'H', 'S', 'V'
    /*
     * With this splitting:
     *   parts[0] = "B146,"   parts[1] = "G159,"   parts[2] = "R136"
     *   parts[3] = "Green"   parts[4] = "-"
     *   parts[5] = "H123,"   parts[6] = "S80,"    parts[7] = "V178"
     * (An earlier comment claimed H/S/V sat at indices 3-5; the verdict word
     * and the dash in the middle shift them to 5-7, matching the code below.)
     */
    try
    {
        // Turn the trailing comma into a space (int.Parse tolerates the
        // surrounding whitespace) and drop the leading channel letter.
        string B = parts[0].Replace(',', ' ').Substring(1);
        string G = parts[1].Replace(',', ' ').Substring(1);
        string R = parts[2].Replace(',', ' ').Substring(1);
        string H = parts[5].Replace(',', ' ').Substring(1);
        string S = parts[6].Replace(',', ' ').Substring(1);
        string V = parts[7].Replace(',', ' ').Substring(1);
        int b = int.Parse(B);
        int g = int.Parse(G);
        int r = int.Parse(R);
        int h = int.Parse(H);
        int s = int.Parse(S);
        int v = int.Parse(V);
        bgr = new Bgr(b, g, r);
        hsv = new Hsv(h, s, v);
    }
    catch (Exception)
    {
        // Unparseable filename: return defaults rather than aborting the run.
        bgr = new Bgr();
        hsv = new Hsv();
    }
}
/// <summary>
/// Per-frame webcam handler for a two-marker "steering wheel" controller:
/// locates the left and right colour markers in HSV space, derives a smoothed
/// steering angle, hop/brake/accelerate values, and optionally draws debug
/// overlays onto the camera texture. When either marker is missing, all
/// driving inputs are reset to neutral.
/// </summary>
private void GrabHandler(object sender, EventArgs e)
{
    if (!webcam.IsOpened)
    {
        return;
    }
    imgBgr = new Mat();
    webcam.Retrieve(imgBgr);
    imgIn = imgBgr.Clone();
    // Rebuild the HSV bounds every frame so live threshold tuning takes effect.
    leftMarkerMin = MakeHsv(leftHsvMin);
    rightMarkerMin = MakeHsv(rightHsvMin);
    leftMarkerMax = MakeHsv(leftHsvMax);
    rightMarkerMax = MakeHsv(rightHsvMax);
    CvInvoke.CvtColor(imgIn, imgIn, ColorConversion.Bgr2Hsv);
    CvInvoke.GaussianBlur(imgIn, imgIn, new Size(5, 5), 0); // suppress noise before thresholding
    var rectangle = GetColorRectangle(leftMarkerMin, leftMarkerMax, 0);
    var rectangle2 = GetColorRectangle(rightMarkerMin, rightMarkerMax, 1);
    PointF cl, cr;
    if (rectangle.HasValue && rectangle2.HasValue)
    {
        cl = rectangle.Value.Center;
        cr = rectangle2.Value.Center;
        // Calculate new angles average: angle of the line joining the two
        // marker centres, accumulated in a ring buffer and averaged.
        float angle = Mathf.Atan2(cr.Y - cl.Y, cr.X - cl.X) * Mathf.Rad2Deg;
        rotations.PushBack(angle);
        float angleAverage = 0.0f;
        foreach (var f in rotations.data)
        {
            angleAverage += f;
        }
        angleAverage /= rotations.curLength;
        SteeringAngle = angleAverage / 90; // normalise so ±90° maps to ±1
        // Tilting past ±90° means "hop"; edge-detect so it fires only once per tilt.
        bool hopping = SteeringAngle > 1 || SteeringAngle < -1;
        HopPressed = hopping && !HopHeld;
        HopHeld = hopping;
        // Calculate new delta-positions average (midpoint between the markers).
        Vector3 currentCenter = (new Vector2(cl.X, cl.Y) + new Vector2(cr.X, cr.Y)) / 2;
        positions.PushBack(currentCenter);
        // Bound both markers with a single rotated rectangle; its overall size
        // serves as a proxy for how close the "wheel" is to the camera.
        PointF[] array1 = rectangle.Value.GetVertices();
        PointF[] array2 = rectangle2.Value.GetVertices();
        PointF[] newArray = new PointF[array1.Length + array2.Length];
        Array.Copy(array1, newArray, array1.Length);
        Array.Copy(array2, 0, newArray, array1.Length, array2.Length);
        var boundRec = CvInvoke.MinAreaRect(newArray);
        var recSize = boundRec.Size.Height + boundRec.Size.Width;
        // Below the BrakeTr threshold we brake; above it the excess maps to throttle.
        var pedal = recSize - BrakeTr;
        Brake = pedal < 0;
        Accelerate = pedal / range;
        if (cam != null)
        {
            // Debug overlay: bounding rectangle plus the marker-to-marker line.
            DrawPointsFRectangle(boundRec.GetVertices(), imgBgr);
            CvInvoke.Line(imgBgr, new Point((int)rectangle.Value.Center.X, (int)rectangle.Value.Center.Y), new Point((int)rectangle2.Value.Center.X, (int)rectangle2.Value.Center.Y), new MCvScalar(0, 100, 0), 3, (LineType)8, 0);
            cam.texture = Utils.ConvertMatToTex2D(imgBgr, texture, texture.width, texture.height);
        }
        //DrawPointsFRectangle(boundRec.GetVertices(), imgBgr);
        //CvInvoke.Imshow("azeCam", imgBgr);
        //CvInvoke.WaitKey(24);
    }
    else
    {
        // One or both markers lost: release all controls.
        Accelerate = 0;
        Brake = false;
        HopHeld = false;
        HopPressed = false;
    }
}
/// <summary>
/// This function allows you to remove everything (change to black) in an
/// image apart from the colour you want (changed to white).
/// </summary>
/// <param name="src">
/// Image from which every colour apart from the target range is removed.
/// </param>
/// <param name="targetHsv">
/// The colour range to keep; low and high HSV bounds.
/// </param>
/// <returns>
/// The image with everything removed apart from the wanted colour range.
/// </returns>
protected Image<Bgr, Byte> RemoveEverythingButRange(Image<Bgr, Byte> src, Hsv[] targetHsv)
{
    Image<Hsv, Byte> working = src.Convert<Hsv, Byte>();

    // Replacement colours: in-range pixels become "white" (full value),
    // everything else becomes black.
    Hsv keepColor = new Hsv();
    keepColor.Hue = 0;
    keepColor.Value = 255;
    keepColor.Satuation = 0;

    Hsv dropColor = new Hsv();
    dropColor.Hue = 0;
    dropColor.Value = 0;
    dropColor.Satuation = 0;

    // Walk every pixel and replace it according to the range test.
    for (int col = 0; col < src.Cols; col++)
    {
        for (int row = 0; row < src.Rows; row++)
        {
            Hsv pixel = working[row, col];
            working[row, col] = InHSVRange(pixel, targetHsv, 0, 0) ? keepColor : dropColor;
        }
    }

    // Convert the mask image back to BGR for the caller.
    return working.Convert<Bgr, Byte>();
}
/// <summary>
/// This function uses the histogram to see if there is more than one colour
/// on the tablet we are looking at. Each of <paramref name="hue"/>,
/// <paramref name="sat"/>, <paramref name="val"/> is an array of [low, high]
/// spike ranges; a single spike per channel means one colour, otherwise the
/// first, last and middle spikes must all classify to the same known colour.
/// </summary>
/// <param name="hue">The hue spikes on the histogram.</param>
/// <param name="sat">The saturation spikes on the histogram.</param>
/// <param name="val">The value spikes on the histogram.</param>
/// <param name="circle">Not used.</param>
/// <param name="circles">Not used.</param>
/// <returns>
/// True if the tablet is OK and is a colour we know; false for an unknown
/// tablet or one overlapped by other tablets.
/// </returns>
protected bool FirstPass(int[][] hue, int[][] sat, int[][] val, CircleF circle, CircleF[] circles, Hsv[,] HSVTabletcolorsRanges)
{
    // No spikes in any channel: nothing to classify.
    if ((val.GetLength(0) == 0) || (sat.GetLength(0) == 0) || (hue.GetLength(0) == 0))
    {
        return false;
    }
    if ((hue.GetLength(0) == 1) && (sat.GetLength(0) == 1) && (val.GetLength(0) == 1))
    {
        // Exactly one spike per channel: classify the midpoint of each range.
        TabletColors a = detectcolor(new Hsv((hue[0][0] + hue[0][1]) / 2, (sat[0][0] + sat[0][1]) / 2, (val[0][0] + val[0][1]) / 2), HSVTabletcolorsRanges);
        if (a != TabletColors.Unknown)
            return true;
        else
            return false;
    }
    else
    {
        // Multiple spikes: classify the first spike...
        TabletColors b = detectcolor(new Hsv((hue[0][0] + hue[0][1]) / 2, (sat[0][0] + sat[0][1]) / 2, (val[0][0] + val[0][1]) / 2), HSVTabletcolorsRanges);
        // ...the last spike...
        int hueM = hue.GetLength(0) - 1;
        int satM = sat.GetLength(0) - 1;
        int valM = val.GetLength(0) - 1;
        TabletColors c = detectcolor(new Hsv((hue[hueM][0] + hue[hueM][1]) / 2, (sat[satM][0] + sat[satM][1]) / 2, (val[valM][0] + val[valM][1]) / 2), HSVTabletcolorsRanges);
        // ...and (when there are more than two spikes) the middle one.
        if (hue.GetLength(0) > 2)
        {
            hueM = ((hue.GetLength(0) - 1) / 2);
        }
        if (sat.GetLength(0) > 2)
        {
            satM = ((sat.GetLength(0) - 1) / 2);
        }
        if (val.GetLength(0) > 2)
        {
            valM = ((val.GetLength(0) - 1) / 2);
        }
        TabletColors d = detectcolor(new Hsv((hue[hueM][0] + hue[hueM][1]) / 2, (sat[satM][0] + sat[satM][1]) / 2, (val[valM][0] + val[valM][1]) / 2), HSVTabletcolorsRanges);
        // All three samples must agree on a known colour.
        if ((b != TabletColors.Unknown) && (b == c) && (b == d))
            return true;
        else
            return false;
    }
}
/// <summary>
/// This function tells us what colour the source image is: the image is
/// averaged in HSV space and the mean is tested against each known tablet
/// colour range in a fixed priority order.
/// </summary>
/// <param name="src">
/// Input image of which we want to determine the colour.
/// </param>
/// <param name="HSVTabletcolorRange">
/// 2d array containing all the colour ranges of tablets, with min and max
/// for each colour; uses the HSV colour range.
/// </param>
/// <returns>
/// Returns the colour of the tablet with enum TabletColors.
/// </returns>
/// <todo>
/// add the ol, oh into the function, this function will not be used in the future
/// </todo>
protected TabletColors detectcolor(Image<Bgr, byte> src, Hsv[,] HSVTabletcolorRange)
{
    // Fixed low/high tolerances (see todo: should become parameters).
    int ol = 5;
    int oh = 5;
    MCvScalar srcScalar;
    Hsv abc;
    // Dispose the temporary HSV conversion deterministically (it was leaked
    // before); AvgSdv gives us the mean colour of the whole image.
    using (Image<Hsv, byte> hsv = src.Convert<Hsv, byte>())
    {
        hsv.AvgSdv(out abc, out srcScalar);
    }

    // Idiom fix: replaced the repetitive "true == InHSVRange(...)" if/else
    // chain with a candidate loop in the same priority order.
    TabletColors[] candidates =
    {
        TabletColors.Green,
        TabletColors.Red,
        TabletColors.White,
        TabletColors.Blue,
        TabletColors.Black,
    };
    foreach (TabletColors candidate in candidates)
    {
        if (InHSVRange(abc, HSVTabletcolorRange, candidate, ol, oh))
        {
            return candidate;
        }
    }
    return TabletColors.Unknown;
}
/// <summary>
/// Builds a binary mask of the pixels in <paramref name="src"/> whose HSV
/// values lie between <paramref name="blue_min"/> and <paramref name="blue_max"/>.
/// </summary>
/// <param name="src">Source image in HSV space.</param>
/// <param name="blue_min">Lower HSV bound (inclusive).</param>
/// <param name="blue_max">Upper HSV bound (inclusive).</param>
/// <returns>Grayscale mask: 255 where in range, 0 elsewhere.</returns>
public Image<Gray, Byte> getBlueHsvMask(Image<Hsv, Byte> src, Hsv blue_min, Hsv blue_max)
{
    // Fix: the original pre-allocated a blank Image<Gray, byte> and then
    // immediately overwrote the reference with InRange's result, leaking the
    // allocation. InRange already returns a new, correctly sized mask.
    return src.InRange(blue_min, blue_max);
}
/// <summary>
/// TODO: This method needs a unit test.
/// This is an experimental method to explore colour rendering of standard
/// spectrograms. Used to convert a standard decibel spectrogram into a colour
/// version using a colour rendering for three separate properties: the raw
/// decibels drive a greyscale base layer, the noise-reduced decibels drive an
/// HSV hue ramp, and ridge hits override the pixel with fixed colours.
/// </summary>
/// <param name="dbSpectrogramData">the raw decibel spectrogram data - assigned to red channel</param>
/// <param name="nrSpectrogramData">the noise reduced decibel spectrogram data - assigned to green channel</param>
/// <param name="hits">assigned to ridge colours</param>
/// <returns>coloured-rendered spectrogram as image</returns>
public static Image<Rgb24> CreateFalseColourDecibelSpectrogram(double[,] dbSpectrogramData, double[,] nrSpectrogramData, byte[,] hits)
{
    // Normalise the raw spectrogram into [0,1] over the dB range [-120,-30].
    double truncateMin = -120.0;
    double truncateMax = -30.0;
    double filterCoefficient = 1.0;
    double[,] dbSpectrogramNorm = NormaliseSpectrogramMatrix(dbSpectrogramData, truncateMin, truncateMax, filterCoefficient);
    // Noise-reduced spectrogram is normalised over [0,60] dB instead.
    truncateMin = 0;
    truncateMax = 60;
    double[,] nrSpectrogramNorm = NormaliseSpectrogramMatrix(nrSpectrogramData, truncateMin, truncateMax, filterCoefficient);
    // Matrix is [time, freq]; time becomes image width, frequency height.
    int width = dbSpectrogramData.GetLength(0);
    int height = dbSpectrogramData.GetLength(1);
    Image<Rgb24> image = new Image<Rgb24>(width, height);
    var converter = new SixLabors.ImageSharp.ColorSpaces.Conversion.ColorSpaceConverter();
    // Ridge-hit palette, indexed by (hit value - 1).
    Color[] ridgeColours = { Color.Red, Color.DarkMagenta, Color.Black, Color.LightPink };
    // for all freq bins
    for (int y = 0; y < height; y++)
    {
        //for pixels in freq bin
        for (int x = 0; x < width; x++)
        {
            // NormaliseMatrixValues and bound the value - use min bound, max and 255 image intensity range
            double dbValue = dbSpectrogramNorm[x, y];
            // Inverted greyscale: louder raw signal renders darker.
            int c1 = 255 - (int)Math.Floor(255.0 * dbValue); //original version
            //int c1 = (int)Math.Floor(255.0 * dbValue);
            if (c1 < 0)
            {
                c1 = 0;
            }
            else if (c1 > 255)
            {
                c1 = 255;
            }
            var colour = Color.FromRgb((byte)c1, (byte)c1, (byte)c1);
            if (nrSpectrogramNorm[x, y] > 0)
            {
                // use HSV colour space
                int bottomColour = 30; // to avoid using the reds
                int topColour = 320; // to avoid using the magentas
                int hueRange = topColour - bottomColour;
                int hue = bottomColour + (int)Math.Floor(hueRange * nrSpectrogramNorm[x, y]);
                double saturation = 1.0;
                //double saturation = 0.75 + (nrSpectrogramNorm[x, y] * 0.25);
                //double saturation = nrSpectrogramNorm[x, y] * 0.5;
                //double saturation = (1 - nrSpectrogramNorm[x, y]) * 0.5;
                //Convert HSV color space to RGB
                // for this require instance of a SixLabors colour converter.
                var myHsv = new Hsv(hue, (float)saturation, 1.0f);
                var myRgb = converter.ToRgb(myHsv);
                // ImageSharp returns RGB components in [0,1]; scale to bytes.
                colour = Color.FromRgb((byte)(myRgb.R * 255), (byte)(myRgb.G * 255), (byte)(myRgb.B * 255));
                // get colour for noise reduced portion
                // superimpose ridge detection
                // Have experimented with a bunch of ideas
                if (hits[x, y] > 0)
                {
                    //value = 0.60 + (nrSpectrogramNorm[x, y] * 0.40);
                    //myHsv = new Hsv { H = 260, S = saturation, V = value };
                    //myRgb = myHsv.To<Rgb>();
                    //colour = Color.FromRgb((int)myRgb.R, (int)myRgb.G, (int)myRgb.B);
                    colour = ridgeColours[hits[x, y] - 1];
                }
            }
            // Flip vertically so low frequencies render at the image bottom.
            image[x, height - y - 1] = colour;
        }
    } // freq bins
    return (image);
}
/// <summary>
/// Converts a <see cref="Hsv"/> into a <see cref="CieXyy"/>.
/// </summary>
/// <param name="color">The color to convert.</param>
/// <returns>The <see cref="CieXyy"/>.</returns>
public CieXyy ToCieXyy(Hsv color)
{
    // Route through CIE XYZ, the shared intermediate colour space.
    return this.ToCieXyy(this.ToCieXyz(color));
}
// Conversion tool for reference:
// https://www.easyrgb.com/en/convert.php#inputFORM
// Supplies named reference colours with their expected representation in
// every supported colour space; each yield is one xUnit MemberData row.
public IEnumerable<object[]> GetData(MethodInfo methodInfo)
{
    yield return (new object[] { new ColorData { Name = "Black", Rgb = new Rgb(0, 0, 0), Hsl = new Hsl(0, 0, 0), Hsv = new Hsv(0, 0, 0), Cmyk = new Cmyk(0, 0, 0, 1), Xyz = new Xyz(0, 0, 0), Lab = new Lab(0, 0, 0) } });
    yield return (new object[] { new ColorData { Name = "White", Rgb = new Rgb(1, 1, 1), Hsl = new Hsl(0, 0, 1), Hsv = new Hsv(0, 0, 1), Cmyk = new Cmyk(0, 0, 0, 0), Xyz = new Xyz(95.047, 100, 108.883), Lab = new Lab(100, 0, 0) } });
    yield return (new object[] { new ColorData { Name = "Red", Rgb = new Rgb(1, 0, 0), Hsl = new Hsl(0, 1, 0.5), Hsv = new Hsv(0, 1, 1), Cmyk = new Cmyk(0, 1, 1, 0), Xyz = new Xyz(41.2456, 21.2673, 1.9334), Lab = new Lab(53.2408, 80.0925, 67.2032) } });
    // Green and Blue use the FromScaledValues factories (degrees / percent)
    // rather than the raw [0,1] constructors used above.
    yield return (new object[] { new ColorData { Name = "Green", Rgb = new Rgb(0, 1, 0), Hsl = Hsl.FromScaledValues(120, 100, 50), Hsv = Hsv.FromScaledValues(120, 100, 100), Cmyk = new Cmyk(1, 0, 1, 0), Xyz = new Xyz(35.7576, 71.5152, 11.9192), Lab = new Lab(87.7347, -86.1827, 83.1793) } });
    yield return (new object[] { new ColorData { Name = "Blue", Rgb = new Rgb(0, 0, 1), Hsl = Hsl.FromScaledValues(240, 100, 50), Hsv = Hsv.FromScaledValues(240, 100, 100), Cmyk = new Cmyk(1, 1, 0, 0), Xyz = new Xyz(18.0437, 7.2175, 95.0304), Lab = new Lab(32.297, 79.1875, -107.8602) } });
    // Non-primary colour with fractional components in every space.
    yield return (new object[] { new ColorData { Name = "Material Blue", Rgb = Rgb.FromScaledValues(33, 150, 243), Hsl = Hsl.FromScaledValues(206.5716, 89.744, 54.118), Hsv = Hsv.FromScaledValues(206.5716, 86.42, 95.294), Cmyk = Cmyk.FromScaledValues(86.419753, 38.271604, 0, 4.705882), Xyz = new Xyz(27.704960365063513, 28.60350077011483, 88.83745176406208), Lab = new Lab(60.4301, 2.0799, -55.1094) } });
    // Regression row for a reported HEX -> HSL/HSV conversion issue.
    yield return (new object[] { new ColorData { Name = "Purple HEX to HSL/HSV issue", Rgb = Rgb.FromScaledValues(116, 58, 111), Hsl = Hsl.FromScaledValues(305.172, 33.333, 34.118), Hsv = Hsv.FromScaledValues(305.172, 50, 45.49), Cmyk = Cmyk.FromScaledValues(0, 50, 4.310, 54.510), Xyz = new Xyz(11.5846, 7.8875, 15.9481), Lab = new Lab(33.7475, 33.4761, -19.6542) } });
}
/// <summary>
/// Converts a <see cref="Hsv"/> into a <see cref="HunterLab"/>.
/// </summary>
/// <param name="color">The color to convert.</param>
/// <returns>The <see cref="HunterLab"/>.</returns>
public HunterLab ToHunterLab(Hsv color) =>
    // Route through CIE XYZ, the shared intermediate colour space.
    this.ToHunterLab(this.ToCieXyz(color));
/// <summary>
/// Streams the systems JSON, bins each system's (x, z) galactic position into
/// the <c>sysmap</c> density grid, then renders the grid to a PNG using the
/// rendering mode selected in <c>settings</c> and opens the result.
/// </summary>
void ParseJSON()
{
    #region description
    /* How the JSON is set up:
     *
     * [...]
     * x : VALUE
     * y : VALUE
     * z : VLAUE
     * [...]
     *
     * We need x and z.
     * The StreamReader reads it like this:
     * x
     * VALUE
     * y
     * VALUE
     * z
     * VALUE
     * .. So we look for "x" only (skiptoX = true), if we find it we use the next reading iteration for its value (thats why there is the boolean "lastX"), skip 3 iterations (for y ; VALUE; z;) and take the next value.
     */
    #endregion
    bool skiptoX = true, lastX = false;
    byte count = 0;
    long x = 0, y = 0;
    JsonTextReader streamReader = new JsonTextReader(new StreamReader(settings.Path_json));
    while (streamReader.Read())
    {
        if (streamReader.Value != null)
        {
            if (skiptoX)
            {
                // Hunting for the next "x" property name.
                if (String.Equals(streamReader.Value, "x"))
                {
                    lastX = true;
                    skiptoX = false;
                }
            }
            else if (lastX)
            {
                // This token is x's value; next we must skip y and the z name.
                x = Convert.ToInt64(streamReader.Value);
                lastX = false;
                count = 1;
            }
            else if (count > 0 && count < 4) // Count 3 Iterations (we starts with count = 1)
            {
                count++;
            }
            else if (count == 4) // After 3 Iterations, we read the value and pass it and x to AddToBitmap method while displaying the process on the Console.
            {
                y = Convert.ToInt64(streamReader.Value);
                skiptoX = true;
                count = 0;
                // Scale light-years to pixels and apply the image offsets.
                int _x = ((int)x / settings.Ly_to_px + settings.Img_X_offset);
                int _y = ((int)y / settings.Ly_to_px + settings.Img_Y_offset);
                if (_x > 0 && _y > 0 && _x < settings.Img_Xres && _y < settings.Img_Yres)
                {
                    // Vertical flip around the image centre while binning.
                    sysmap[_x - 1][(_y - 1) + 2 * (settings.Img_Yres / 2 - _y)]++;
                }
                counter++;
                // Periodic progress report marshalled onto the UI thread.
                if (counter % 10045 == 0)
                {
                    Dispatcher.BeginInvoke((Action)(() => { Write($"{counter} Systems parsed."); }));
                }
            }
        }
    }
    Dispatcher.BeginInvoke((Action)(() => { Write($"{counter} Systems parsed. \n Applying to Bitmap."); }));
    // Plot 2dArray onto bitmap, using selected method
    bitmap = new FastBitmap((int)settings.Img_Xres, (int)settings.Img_Yres);
    /*
     * 1:Greyscale
     * 2:Hue
     * 3:Hue+Value
     * 4:r
     * 5:g
     * 6:b
     * 7:a
     * NOTE(review): mode 7 ("a") has no branch below; modes 0/1 share greyscale.
     */
    #region using Greyscale
    if (settings.Rendering == 1 || settings.Rendering == 0)
    {
        int index = 0;
        bitmap.Clear(FastColor.black);
        for (int iteration_x = 0; iteration_x < sysmap.Length; iteration_x++)
        {
            for (int iteration_y = 0; iteration_y < sysmap[iteration_x].Length; iteration_y++)
            {
                // Density -> brightness, saturated at 255.
                uint value = Convert.ToUInt32(sysmap[iteration_x][iteration_y] * settings.Systems_per_ColorVal);
                byte greyscale;
                if (value > 255) { greyscale = 255; } else { greyscale = (byte)value; }
                bitmap.SetPixel(iteration_x + 1, iteration_y + 1, new FastColor(greyscale, greyscale, greyscale));
            }
        }
        // NOTE(review): index++ sits outside the loops in every branch, so this
        // progress report can never fire — presumably meant to be per-pixel.
        index++;
        if (index % 10045 == 0) { Dispatcher.BeginInvoke((Action)(() => { Write($"{index} pixels set."); })); }
    }
    #endregion
    #region using Hue(but no Value)
    else if (settings.Rendering == 2)
    {
        int index = 0;
        bitmap.Clear(FastColor.black);
        for (int iteration_x = 0; iteration_x < sysmap.Length; iteration_x++)
        {
            for (int iteration_y = 0; iteration_y < sysmap[iteration_x].Length; iteration_y++)
            {
                uint value = Convert.ToUInt32(sysmap[iteration_x][iteration_y] * settings.Systems_per_ColorVal);
                if (value > 180) { value = value - 180; } // MOve Hue by 180 Degreen so blue is 0 and Red is maximum
                else { value = value + 180; }
                // NOTE(review): value was already shifted by 180 above, then
                // (value + 180) / 2 shifts again — confirm the intended hue mapping.
                var color = new Hsv((value + 180) / 2, 1, 1).ToRgb();
                bitmap.SetPixel(iteration_x + 1, iteration_y + 1, new FastColor((byte)color.R, (byte)color.G, (byte)color.B));
            }
        }
        index++;
        if (index % 10045 == 0) { Dispatcher.BeginInvoke((Action)(() => { Write($"{index} pixels set."); })); }
    }
    #endregion
    #region using Hue and Value
    else if (settings.Rendering == 3)
    {
        int index = 0;
        bitmap.Clear(FastColor.black);
        for (int iteration_x = 0; iteration_x < sysmap.Length; iteration_x++)
        {
            for (int iteration_y = 0; iteration_y < sysmap[iteration_x].Length; iteration_y++)
            {
                uint value = Convert.ToUInt32(sysmap[iteration_x][iteration_y] * settings.Systems_per_ColorVal);
                uint value_greyscale;
                // NOTE(review): the mid-band caps at 225, not 255 — possible typo.
                if (value > 360) { value_greyscale = 255; }
                else if (value > 255) { value_greyscale = 225; }
                else { value_greyscale = value; }
                var color = new Hsv((value + 180) / 2, 1, value_greyscale).ToRgb();
                bitmap.SetPixel(iteration_x + 1, iteration_y + 1, new FastColor((byte)color.R, (byte)color.G, (byte)color.B));
            }
        }
        index++;
        if (index % 10045 == 0) { Dispatcher.BeginInvoke((Action)(() => { Write($"{index} pixels set."); })); }
    }
    #endregion
    #region using Red
    else if (settings.Rendering == 4)
    {
        int index = 0;
        bitmap.Clear(FastColor.black);
        for (int iteration_x = 0; iteration_x < sysmap.Length; iteration_x++)
        {
            for (int iteration_y = 0; iteration_y < sysmap[iteration_x].Length; iteration_y++)
            {
                uint value = Convert.ToUInt32(sysmap[iteration_x][iteration_y] * settings.Systems_per_ColorVal);
                byte greyscale;
                if (value > 255) { greyscale = 255; } else { greyscale = (byte)value; }
                bitmap.SetPixel(iteration_x + 1, iteration_y + 1, new FastColor(greyscale, 0, 0));
            }
        }
        index++;
        if (index % 10045 == 0) { Dispatcher.BeginInvoke((Action)(() => { Write($"{index} pixels set."); })); }
    }
    #endregion
    #region using Green
    else if (settings.Rendering == 5)
    {
        int index = 0;
        bitmap.Clear(FastColor.black);
        for (int iteration_x = 0; iteration_x < sysmap.Length; iteration_x++)
        {
            for (int iteration_y = 0; iteration_y < sysmap[iteration_x].Length; iteration_y++)
            {
                uint value = Convert.ToUInt32(sysmap[iteration_x][iteration_y] * settings.Systems_per_ColorVal);
                byte greyscale;
                if (value > 255) { greyscale = 255; } else { greyscale = (byte)value; }
                bitmap.SetPixel(iteration_x + 1, iteration_y + 1, new FastColor(0, greyscale, 0));
            }
        }
        index++;
        if (index % 10045 == 0) { Dispatcher.BeginInvoke((Action)(() => { Write($"{index} pixels set."); })); }
    }
    #endregion
    #region using Blue
    else if (settings.Rendering == 6)
    {
        int index = 0;
        bitmap.Clear(FastColor.black);
        for (int iteration_x = 0; iteration_x < sysmap.Length; iteration_x++)
        {
            for (int iteration_y = 0; iteration_y < sysmap[iteration_x].Length; iteration_y++)
            {
                uint value = Convert.ToUInt32(sysmap[iteration_x][iteration_y] * settings.Systems_per_ColorVal);
                byte greyscale;
                if (value > 255) { greyscale = 255; } else { greyscale = (byte)value; }
                bitmap.SetPixel(iteration_x + 1, iteration_y + 1, new FastColor(0, 0, greyscale));
            }
        }
        index++;
        if (index % 10045 == 0) { Dispatcher.BeginInvoke((Action)(() => { Write($"{index} pixels set."); })); }
    }
    #endregion
    // Save to a timestamped PNG, release the large buffers, and show the result.
    string file = settings.Path_output + "/" + DateTime.Now.ToString("yyyy-MM-dd HH-mm-ss") + ".png";
    bitmap.Save(file);
    bitmap = null;
    sysmap = null;
    Dispatcher.BeginInvoke((Action)(() => { Write($"Image was sucessfully generated."); loadingicon.Visibility = Visibility.Hidden; }));
    System.Diagnostics.Process.Start(file); // Display image with default Image Displaying Software
}
// A scaled value outside [0, 100] must clamp to the nearest bound.
public void Value_ShouldBeClamped()
{
    Hsv.FromScaledValues(120, 0, -1).ScaledValue.ShouldBe(0);
    Hsv.FromScaledValues(120, 0, 101).ScaledValue.ShouldBe(100);
}
// A scaled saturation outside [0, 100] must clamp to the nearest bound.
public void Saturation_ShouldBeClamped()
{
    Hsv.FromScaledValues(120, -1, 100).ScaledSaturation.ShouldBe(0);
    Hsv.FromScaledValues(120, 101, 100).ScaledSaturation.ShouldBe(100);
}
// Detects Skin. Takes an image and returns a binary image with the background removed.
// Skin is white and everything else will be black.
// A pixel counts as skin only when all three rules agree: R1 (RGB),
// R2 (YCrCb) and R3 (HSV) — each rule is defined elsewhere in this class.
public static Image<Bgr, Byte> GetSkin(Image<Bgr, Byte> image)
{
    // allocate the result matrix (clone so skin pixels keep their colour
    // until explicitly overwritten below)
    Image<Bgr, Byte> dst = image.Clone();
    Bgr cwhite = new Bgr(255, 255, 255); //Vec3b::all(255);
    Bgr cblack = new Bgr(0, 0, 0); //Vec3b::all(0);
    Image<Ycc, Byte> src_ycrcb = new Image<Ycc, Byte>(dst.Width, dst.Height); // = new Mat();
    Image<Hsv, Single> src_hsv = new Image<Hsv, Single>(dst.Width, dst.Height); // = new Mat();
    // OpenCV scales the YCrCb components, so that they
    // cover the whole value range of [0,255], so there's
    // no need to scale the values:
    Emgu.CV.CvInvoke.CvtColor(image, src_ycrcb, Emgu.CV.CvEnum.ColorConversion.Bgr2YCrCb);
    // OpenCV scales the Hue Channel to [0,180] for
    // 8bit images, so make sure we are operating on
    // the full spectrum from [0,360] by using floating
    // point precision:
    //image.ConvertTo(src_hsv, Emgu.CV.CvEnum.DepthType.Cv32F);
    Emgu.CV.CvInvoke.CvtColor(image, src_hsv, Emgu.CV.CvEnum.ColorConversion.Bgr2Hsv); // src_hsv to image
    // Now scale the values between [0,255]:
    Emgu.CV.CvInvoke.Normalize(src_hsv, src_hsv, 0.0, 255.0, Emgu.CV.CvEnum.NormType.MinMax, Emgu.CV.CvEnum.DepthType.Cv32F);
    // Evaluate all three skin rules per pixel.
    for (int i = 0; i < image.Rows; i++)
    {
        for (int j = 0; j < image.Cols; j++)
        {
            //Vec3b pix_bgr = src.ptr<Vec3b>(i)[j];
            Bgr pix_bgr = image[i, j];
            int B = (int)pix_bgr.Blue;
            int G = (int)pix_bgr.Green;
            int R = (int)pix_bgr.Red;
            // apply rgb rule
            bool a = R1(R, G, B);
            //Vec3b pix_ycrcb = src_ycrcb.ptr<Vec3b>(i)[j];
            Ycc pix_ycrcb = src_ycrcb[i, j];
            float Y = (float)pix_ycrcb.Y;
            float Cr = (float)pix_ycrcb.Cr;
            float Cb = (float)pix_ycrcb.Cb;
            // apply ycrcb rule
            bool b = R2(Y, Cr, Cb);
            Hsv pix_hsv = src_hsv[i, j];
            float H = (float)pix_hsv.Hue;
            float S = (float)pix_hsv.Satuation; // Emgu's misspelled saturation property
            float V = (float)pix_hsv.Value;
            // apply hsv rule
            bool c = R3(H, S, V);
            if (!(a && b && c))
            {
                dst[i, j] = cblack;
            }
            else
            {
                dst[i, j] = cwhite; // Make skin white
            }
        }
    }
    return (dst);
}
/// <summary>
/// Reads the dominant colour of an image (decided by largest masked area):
/// builds an HSV range mask per known colour, sums the contour areas of each
/// mask, and returns the name of the colour covering the most area.
/// </summary>
/// <param name="src">Image</param>
/// <returns>String color ("white", "Orange", "green", "blue", or "none")</returns>
public static string getcolor(Image<Bgr, byte> src)
{
    // Define the HSV range for each colour of interest.
    String rlt = "";
    Dictionary<string, Hsv[]> colorrange = new Dictionary<string, Hsv[]>();
    Hsv[] ListColor = null;
    ////black (disabled)
    //Hsv blacklowerLimit = new Hsv(0, 0, 0);
    //Hsv blackupperLimit = new Hsv(180, 255, 50);
    //ListColor = new Hsv[] { blacklowerLimit, blackupperLimit };
    //colorrange.Add("black", ListColor);
    ////gray (disabled)
    //Hsv graylowerLimit = new Hsv(0, 0, 50);
    //Hsv grayupperLimit = new Hsv(180, 43, 220);
    //ListColor = new Hsv[] { graylowerLimit, grayupperLimit };
    //colorrange.Add("gray", ListColor);
    //white
    Hsv whitelowerLimit = new Hsv(0, 0, 221);
    Hsv whiteupperLimit = new Hsv(180, 40, 255);
    ListColor = new Hsv[] { whitelowerLimit, whiteupperLimit };
    colorrange.Add("white", ListColor);
    //Orange
    Hsv OrangelowerLimit = new Hsv(8, 40, 50);
    Hsv OrangewupperLimit = new Hsv(34, 255, 255);
    ListColor = new Hsv[] { OrangelowerLimit, OrangewupperLimit };
    colorrange.Add("Orange", ListColor);
    //green
    Hsv greenlowerLimit = new Hsv(33, 40, 50);
    Hsv greenupperLimit = new Hsv(87, 255, 255);
    ListColor = new Hsv[] { greenlowerLimit, greenupperLimit };
    colorrange.Add("green", ListColor);
    //blue
    Hsv bluelowerLimit = new Hsv(90, 40, 50);
    Hsv blueupperLimit = new Hsv(135, 255, 255);
    ListColor = new Hsv[] { bluelowerLimit, blueupperLimit };
    colorrange.Add("blue", ListColor);
    // Compute the masked area for each colour and keep the largest one.
    Image<Hsv, Byte> hsvsrc = src.Clone().Convert<Hsv, Byte>();
    double maxsumArea = 0;
    String colorD = "none";
    foreach (var item in colorrange)
    {
        Image<Gray, Byte> mask_hsv = hsvsrc.InRange(item.Value[0], item.Value[1]);
        Image<Gray, Byte> ThB = null;
        ThB = mask_hsv.ThresholdBinary(new Gray(127), new Gray(255));
        //Dilate the image to close small gaps before contour extraction
        Image<Gray, Byte> dilate = ThB.Dilate(2);
        VectorOfVectorOfPoint con = new VectorOfVectorOfPoint();
        // NOTE(review): 'src' is passed where FindContours takes the hierarchy
        // output — it compiles (IOutputArray) but overwrites the source image;
        // confirm this is intentional.
        CvInvoke.FindContours(dilate, con, src, RetrType.External, ChainApproxMethod.ChainApproxSimple);
        double sumarea = 0;
        for (int i = 0; i < con.Size; i++)
        {
            // Take each connected contour...
            VectorOfPoint contour = con[i];
            // ...and accumulate its area for this colour.
            sumarea = sumarea + CvInvoke.ContourArea(contour);
        }
        if (sumarea > maxsumArea)
        {
            colorD = item.Key;
            maxsumArea = sumarea;
        }
    }
    //Console.WriteLine("color::"+colorD);
    rlt = colorD;
    hsvsrc.Dispose();
    return (rlt);
}
// A scaled hue outside [0, 360] must clamp to the nearest bound.
// Consistency fix: the upper-bound case previously used the raw constructor
// (new Hsv(360.01, 100, 100)) while this test and its siblings
// (Saturation/Value_ShouldBeClamped) exercise Hsv.FromScaledValues; both
// assertions now test clamping of the same factory.
public void Hue_ShouldBeClamped()
{
    Hsv.FromScaledValues(-0.01, 100, 100).ScaledHue.ShouldBe(0);
    Hsv.FromScaledValues(360.01, 100, 100).ScaledHue.ShouldBe(360);
}
/// <summary>
/// Converts an HSV colour to a <see cref="Color"/> by converting to RGB
/// first and carrying the alpha channel across.
/// </summary>
/// <param name="hsv">Source HSV colour.</param>
/// <returns>The equivalent ARGB colour.</returns>
public static Color HsvToColor(Hsv hsv)
{
    var rgb = HsvToRgb(hsv);
    return Color.FromArgb(rgb.Alpha, rgb.Red, rgb.Green, rgb.Blue);
}
/// <summary>
/// Per-frame callback: thresholds the camera frame in HSV space inside a fixed
/// 435x435 ROI, erodes the mask, and locates the tracked ball via image moments.
/// Updates PosX/PosY (scaled by 51/73 — presumably a pixels-to-physical-units
/// calibration; confirm against the rig) and pushes the annotated frame and the
/// binary mask to the UI image boxes.
/// Fixes over the original: removed dead locals (imgGray2, contours, hier and a
/// pre-allocated imgThresholded that leaked) and disposes all Emgu resources.
/// </summary>
private void ProcessFrame(object sender, EventArgs e)
{
    try
    {
        _capture.Retrieve(frame);

        System.Drawing.Point pCenter = new System.Drawing.Point(frame.Width / 2, frame.Height / 2);
        System.Drawing.Rectangle RectROI = new System.Drawing.Rectangle(pCenter.X - 215, pCenter.Y - 225, 435, 435);

        using (Image<Hsv, byte> imgHSV = new Image<Hsv, byte>(_capture.Width, _capture.Height))
        {
            // NOTE(review): capture frames are normally BGR, yet the conversion is
            // Rgb2Hsv (swapping R and B). The Hue thresholds below were presumably
            // tuned against this conversion — verify before changing it to Bgr2Hsv.
            CvInvoke.CvtColor(frame, imgHSV, ColorConversion.Rgb2Hsv);

            // Visualise the ROI and a crosshair at the frame centre.
            CvInvoke.Rectangle(frame, RectROI, new MCvScalar(0, 255, 0), 2);
            CvInvoke.Line(frame, new System.Drawing.Point(pCenter.X - 30, pCenter.Y), new System.Drawing.Point(pCenter.X + 30, pCenter.Y), new MCvScalar(255, 0, 0), 2);
            CvInvoke.Line(frame, new System.Drawing.Point(pCenter.X, pCenter.Y - 30), new System.Drawing.Point(pCenter.X, pCenter.Y + 30), new MCvScalar(255, 0, 0), 2);

            // Restrict further processing to the ROI.
            imgHSV.ROI = RectROI;
            int nOffSetX = RectROI.X;
            int nOffSetY = RectROI.Y;

            Hsv hsv_min = new Hsv(HueLow, SatLow, ValLow); //just care about color channel
            Hsv hsv_max = new Hsv(HueHigh, SatHigh, ValHigh);

            using (Image<Gray, byte> imgThresholded = imgHSV.InRange(hsv_min, hsv_max))
            using (Mat kernel1 = CvInvoke.GetStructuringElement(Emgu.CV.CvEnum.ElementShape.Ellipse, new System.Drawing.Size(10, 10), new System.Drawing.Point(1, 1)))
            using (Image<Gray, byte> tmp2 = imgThresholded.MorphologyEx(MorphOp.Erode, kernel1, new System.Drawing.Point(-1, -1), 1, BorderType.Default, new MCvScalar()))
            {
                var M = CvInvoke.Moments(tmp2);
                dArea = M.M00;

                // Only accept a detection with enough mask area (noise rejection);
                // this also guards the divisions by dArea below.
                if (dArea > 5000)
                {
                    int posBallX = (int)(M.M10 / dArea);
                    int posBallY = (int)(M.M01 / dArea);

                    PosX = (posBallX - pCenter.X + nOffSetX) * 51 / 73;
                    PosY = -(posBallY - pCenter.Y + nOffSetY) * 51 / 73;

                    // Red crosshair on the detected ball (full-frame coordinates).
                    CvInvoke.Line(frame, new System.Drawing.Point(posBallX - 30 + nOffSetX, posBallY + nOffSetY), new System.Drawing.Point(posBallX + 30 + nOffSetX, posBallY + nOffSetY), new MCvScalar(0, 0, 255), 2);
                    CvInvoke.Line(frame, new System.Drawing.Point(posBallX + nOffSetX, posBallY - 30 + nOffSetY), new System.Drawing.Point(posBallX + nOffSetX, posBallY + 30 + nOffSetY), new MCvScalar(0, 0, 255), 2);
                }

                // Camera callback is not on the UI thread; Invoke is synchronous,
                // so tmp2 is still alive while the bitmap is created.
                img_Raw_box.Dispatcher.Invoke(() => { img_Raw_box.Source = BitmapSourceConvert.ToBitmapSource(frame); });
                img_Bin_box.Dispatcher.Invoke(() => { img_Bin_box.Source = BitmapSourceConvert.ToBitmapSource(tmp2); });
            }
        }
    }
    catch (Exception exception)
    {
        System.Windows.MessageBox.Show(exception.ToString());
    }
}
public static Argb HsvToRgb(Hsv hsv) { // HSV contains values scaled as in the color wheel: // that is, all from 0 to 255. // for ( this code to work, HSV.Hue needs // to be scaled from 0 to 360 (it//s the angle of the selected // point within the circle). HSV.Saturation and HSV.value must be // scaled to be between 0 and 1. double r = 0; double g = 0; double b = 0; // Scale Hue to be between 0 and 360. Saturation // and value scale to be between 0 and 1. var h = ((double)hsv.Hue / 255 * 360) % 360; var s = (double)hsv.Saturation / 255; var v = (double)hsv.Value / 255; if (Math.Abs(s) < 0.01) { // If s is 0, all colors are the same. // This is some flavor of gray. r = v; g = v; b = v; } else { // The color wheel consists of 6 sectors. // Figure out which sector you//re in. var sectorPos = h / 60; var sectorNumber = (int)(Math.Floor(sectorPos)); // get the fractional part of the sector. // That is, how many degrees into the sector // are you? var fractionalSector = sectorPos - sectorNumber; // Calculate values for the three axes // of the color. var p = v * (1 - s); var q = v * (1 - (s * fractionalSector)); var t = v * (1 - (s * (1 - fractionalSector))); // Assign the fractional colors to r, g, and b // based on the sector the angle is in. switch (sectorNumber) { case 0: r = v; g = t; b = p; break; case 1: r = q; g = v; b = p; break; case 2: r = p; g = v; b = t; break; case 3: r = p; g = q; b = v; break; case 4: r = t; g = p; b = v; break; case 5: r = v; g = p; b = q; break; } } // return an RGB structure, with values scaled // to be between 0 and 255. return(new Argb(hsv.Alpha, (int)(r * 255), (int)(g * 255), (int)(b * 255))); }
} //Sonogram2Image()

/// <summary>
/// Renders a false-colour spectrogram bitmap. The dB spectrogram supplies a
/// greyscale background; where the noise-reduced spectrogram is positive, the
/// pixel is recoloured on an HSV hue ramp (hue 30..320, avoiding reds and
/// magentas), and ridge-detection hits override both with fixed colours.
/// The image is vertically flipped so frequency bin 0 is at the bottom.
/// </summary>
/// <param name="dbSpectrogramData">Decibel spectrogram, [time, freq].</param>
/// <param name="nrSpectrogramData">Noise-reduced spectrogram, [time, freq].</param>
/// <param name="hits">Ridge hits per pixel; 0 = none, 1..4 index ridgeColours.</param>
/// <returns>The composed bitmap (width = time frames, height = freq bins).</returns>
public static Image CreateFalseColourDecibelSpectrogram(double[,] dbSpectrogramData, double[,] nrSpectrogramData, byte[,] hits)
{
    // Normalise the dB background into 0..1 over the -120..-30 dB window.
    double truncateMin = -120.0;
    double truncateMax = -30.0;
    double filterCoefficient = 1.0;
    double[,] dbSpectrogramNorm = NormaliseSpectrogramMatrix(dbSpectrogramData, truncateMin, truncateMax, filterCoefficient);
    // Normalise the noise-reduced layer over a 0..60 window.
    truncateMin = 0;
    truncateMax = 60;
    double[,] nrSpectrogramNorm = NormaliseSpectrogramMatrix(nrSpectrogramData, truncateMin, truncateMax, filterCoefficient);
    int width = dbSpectrogramData.GetLength(0);
    int height = dbSpectrogramData.GetLength(1);
    Bitmap image = new Bitmap(width, height);
    // Ridge colours indexed by (hits[x, y] - 1).
    Color[] ridgeColours = { Color.Red, Color.DarkMagenta, Color.Black, Color.LightPink };
    for (int y = 0; y < height; y++) //over all freq bins
    {
        for (int x = 0; x < width; x++) //for pixels in the line
        {
            // NormaliseMatrixValues and bound the value - use min bound, max and 255 image intensity range
            double dbValue = dbSpectrogramNorm[x, y];
            // Inverted greyscale: louder (higher dbValue) -> darker pixel.
            int c1 = 255 - (int)Math.Floor(255.0 * dbValue); //original version
            //int c1 = (int)Math.Floor(255.0 * dbValue);
            if (c1 < 0)
            {
                c1 = 0;
            }
            else if (c1 > 255)
            {
                c1 = 255;
            }

            var colour = Color.FromArgb(c1, c1, c1);
            if (nrSpectrogramNorm[x, y] > 0)
            {
                // use HSV colour space
                int bottomColour = 30;  // to avoid using the reds
                int topColour = 320;    // to avoid using the magentas
                int hueRange = topColour - bottomColour;
                int hue = bottomColour + (int)Math.Floor(hueRange * nrSpectrogramNorm[x, y]);

                double saturation = 1.0;
                //double saturation = 0.75 + (nrSpectrogramNorm[x, y] * 0.25);
                //double saturation = nrSpectrogramNorm[x, y] * 0.5;
                //double saturation = (1 - nrSpectrogramNorm[x, y]) * 0.5;

                double value = 1.0;
                //double value = 0.60 + (nrSpectrogramNorm[x, y] * 0.40);

                var myHsv = new Hsv { H = hue, S = saturation, V = value };
                var myRgb = myHsv.To<Rgb>();
                colour = Color.FromArgb((int)myRgb.R, (int)myRgb.G, (int)myRgb.B); // get colour for noise reduced portion

                // superimpose ridge detection
                if (hits[x, y] > 0)
                {
                    //value = 0.60 + (nrSpectrogramNorm[x, y] * 0.40);
                    //myHsv = new Hsv { H = 260, S = saturation, V = value };
                    //myRgb = myHsv.To<Rgb>();
                    //colour = Color.FromArgb((int)myRgb.R, (int)myRgb.G, (int)myRgb.B);
                    colour = ridgeColours[hits[x, y] - 1];
                }
            }

            // Flip vertically so low frequencies render at the bottom.
            image.SetPixel(x, height - y - 1, colour);
        }
    } //end over all freq bins

    //image.Save(@"C:\SensorNetworks\Output\Sonograms\TEST3.png", ImageFormat.Png);
    return(image);
}
private void Filter() { // Create thresholds Hsv threshold_lower = new Hsv(Color_spot.Hue - 25, 100, 100); Hsv threshold_higher = new Hsv(Color_spot.Hue + 25, 240, 240); // Blur image and find colors between thresholds Image_filtered = Image_transformed.Convert<Hsv, Byte>().SmoothBlur(20, 20).InRange(threshold_lower, threshold_higher); // Increase size of the spot and remove possible hole where color was too bright Image_filtered = Image_filtered.Dilate(5); // Decrease size again a little, makes it smoother Image_filtered = Image_filtered.Erode(3); }
/// <summary>
/// Detects surface defects in the annulus between two concentric circles.
/// The annulus is unwrapped into a rectangular "linear" image (polar-to-cartesian
/// with bilinear sampling), column/row statistics are computed, and pixels whose
/// normalised deviation falls below <paramref name="threshold"/> are marked red.
/// </summary>
/// <param name="src">Source image; sampled (via a grey copy) for intensities.</param>
/// <param name="dest">Output image; defect pixels are painted red in place.</param>
/// <param name="closeCircle">closeCircle[0] = outer (upper) circle, closeCircle[1] = inner (lower) circle.</param>
/// <param name="threshold">Deviation threshold; values below it count as defects.</param>
/// <param name="option">0 = return the false-colour "rainbow" map, otherwise return the normalised strip image.</param>
/// <returns>The selected visualisation, null if the radii differ by less than 5,
/// or <paramref name="dest"/> unchanged if an exception occurred.</returns>
public Image <Bgr, Byte> ALLDefectDetection(Image <Bgr, Byte> src, Image <Bgr, Byte> dest, CircleF[] closeCircle, double threshold = -0.87, int option = 0)
{
    /*
     * Purpose : detection Defect used
     * closeCircle[0]=upper close circle
     * closeCircle[1]=lower close circle
     */
    //coordinate
    // Annulus too thin to analyse.
    if (closeCircle[0].Radius - closeCircle[1].Radius < 5)
    {
        return(null);
    }

    Image <Gray, Byte> grayImage = src.Convert <Gray, Byte>();
    int offset = 50; // extra columns sampled inside the inner radius
    double Cx = closeCircle[0].Center.X, Cy = closeCircle[0].Center.Y, RadiusUpper = closeCircle[0].Radius, RadiusLower = closeCircle[1].Radius;
    double xp = 0, yp = 0, thet = 0;
    double xf = 0, yf = 0;
    try
    {
        // Unwrapped image: width = radial extent (+offset), height = outer circumference.
        Image <Gray, Byte> linearImage = new Image <Gray, byte>((int)(RadiusUpper - RadiusLower + offset), (int)(2 * Math.PI * RadiusUpper));
        for (int rq = 0; rq < linearImage.Width; rq++)
        {
            for (int hq = 0; hq < linearImage.Height; hq++)
            {
                // Map (radius rq, angle hq) back to source coordinates.
                thet = (double)hq / RadiusUpper;
                xp = Cx + (rq + RadiusLower - offset) * Math.Cos(thet);
                yp = Cy - (rq + RadiusLower - offset) * Math.Sin(thet);
                xf = xp - (int)xp;
                yf = yp - (int)yp;
                //Bilinear
                linearImage[hq, rq] = new Gray((1 - yf) * ((1 - xf) * grayImage[(int)yp, (int)xp].Intensity + xf * grayImage[(int)yp, (int)xp + 1].Intensity) + yf * ((1 - xf) * grayImage[(int)yp + 1, (int)xp].Intensity + xf * grayImage[(int)yp + 1, (int)xp + 1].Intensity));
            }
        }
        linearImage._SmoothGaussian(3);

        //Normal image step1
        List <double> colAvg = new List <double>();
        List <double> colNormal = new List <double>();
        List <double> colSTD = new List <double>();
        double allAvg = 0, STD = 0, sum = 0;
        //calculate average of each col.
        for (int x = 0; x < linearImage.Width; x++)
        {
            sum = 0;
            for (int y = 0; y < linearImage.Height; y++)
            {
                sum += linearImage[y, x].Intensity;
            }
            colAvg.Add(sum / linearImage.Height);
        }
        allAvg = colAvg.Average(); //total avg.
        //calculate STD of each col.
        foreach (double item in colAvg)
        {
            STD += Math.Pow(allAvg - item, 2);
        }
        STD = Math.Sqrt(STD / colAvg.Count);
        //normalize each column mean against the global mean
        foreach (double item in colAvg)
        {
            colNormal.Add((item - allAvg) / STD);
        }
        //calculate avg. STD of pixel at each col.
        for (int x = 0; x < linearImage.Width; x++)
        {
            sum = 0;
            for (int y = 0; y < linearImage.Height; y++)
            {
                sum += Math.Pow(linearImage[y, x].Intensity - colAvg[x], 2);
            }
            colSTD.Add(Math.Sqrt(sum / linearImage.Height));
        }

        //Normal image step2
        Image <Bgr, Byte> sNorImage = linearImage.Convert <Bgr, Byte>();
        List <double> rowAvg = new List <double>();
        List <double> rowNormal = new List <double>();
        List <double> rowSTD = new List <double>();
        //calculate average of each row
        for (int y = 0; y < linearImage.Height; y++)
        {
            sum = 0;
            for (int x = 0; x < linearImage.Width; x++)
            {
                sum += linearImage[y, x].Intensity;
            }
            rowAvg.Add(sum / linearImage.Width);
        }
        //calculate STD of each row
        // NOTE(review): STD is NOT reset here, so it still carries the column
        // accumulation from above — confirm whether that is intentional.
        foreach (var item in rowAvg)
        {
            STD += Math.Pow(allAvg - item, 2);
        }
        STD = Math.Sqrt(STD / rowAvg.Count);
        //normalize
        foreach (var item in rowAvg)
        {
            rowNormal.Add((item - allAvg) / STD);
        }
        //calculate avg. STD of pixel at each row
        for (int y = 0; y < linearImage.Height; y++)
        {
            sum = 0;
            for (int x = 0; x < linearImage.Width; x++)
            {
                sum += Math.Pow(linearImage[y, x].Intensity - rowAvg[y], 2);
            }
            rowSTD.Add(Math.Sqrt(sum / linearImage.Width));
        }

        //rainbowImage: blank HSV canvas plus a hue legend strip left of the circle
        Image <Hsv, Byte> rainbowImage = dest.Convert <Hsv, Byte>().CopyBlank();
        double factor = 2 * RadiusUpper / 180; // pixels per hue step in the legend
        int keyPoint = (int)(threshold * (-100) * factor); // legend position of the threshold
        for (int i = (int)(Cx - RadiusUpper - 10); i < (int)(Cx - RadiusUpper); i++)
        {
            for (int j = (int)(Cy - RadiusUpper); j < Cy + RadiusUpper; j++)
            {
                rainbowImage[j, i - 10] = new Hsv((j - (Cy - RadiusUpper)) / factor, 255, 255);
                // Red tick marking the threshold level on the legend.
                if (j > (Cy - RadiusUpper + keyPoint - 3) && j < (Cy - RadiusUpper + keyPoint + 3))
                {
                    rainbowImage[j, i - 20] = new Hsv(0, 255, 255);
                }
            }
        }

        //drew output: map each unwrapped pixel's deviation back onto the circle
        for (int x = offset; x < linearImage.Width; x++)
        {
            for (int y = 0; y < linearImage.Height; y++)
            {
                thet = (double)y / RadiusUpper;
                xp = Cx + (x + RadiusLower - offset) * Math.Cos(thet);
                yp = Cy - (x + RadiusLower - offset) * Math.Sin(thet);
                // Deviation score scaled to hue range (capped at 180).
                double temp = -100 * (((linearImage[y, x].Intensity - rowAvg[y]) / rowSTD[y] - colNormal[x]));
                rainbowImage[(int)yp, (int)xp] = new Hsv((temp > 180) ? 180 : temp, 255, 255);
                // Below-threshold deviation => defect: mark red in both outputs.
                if (((linearImage[y, x].Intensity - rowAvg[y]) / rowSTD[y] - colNormal[x]) < threshold)
                {
                    sNorImage[y, x] = new Bgr(Color.Red);
                    thet = (double)y / RadiusUpper;
                    xp = Cx + (x + RadiusLower - offset) * Math.Cos(thet);
                    yp = Cy - (x + RadiusLower - offset) * Math.Sin(thet);
                    dest[(int)yp, (int)xp] = new Bgr(Color.Red);
                }
            }
        }

        if (option == 0)
        {
            return(rainbowImage.Convert <Bgr, Byte>());
        }
        else
        {
            return(sNorImage);
        }
    }
    catch (Exception e)
    {
        MessageBox.Show(e.ToString());
        return(dest);
    }
}
private void box_final_MouseUp(object sender, MouseEventArgs e) { if (!Calibrating_laser) return; // Not in calibration mode Mouse_down = false; // Get scale factors float factor_x = (float)Image_transformed.Width / box_Final.Width; float factor_y = (float)Image_transformed.Height / box_Final.Height; Spot.X = (int)(factor_x * Spot.X); Spot.Y = (int)(factor_y * Spot.Y); Spot.Width = (int)(factor_x * Spot.Width); Spot.Height = (int)(factor_y * Spot.Height); if (Spot.Width * Spot.Height <= 0) return; // Return if spot had no area // Get average color (HSV) of the spot Color_spot = Image_transformed.GetSubRect(Norm_rectangle(Spot)).Convert<Hsv, Byte>().GetAverage(); // Reset spot position and size Spot = new Rectangle(); // Stop calibration mode box_Final.Image = null; box_Final.Cursor = Cursors.Default; Drawings.Clear(box_Final.BackColor); Calibrating_laser = false; // Set calibration successfully completed flag Laser_calibrated = true; }
/// <summary> /// detects the colo bassed on the HSV range given /// </summary> /// <param name="srcHSV"> /// the Hsv value we want to check is a color we know or dont /// </param> /// <param name="HSVTabletcolorRange"> /// the range of colors we know /// </param> /// <returns> /// Color of the tablet /// </returns> protected TabletColors detectcolor(Hsv srcHSV, Hsv[,] HSVTabletcolorRange) { int ol = 0; int oh = 0; if (true == InHSVRange(srcHSV, HSVTabletcolorRange, TabletColors.Green, ol, oh)) {//green return TabletColors.Green; } else if (true == InHSVRange(srcHSV, HSVTabletcolorRange, TabletColors.Red, ol, oh)) {//red return TabletColors.Red; } else if (true == InHSVRange(srcHSV, HSVTabletcolorRange, TabletColors.White, ol, oh)) {//white return TabletColors.White; } else if (true == InHSVRange(srcHSV, HSVTabletcolorRange, TabletColors.Blue, ol, oh)) {//blue return TabletColors.Blue; } else if (true == InHSVRange(srcHSV, HSVTabletcolorRange, TabletColors.Black, ol, oh)) {//black return TabletColors.Black; } else { return TabletColors.Unknown; } }
void gatesRecognition() { Image <Hsv, Byte> imgOriginalHsv; Image <Hsv, Byte> imgProcessedHsv; try { imgOriginal = capVideo.QueryFrame().ToImage <Bgr, Byte>(); } catch (Exception) { } try { imgOriginalHsv = capVideo.QueryFrame().ToImage <Hsv, Byte>(); } catch (Exception) { return; } if (imgOriginal == null) { return; } imgProcessedHsv = imgOriginalHsv.Convert <Hsv, byte>(); if (imgOriginal == null) { return; } Hsv min = new Hsv(0, 5, 10); Hsv max = new Hsv(0, 10, 20); imgProcessed = imgProcessedHsv.InRange(min, max); imgProcessed = imgProcessed.SmoothGaussian(9); imgProcessed = imgProcessed.SmoothBlur(9, 9); LineSegment2D[] lines = imgProcessed.HoughLines(10, 10, 5, 5, 100, 0, 0)[0]; foreach (LineSegment2D line in lines) { if (txtXYRadius.Text != "") { txtXYRadius.AppendText(Environment.NewLine); } txtXYRadius.AppendText("Gates position Top =" + line.P2.ToString().PadLeft(4) /* + * ", Bottom =" + line.P2.ToString().PadLeft(4)*/); txtXYRadius.ScrollToCaret(); CvInvoke.Line(imgOriginal, line.P1, line.P2, new MCvScalar(0, 255, 0), 1, LineType.AntiAlias, 0); imgOriginal.Draw(line, new Bgr(Color.Blue), 3); } ibOriginal.Image = imgOriginal; ibProcessed.Image = imgProcessed; }
/// <summary>
/// Tells whether an HSV colour lies inside a target range, with optional slack
/// added below and above the range. Handles hue bands that wrap around the end
/// of the hue circle (Low.Hue >= High.Hue), in which case the accepted hues are
/// [Low .. 179] and [0 .. High].
/// </summary>
/// <param name="srcHsv">The HSV value to test.</param>
/// <param name="targetHsv">Two-element range: [Low] and [High] bounds.</param>
/// <param name="lowerLimitExtra">Extra slack subtracted from the lower bound.</param>
/// <param name="higherLimitExtra">Extra slack added to the upper bound.</param>
/// <returns>True if the colour is inside the (widened) range.</returns>
protected bool InHSVRange(Hsv srcHsv, Hsv[] targetHsv, int lowerLimitExtra, int higherLimitExtra)
{
    Hsv low = targetHsv[(int)HSVRange.Low];
    Hsv high = targetHsv[(int)HSVRange.High];

    // Saturation and value are checked identically in every branch,
    // so factor that predicate out once.
    bool satValOk =
        (srcHsv.Satuation >= (low.Satuation - lowerLimitExtra)) &&
        (srcHsv.Value >= (low.Value - lowerLimitExtra)) &&
        (srcHsv.Satuation <= (high.Satuation + higherLimitExtra)) &&
        (srcHsv.Value <= (high.Value + higherLimitExtra));

    if (low.Hue < high.Hue)
    {
        // Ordinary, non-wrapping hue band.
        return satValOk &&
               (srcHsv.Hue >= (low.Hue - lowerLimitExtra)) &&
               (srcHsv.Hue <= (high.Hue + higherLimitExtra));
    }

    // Wrapping band (e.g. red): upper segment [low .. 179] ...
    if (srcHsv.Hue >= (low.Hue - lowerLimitExtra))
    {
        return satValOk && (srcHsv.Hue <= 179);
    }

    // ... or lower segment [0 .. high].
    if (srcHsv.Hue <= (high.Hue + higherLimitExtra))
    {
        return satValOk && (srcHsv.Hue >= 0);
    }

    return false;
}
public ColorPair(Bgr a, Hsv b) { Bgr = a; Hsv = b; }
private void handleWebcamQueryFrame(object sender, EventArgs e) { webcam.Retrieve(webcamFrame); Image <Hsv, byte> imageHSV = webcamFrame.ToImage <Hsv, byte>(); CvInvoke.CvtColor(imageHSV, imageHSV, ColorConversion.Bgr2Hsv); double hValueMin = 70; double hValueMax = 100; double sValueMin = 100; double sValueMax = 255; double vValueMin = 100; double vValueMax = 255; Hsv lower = new Hsv(hValueMin, sValueMin, vValueMin); Hsv upper = new Hsv(hValueMax, sValueMax, vValueMax); Mat imgGray = imageHSV.InRange(lower, upper).Mat; VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint(); VectorOfPoint biggestContour = new VectorOfPoint(); int biggestContourIndex = -1; float biggestContourArea = 0f; Mat hierarchy = new Mat(); CvInvoke.FindContours(imgGray, contours, hierarchy, RetrType.List, ChainApproxMethod.ChainApproxNone); for (int i = 0; i < contours.Size; i++) { if (CvInvoke.ContourArea(contours[i]) > biggestContourArea) { biggestContour = contours[i]; biggestContourIndex = i; biggestContourArea = (float)CvInvoke.ContourArea(contours[i]); } } if (biggestContourIndex > -1) { CvInvoke.DrawContours(webcamFrame, contours, biggestContourIndex, new MCvScalar(255, 0, 0)); } var moments = CvInvoke.Moments(biggestContour); int cx = (int)(moments.M10 / moments.M00); int cy = (int)(moments.M01 / moments.M00); Point p = new Point(cx, cy); int width = webcamFrame.Width; int height = webcamFrame.Height; if (cx < (float)width / 4) { horizontal = 1; } else if (cx > 3 * (float)width / 4) { horizontal = -1; } else { horizontal = 0; } if (cy < (float)height / 4) { vertical = 1; } else if (cy > 3 * (float)height / 4) { vertical = -1; } else { vertical = 0; } Mat flippedImage = webcamFrame.Clone(); CvInvoke.Flip(webcamFrame, flippedImage, FlipType.Horizontal); CvInvoke.Imshow("Test", flippedImage); }
// IControlOverrides overrides

/// <summary>
/// Handles arrow-key input on a colour-channel slider: increments or decrements
/// the channel this slider controls (Hue/Saturation/Value/Alpha) on the parent
/// ColorPicker. Ctrl selects a large increment. Keys that do not match the
/// slider's orientation are forwarded to the base implementation.
/// </summary>
protected override void OnKeyDown(KeyRoutedEventArgs args)
{
    // Only handle the keys matching our orientation; everything else goes to base.
    if ((this.Orientation == Orientation.Horizontal &&
         args.Key != VirtualKey.Left &&
         args.Key != VirtualKey.Right) ||
        (this.Orientation == Orientation.Vertical &&
         args.Key != VirtualKey.Up &&
         args.Key != VirtualKey.Down))
    {
        base.OnKeyDown(args);
        return;
    }

    ColorPicker parentColorPicker = GetParentColorPicker();
    if (parentColorPicker == null)
    {
        return;
    }

    // Uno Doc: Window must be fully qualified for iOS/macOS where NSWindow maps to Window
    bool isControlDown = (Windows.UI.Xaml.Window.Current.CoreWindow.GetKeyState(VirtualKey.Control) & CoreVirtualKeyStates.Down) == CoreVirtualKeyStates.Down;

    double minBound = 0;
    double maxBound = 0;

    Hsv currentHsv = parentColorPicker.GetCurrentHsv();
    double currentAlpha = 0;

    // Load the channel's bounds and seed it with this slider's current value
    // (S/V/A sliders expose 0-100 but the model stores 0-1, hence "/ 100").
    switch (this.ColorChannel)
    {
        case ColorPickerHsvChannel.Hue:
            minBound = parentColorPicker.MinHue;
            maxBound = parentColorPicker.MaxHue;
            currentHsv.H = this.Value;
            break;

        case ColorPickerHsvChannel.Saturation:
            minBound = parentColorPicker.MinSaturation;
            maxBound = parentColorPicker.MaxSaturation;
            currentHsv.S = this.Value / 100;
            break;

        case ColorPickerHsvChannel.Value:
            minBound = parentColorPicker.MinValue;
            maxBound = parentColorPicker.MaxValue;
            currentHsv.V = this.Value / 100;
            break;

        case ColorPickerHsvChannel.Alpha:
            minBound = 0;
            maxBound = 100;
            currentAlpha = this.Value / 100;
            break;

        default:
            throw new InvalidOperationException("Invalid ColorPickerHsvChannel."); // Uno Doc: 'throw winrt::hresult_error(E_FAIL);'
    }

    // In RTL layouts (without reversed direction) left/right are mirrored.
    bool shouldInvertHorizontalDirection = this.FlowDirection == FlowDirection.RightToLeft && !this.IsDirectionReversed;

    ColorHelpers.IncrementDirection direction =
        ((args.Key == VirtualKey.Left && !shouldInvertHorizontalDirection) ||
         (args.Key == VirtualKey.Right && shouldInvertHorizontalDirection) ||
         args.Key == VirtualKey.Up) ?
        ColorHelpers.IncrementDirection.Lower :
        ColorHelpers.IncrementDirection.Higher;

    ColorHelpers.IncrementAmount amount = isControlDown ? ColorHelpers.IncrementAmount.Large : ColorHelpers.IncrementAmount.Small;

    if (this.ColorChannel != ColorPickerHsvChannel.Alpha)
    {
        currentHsv = ColorHelpers.IncrementColorChannel(currentHsv, this.ColorChannel, direction, amount, false /* shouldWrap */, minBound, maxBound);
    }
    else
    {
        currentAlpha = ColorHelpers.IncrementAlphaChannel(currentAlpha, direction, amount, false /* shouldWrap */, minBound, maxBound);
    }

    // Write the incremented channel back to this slider's Value
    // (scaling 0-1 model values back up to the 0-100 slider range).
    switch (this.ColorChannel)
    {
        case ColorPickerHsvChannel.Hue:
            this.Value = currentHsv.H;
            break;

        case ColorPickerHsvChannel.Saturation:
            this.Value = currentHsv.S * 100;
            break;

        case ColorPickerHsvChannel.Value:
            this.Value = currentHsv.V * 100;
            break;

        case ColorPickerHsvChannel.Alpha:
            this.Value = currentAlpha * 100;
            break;

        default:
            throw new InvalidOperationException("Invalid ColorPickerHsvChannel."); // Uno Doc: 'MUX_ASSERT(false);'
    }

    args.Handled = true;
}
/// <summary> /// Converts a <see cref="Hsv"/> into a <see cref="Rgb"/> /// </summary> /// <param name="color">The color to convert.</param> /// <returns>The <see cref="Rgb"/></returns> public Rgb ToRgb(Hsv color) { // Conversion return(HsvAndRgbConverter.Convert(color)); }
private void ReadValuesFromSettings() { blueMin = ParseSettingToColor(Properties.Settings.Default.blueMin); blueMax = ParseSettingToColor(Properties.Settings.Default.blueMax); redMin = ParseSettingToColor(Properties.Settings.Default.redMin); redMax = ParseSettingToColor(Properties.Settings.Default.redMax); redMin2 = ParseSettingToColor(Properties.Settings.Default.redMin2); redMax2 = ParseSettingToColor(Properties.Settings.Default.redMax2); greenMin = ParseSettingToColor(Properties.Settings.Default.greenMin); greenMax = ParseSettingToColor(Properties.Settings.Default.greenMax); yellowMin = ParseSettingToColor(Properties.Settings.Default.yellowMin); yellowMax = ParseSettingToColor(Properties.Settings.Default.yellowMax); }
/// <summary>
/// Builds the tooltip text for this colour-channel slider: the rounded slider
/// value formatted through a localized resource string, optionally followed by
/// the display name of the resulting colour (when a parent ColorPicker exists
/// and Color.ToDisplayName is available on this OS).
/// </summary>
private string GetToolTipString()
{
    uint sliderValue = (uint)(Math.Round(this.Value));

    if (this.ColorChannel == ColorPickerHsvChannel.Alpha)
    {
        return(StringUtil.FormatString(
            ResourceAccessor.GetLocalizedStringResource(ResourceAccessor.SR_ToolTipStringAlphaSlider),
            sliderValue));
    }
    else
    {
        ColorPicker parentColorPicker = GetParentColorPicker();
        if (parentColorPicker != null && DownlevelHelper.ToDisplayNameExists())
        {
            // Preview what the colour would be at this slider position,
            // then name it (S/V sliders expose 0-100, model stores 0-1).
            Hsv currentHsv = parentColorPicker.GetCurrentHsv();
            string localizedString;

            switch (this.ColorChannel)
            {
                case ColorPickerHsvChannel.Hue:
                    currentHsv.H = this.Value;
                    localizedString = ResourceAccessor.GetLocalizedStringResource(ResourceAccessor.SR_ToolTipStringHueSliderWithColorName);
                    break;

                case ColorPickerHsvChannel.Saturation:
                    localizedString = ResourceAccessor.GetLocalizedStringResource(ResourceAccessor.SR_ToolTipStringSaturationSliderWithColorName);
                    currentHsv.S = this.Value / 100;
                    break;

                case ColorPickerHsvChannel.Value:
                    localizedString = ResourceAccessor.GetLocalizedStringResource(ResourceAccessor.SR_ToolTipStringValueSliderWithColorName);
                    currentHsv.V = this.Value / 100;
                    break;

                default:
                    throw new InvalidOperationException("Invalid ColorPickerHsvChannel."); // Uno Doc: 'throw winrt::hresult_error(E_FAIL);'
            }

            return(StringUtil.FormatString(
                localizedString,
                sliderValue,
                ColorHelper.ToDisplayName(ColorConversion.ColorFromRgba(ColorConversion.HsvToRgb(currentHsv)))));
        }
        else
        {
            // No parent picker or no display-name support: value-only tooltip.
            string localizedString;

            switch (this.ColorChannel)
            {
                case ColorPickerHsvChannel.Hue:
                    localizedString = ResourceAccessor.GetLocalizedStringResource(ResourceAccessor.SR_ToolTipStringHueSliderWithoutColorName);
                    break;

                case ColorPickerHsvChannel.Saturation:
                    localizedString = ResourceAccessor.GetLocalizedStringResource(ResourceAccessor.SR_ToolTipStringSaturationSliderWithoutColorName);
                    break;

                case ColorPickerHsvChannel.Value:
                    localizedString = ResourceAccessor.GetLocalizedStringResource(ResourceAccessor.SR_ToolTipStringValueSliderWithoutColorName);
                    break;

                default:
                    throw new InvalidOperationException("Invalid ColorPickerHsvChannel."); // Uno Doc: 'throw winrt::hresult_error(E_FAIL);'
            }

            return(StringUtil.FormatString(
                localizedString,
                sliderValue));
        }
    }
}
/// <summary> /// HSV color filter, passes all pixels between the min-max limits. /// </summary> /// <param name="src"> Image to filter </param> /// <param name="min"> lower hsv point </param> /// <param name="max"> upper hsv point </param> /// <returns> binary image (high pixels are in range) </returns> private Image <Gray, byte> FilterHSV(Image <Bgr, byte> src, Hsv min, Hsv max) { Image <Hsv, byte> hsv = src.Convert <Hsv, byte>(); return(hsv.InRange(min, max)); }
private string HsvToString(Hsv color) { return (int)color.Hue + "," + (int)color.Satuation + "," + (int)color.Value; }
/// <summary>
/// Per-frame turret-tracking callback: thresholds the frame in HSV, blob-detects
/// the largest bright target, converts its centroid offset from frame centre into
/// X/Y gun states (Left/Right/Up/Down/Idle), and sends the matching serial
/// commands when a state changes. Finally pushes a preview image to the UI.
/// </summary>
private void ProcessFrame(object sender, EventArgs arg)
{
    Image <Bgr, Byte> frame = _capture.RetrieveBgrFrame();

    // HSV band from the configured min/max fields.
    var minHsv = new Hsv(_minHue, _minSat, _minVal);
    var maxHsv = new Hsv(_maxHue, _maxSat, _maxVal);
    Image <Gray, Byte> grayFrame = frame.Convert <Hsv, Byte>().InRange(minHsv, maxHsv);

    // Down/up pyramid round-trip acts as a cheap smoothing pass.
    Image <Gray, Byte> smallGrayFrame = grayFrame.PyrDown();
    Image <Gray, Byte> smoothedGrayFrame = smallGrayFrame.PyrUp();
    // NOTE(review): cannyFrame is computed but never used below — dead work?
    Image <Gray, Byte> cannyFrame = smoothedGrayFrame.Canny(100, 60);

    // This takes our nice looking input and blows or cuts off values
    // above or below a bightness threshold
    Image <Gray, byte> webcamThreshImg = smoothedGrayFrame.ThresholdBinary(new Gray(150), new Gray(255));

    // The magic blob detection code
    Emgu.CV.Cvb.CvBlobs resultingWebcamBlobs = new Emgu.CV.Cvb.CvBlobs();
    Emgu.CV.Cvb.CvBlobDetector bDetect = new Emgu.CV.Cvb.CvBlobDetector();
    uint numWebcamBlobsFound = bDetect.Detect(webcamThreshImg, resultingWebcamBlobs);

    // Pick the largest blob above the minimum size.
    Emgu.CV.Cvb.CvBlob largestBlob = null;
    foreach (var blob in resultingWebcamBlobs)
    {
        if (blob.Value.Area > MINIMUM_BLOB_SIZE_IN_PX)
        {
            if (largestBlob == null || (largestBlob.Area < blob.Value.Area))
            {
                largestBlob = blob.Value;
            }
        }
    }

    if (largestBlob != null)
    {
        // Draw bounding box around target
        frame.Draw(largestBlob.BoundingBox, new Bgr(0, 255, 255), 5);

        // Dead zone around the frame centre; outside it we steer toward the target.
        System.Drawing.Point center = new System.Drawing.Point(frame.Width / 2, frame.Height / 2);
        double thresholdWidth = MOVEMENT_THRESHOLD_PERCENTAGE * frame.Width;
        double thresholdHeight = MOVEMENT_THRESHOLD_PERCENTAGE * frame.Height;

        if (largestBlob.Centroid.X < (center.X - thresholdWidth))
        {
            XGunState = XGunState.Left;
        }
        else if (largestBlob.Centroid.X > (center.X + thresholdWidth))
        {
            XGunState = XGunState.Right;
        }
        else
        {
            XGunState = XGunState.Idle;
        }

        if (largestBlob.Centroid.Y > (center.Y + thresholdHeight))
        {
            YGunState = YGunState.Down;
        }
        else if (largestBlob.Centroid.Y < (center.Y - thresholdHeight))
        {
            YGunState = YGunState.Up;
        }
        else
        {
            YGunState = YGunState.Idle;
        }

        // Only transmit on state CHANGES, and only when connected and not in manual mode.
        if (YGunState != PrevYGunState && _serialConnected && !_manualMode)
        {
            PrevYGunState = YGunState;
            SerialPort.Write(_yGunStateCommands[YGunState]);
        }
        if (XGunState != PrevXGunState && _serialConnected && !_manualMode)
        {
            PrevXGunState = XGunState;
            SerialPort.Write(_xGunStateCommands[XGunState]);
        }
    }
    else if (_serialConnected)
    {
        // No target: force both axes to Idle and say so over serial.
        PrevXGunState = XGunState = XGunState.Idle;
        PrevYGunState = YGunState = YGunState.Idle;
        SerialPort.Write(_xGunStateCommands[XGunState.Idle] + _yGunStateCommands[YGunState.Idle]);
    }

    // Camera frame callback is not on UI tread. Need to get there to set image source
    this.Dispatcher.InvokeAsync(() =>
    {
        commandLabel.Content = string.Format("XGunState: {0}, YGunState: {1}", XGunState, YGunState);
        if (!_test)
        {
            image.Source = ToBitmapSource(frame);
        }
        else
        {
            image.Source = ToBitmapSource(smoothedGrayFrame);
        }
    });
}
public CardColor Classify(Hsv hsv) { float[] array = new float[] { (float)hsv.Hue, (float)hsv.Satuation, (float)hsv.Value }; CardColor outcome = (CardColor)kmeans.Classify(array); return outcome; }
/// <summary>
/// Detects if the tray is there, and works out its location: isolates the
/// tray's yellow marker line via an HSV range, Hough-detects it, derives the
/// tray angle and the four tray corner points, and crops the tray region into
/// imgTray. Also updates the Angle field and saves debug images.
/// </summary>
/// <param name="src">
/// image that contains the tray
/// </param>
/// <returns>true if tray found, false if not</returns>
/// <note>
/// do the returns
/// </note>
private bool DetectTray(Image<Bgr, Byte> src)
{
    Point[] line = new Point[2]; //holds the location of the start and end yellow line in the picture
    double angle; //angle of the tray on the conveyor
    Hsv[] HSVT = new Hsv[2]; //hold the color of the yellow line
    // Yellow band (hue 19-39 on Emgu's 0-180 scale) for the marker line.
    HSVT[(int)HSVRange.Low].Hue = 19;
    HSVT[(int)HSVRange.Low].Satuation = 80;//108.24;
    HSVT[(int)HSVRange.Low].Value = 240;//183.68;
    HSVT[(int)HSVRange.High].Hue = 39;
    HSVT[(int)HSVRange.High].Satuation = 255;//331.92;
    HSVT[(int)HSVRange.High].Value = 255;//360;
    Image<Bgr, Byte> col = RemoveEverythingButRange(src, HSVT); //we want to remove everything that is not yellow
    int rows = col.Rows;
    int cols = col.Cols;
    // Down- then up-scale: presumably a cheap smoothing/denoise pass — confirm.
    col = col.Resize((int)(cols / 2.5), (int)(rows / 2.5), INTER.CV_INTER_AREA);
    col = col.Resize(cols, rows, INTER.CV_INTER_AREA);
    Image<Gray, Byte> Gsrc = col.Convert<Gray, Byte>();
    saveImage(col, "only true yellow.jpg");
    int countLines;
    col = RemoveEverythingButRange(apply_Hough(col.Clone(),out countLines), HSVT); //we want to remove everything that is not yellow
    // Too few Hough lines => no tray in view.
    if (countLines < 3) return false;
    Gsrc = col.Convert<Gray, Byte>();
    line = scanImgForLine(Gsrc); //get location of the yellow line
    angle = AngleOfTray(line); //get the angle of the tray
    angle -= 90; //remove 90 so the angle is starting from zero
    Angle = angle;//make ref
    Debug.WriteLine(DateTime.Now.ToString("h:mm:ss tt>> ") + "Angle: " + Angle);
    Debug.Flush();
    //double Mag = Math.Sqrt(Math.Pow((line[0].X - line[1].X),2) + Math.Pow((line[0].Y - line[1].Y),2) );
    saveImage(col, "only yellow lines.jpg");
    //saveImage(apply_Hough(col.Clone()), "lines in tray.jpg");
    // Perpendicular offset vector used to project the opposite tray edge.
    int x = line[0].Y - line[1].Y;
    int y = line[0].X - line[1].X;
    //work out the points
    trayPoints[1].X = line[1].X;
    trayPoints[1].Y = line[1].Y;
    trayPoints[3].X = line[0].X;
    trayPoints[3].Y = line[0].Y;
    trayPoints[0].X = (line[1].X - x);
    trayPoints[0].Y = (line[1].Y + y);
    trayPoints[2].X = (line[0].X - x);
    trayPoints[2].Y = (line[0].Y + y);
    //work out the value we need to crop the image (axis-aligned bounding box of the 4 corners)
    int xOrig = Math.Min(Math.Min(trayPoints[0].X, trayPoints[2].X), Math.Min(trayPoints[1].X, trayPoints[3].X));
    int yOrig = Math.Min(Math.Min(trayPoints[0].Y, trayPoints[2].Y), Math.Min(trayPoints[1].Y, trayPoints[3].Y));
    int xWidth = Math.Max(Math.Max(trayPoints[0].X, trayPoints[2].X), Math.Max(trayPoints[1].X, trayPoints[3].X)) - xOrig;
    int yHeight = Math.Max(Math.Max(trayPoints[0].Y, trayPoints[2].Y), Math.Max(trayPoints[1].Y, trayPoints[3].Y)) - yOrig;
    imgTray = CropImage(src, xOrig, yOrig, xWidth, yHeight);//crop the image
    saveImage(imgTray, "only tray.jpg");
    return true;
}
/// <summary> /// Converts a <see cref="Hsv"/> into a <see cref="CieLch"/> /// </summary> /// <param name="color">The color to convert.</param> /// <returns>The <see cref="CieLch"/></returns> public CieLch ToCieLch(Hsv color) { CieXyz xyzColor = this.ToCieXyz(color); return(this.ToCieLch(xyzColor)); }
public void HsvMagic(Image<Bgr, Byte> src, Image<Gray, Byte> black_dst, Image<Gray, Byte> blue_dst) { Hsv blueVal_min = new Hsv(0, 50, 125); Hsv blueVal_max = new Hsv(359.9, 255, 255); Hsv blackVal_min = new Hsv(0, 0, 100); Hsv blackVal_max = new Hsv(360, 255, 255); imgHsv = src.Convert<Hsv, Byte>(); // borders maskHsvBlack = getBlackHsvMack(imgHsv, blackVal_min, blackVal_max); //blue maskHsvBlue = getBlueHsvMask(imgHsv, blueVal_min, blueVal_max); //other //maskHsv = new Image<Gray, Byte>(imgHsv.Width, imgHsv.Height); //CvInvoke.BitwiseXor(maskHsvBlue, maskHsvBlack, maskHsv); }
/// <summary> /// /// </summary> /// <param name="h">HUE</param> /// <param name="s">Saturação</param> /// <param name="v">Valor</param> /// <param name="deltalow">Variação inferior</param> /// <param name="deltahigh">Variação superior</param> public Range(double h, double s, double v, int deltalow = 30, int deltahigh = 30) { lowerrange = new Hsv(h - deltalow, s - deltalow, v - deltalow); upperrange = new Hsv(h + deltahigh, s + deltahigh, v + deltahigh); }
private Image<Gray, byte> getSkinOnImage(Image<Hsv, byte> sourceImage, Hsv Hsv_min, Hsv Hsv_Max) { Image<Gray, Byte> skin = sourceImage.InRange(Hsv_min, Hsv_Max); skin = skin .SmoothGaussian(11) .Dilate(3) .SmoothGaussian(5) .Convert<Rgb, Byte>() .ThresholdBinary(new Rgb(127, 127, 127), new Rgb(255, 255, 255)) .Convert<Gray, Byte>(); return skin; }
//for one color range public static Image<Gray, byte> FilterColor(Image<Hsv,Byte> src, Hsv colorLow, Hsv colorHigh, string debugWindowName = "") { Image<Gray,byte> result = src.InRange(colorLow, colorHigh); ShowInNamedWindow(result, debugWindowName); return result; }
/// <summary>
/// Returns a copy of <paramref name="originalHsv"/> with the selected channel
/// incremented (or decremented) by a small (1) or large (30 for hue, 10 for
/// saturation/value) step, clamping to [minBound, maxBound] or wrapping to the
/// opposite bound when already sitting on a boundary and wrapping is enabled.
/// </summary>
/// <param name="originalHsv">The colour to adjust.</param>
/// <param name="channel">Which HSV channel to adjust.</param>
/// <param name="direction">Whether to raise or lower the channel.</param>
/// <param name="amount">Small (1) or large (major-value snap) increment.</param>
/// <param name="shouldWrap">Wrap to the opposite bound when stepping off a boundary.</param>
/// <param name="minBound">Lower clamp for the channel value.</param>
/// <param name="maxBound">Upper clamp for the channel value.</param>
/// <returns>The adjusted HSV value.</returns>
public static Hsv IncrementColorChannel(
    Hsv originalHsv,
    ColorPickerHsvChannel channel,
    IncrementDirection direction,
    IncrementAmount amount,
    bool shouldWrap,
    double minBound,
    double maxBound)
{
    Hsv newHsv = originalHsv;

    // NOTE(review): when amount is Large AND display names exist, no adjustment
    // happens here — the upstream WinUI source has an else branch that snaps to
    // the next named colour; presumably handled elsewhere — confirm.
    if (amount == IncrementAmount.Small || !DownlevelHelper.ToDisplayNameExists())
    {
        // In order to avoid working with small values that can incur rounding issues,
        // we'll multiply saturation and value by 100 to put them in the range of 0-100 instead of 0-1.
        newHsv.S *= 100;
        newHsv.V *= 100;

        // Initialized to H only to satisfy definite assignment; the switch below
        // re-points the ref at the channel actually being incremented.
        ref double valueToIncrement = ref newHsv.H;
        double incrementAmount = 0.0;

        // Small increments add/subtract 1; large increments snap toward the next
        // major value: every 30 for hue, every 10 for saturation and value.
        switch (channel)
        {
            case ColorPickerHsvChannel.Hue:
                valueToIncrement = ref newHsv.H;
                incrementAmount = amount == IncrementAmount.Small ? 1 : 30;
                break;

            case ColorPickerHsvChannel.Saturation:
                valueToIncrement = ref newHsv.S;
                incrementAmount = amount == IncrementAmount.Small ? 1 : 10;
                break;

            case ColorPickerHsvChannel.Value:
                valueToIncrement = ref newHsv.V;
                incrementAmount = amount == IncrementAmount.Small ? 1 : 10;
                break;

            default:
                throw new InvalidOperationException("Invalid ColorPickerHsvChannel."); // Uno Doc: 'winrt::hresult_error(E_FAIL);'
        }

        double previousValue = valueToIncrement;
        valueToIncrement += (direction == IncrementDirection.Lower ? -incrementAmount : incrementAmount);

        // If the value has stepped outside the bounds: wrap to the other side of
        // the spectrum when we were already on the boundary and wrapping is on;
        // otherwise clamp to the exceeded boundary.
        if (valueToIncrement < minBound)
        {
            valueToIncrement = (shouldWrap && previousValue == minBound) ? maxBound : minBound;
        }

        if (valueToIncrement > maxBound)
        {
            valueToIncrement = (shouldWrap && previousValue == maxBound) ? minBound : maxBound;
        }

        // We multiplied saturation and value by 100 previously, so now we want to put them back in the 0-1 range.
        newHsv.S /= 100;
        newHsv.V /= 100;
    }

    // BUG FIX: the method is declared to return Hsv but had no return statement
    // at all, which does not compile.
    return newHsv;
}
private CardColor RecognizeColor() { /* Set the contour as the ROI of the image * Make an empty image on which you fill the inner of the contour. This is the mask. * Now, AND-the mask with the image. * Calculate the average color of the image, for each channel. * * Then, do some thresholding on RGB and/or HSV. The values can be got from paint. * */ #region extract color Image<Bgr, Byte> focusBgr = Node.Image; focusBgr = RemoveCardColor(this.Node, focusBgr); Bgr avgBgr = new Bgr(); MCvScalar scr1 = new MCvScalar(); focusBgr.AvgSdv(out avgBgr, out scr1, Node.AttentionMask); Image<Hsv, Byte> focusHsv = focusBgr.Convert<Hsv, Byte>(); Hsv avgHsv = new Hsv(); MCvScalar scr2 = new MCvScalar(); focusHsv.AvgSdv(out avgHsv, out scr2, Node.AttentionMask); #endregion CardColor color = ClassifyColor(avgBgr, avgHsv); ContourNode parent = Node.FindParent(Shape.Card, null, null); if (parent != null) { double colDist = ColorDistance(avgBgr, parent.averageBgr); } this.Node.averageBgr = avgBgr; this.Node.averageHsv = avgHsv; #region debug #if DEBUG Image<Bgr, Byte> debug = new Image<Bgr, Byte>(250, 200); debug.SetValue(avgBgr); string bgrstr = ((int)avgBgr.Red).ToString() + "," + ((int)avgBgr.Green).ToString() + "," + ((int)avgBgr.Blue).ToString(); string hsvstr = ((int)avgHsv.Hue).ToString() + "," + ((int)avgHsv.Satuation).ToString() + "," + ((int)avgHsv.Value).ToString(); debug = debug.ConcateHorizontal(focusBgr); Image<Bgr, Byte> rgbMask = new Image<Bgr, byte>(new Image<Gray, byte>[] { Node.AttentionMask, Node.AttentionMask, Node.AttentionMask }); //debug = debug.ConcateHorizontal(rgbMask); //ImageViewer.Show(debug, "rgb(" + bgrstr + ") hsv(" + hsvstr + ")"+"Classified as "+color.ToString()); #endif #endregion return color; }
/// <summary>
/// Builds the configuration screen: wires the low/high colour buttons for
/// temperature, humidity, proximity and hour, paints each with the colour
/// stored in the XML setup, and pre-fills the threshold text boxes.
/// </summary>
/// <param name="savedInstanceState">Standard Android saved-state bundle.</param>
protected override void OnCreate(Bundle savedInstanceState)
{
    base.OnCreate(savedInstanceState);
    RequestWindowFeature(WindowFeatures.NoTitle);
    SetContentView(Resource.Layout.Config);

    colors = ColorConversion.GetColorSetupFromXML();

    // Looks up the button, attaches its click handler, and paints it with the
    // colour stored as H/S/V strings in the XML setup. Replaces eight
    // copy-pasted blocks of the same find/wire/parse/paint sequence.
    // NOTE(review): double.Parse uses the current culture; if the XML always
    // stores '.' decimals this breaks under ','-decimal locales — confirm how
    // the values are written before switching to InvariantCulture.
    Button SetupColorButton(int buttonId, EventHandler onClick, string h, string s, string v)
    {
        var button = (Button)FindViewById(buttonId);
        button.Click += onClick;
        var color = new Hsv
        {
            H = double.Parse(h),
            S = double.Parse(s),
            V = double.Parse(v)
        }.ToColor().ToAndroidColor();
        button.SetBackgroundColor(color);
        return button;
    }

    btnLowTemp = SetupColorButton(Resource.Id.btnLowTemp, btnLowTemp_Click,
        colors.TemperatureColor.LowColor.H, colors.TemperatureColor.LowColor.S, colors.TemperatureColor.LowColor.V);
    btnHighTemp = SetupColorButton(Resource.Id.btnHighTemp, btnHighTemp_Click,
        colors.TemperatureColor.HighColor.H, colors.TemperatureColor.HighColor.S, colors.TemperatureColor.HighColor.V);
    btnLowHumidity = SetupColorButton(Resource.Id.btnLowHumidity, btnLowHumidity_Click,
        colors.HumidityColor.LowColor.H, colors.HumidityColor.LowColor.S, colors.HumidityColor.LowColor.V);
    btnHighHumidity = SetupColorButton(Resource.Id.btnHighHumidity, btnHighHumidity_Click,
        colors.HumidityColor.HighColor.H, colors.HumidityColor.HighColor.S, colors.HumidityColor.HighColor.V);
    btnLowProximity = SetupColorButton(Resource.Id.btnLowProximity, btnLowProximity_Click,
        colors.ProximityColor.LowColor.H, colors.ProximityColor.LowColor.S, colors.ProximityColor.LowColor.V);
    btnHighProximity = SetupColorButton(Resource.Id.btnHighProximity, btnHighProximity_Click,
        colors.ProximityColor.HighColor.H, colors.ProximityColor.HighColor.S, colors.ProximityColor.HighColor.V);
    btnLowHour = SetupColorButton(Resource.Id.btnLowHour, btnLowHour_Click,
        colors.HourColor.LowColor.H, colors.HourColor.LowColor.S, colors.HourColor.LowColor.V);
    btnHighHour = SetupColorButton(Resource.Id.btnHighHour, btnHighHour_Click,
        colors.HourColor.HighColor.H, colors.HourColor.HighColor.S, colors.HourColor.HighColor.V);

    // Threshold edit boxes, pre-filled from the stored configuration.
    txtLowTemp = (EditText)FindViewById(Resource.Id.txtLowTempValue);
    txtHighTemp = (EditText)FindViewById(Resource.Id.txtHighTempValue);
    txtLowHumidity = (EditText)FindViewById(Resource.Id.txtLowHumidityValue);
    txtHighHumidity = (EditText)FindViewById(Resource.Id.txtHighHumidityValue);
    txtLowTemp.Text = colors.TemperatureColor.LowColor.Threshold;
    txtHighTemp.Text = colors.TemperatureColor.HighColor.Threshold;
    txtLowHumidity.Text = colors.HumidityColor.LowColor.Threshold;
    txtHighHumidity.Text = colors.HumidityColor.HighColor.Threshold;

    // Persist edits whenever a threshold box loses focus.
    txtLowTemp.FocusChange += UpdateValues;
    txtHighTemp.FocusChange += UpdateValues;
    txtLowHumidity.FocusChange += UpdateValues;
    txtHighHumidity.FocusChange += UpdateValues;

    imm = (InputMethodManager)GetSystemService(InputMethodService);

    // Clicking the background dismisses focus from the edit boxes.
    var layout = (LinearLayout)FindViewById(Resource.Id.parentLayout);
    layout.Click += ConfigActivity_Click;
    layout.RequestFocus();
}
public Form1() { InitializeComponent(); faceHaar = new CascadeClassifier(@"..\..\haar\haarcascade_frontalface.xml"); hsv_min = new Hsv(0, 45, 0); hsv_max = new Hsv(20, 255, 255); //YCrCb_min = new Ycc(0, 131, 80); YCrCb_min = new Ycc(0, 140, 0); YCrCb_max = new Ycc(255, 185, 135); currentFrameList = new List<Image<Bgr, byte>>(); grayFrameList = new List<Image<Gray, byte>>(); handMotionList = new List<Image<Gray, byte>>(); contourStorage = new MemStorage(); box = new MCvBox2D(); fingers = new List<LineSegment2D>(); motionPoints = new List<PointF>(); gestures = new List<string>(); runTimes = new List<long>(); faceEmotions = new List<string>(); }
private string HsvToString(Hsv color) { return((int)color.Hue + "," + (int)color.Satuation + "," + (int)color.Value); }
private void ProcessFrame(object sender, EventArgs arg) { Image<Bgr, Byte> imageFrame=null; while (imageFrame == null) { imageFrame = capture.QueryFrame(); } imageFrame = imageFrame.Flip(Emgu.CV.CvEnum.FLIP.HORIZONTAL); Image<Hsv, Byte> hsvFrame = imageFrame.Convert<Hsv, Byte>(); if (calibrationHSVLevel) { CalibrateHSV(ref hsvFrame, ref histogram); imageFrame = hsvFrame.Convert<Bgr, byte>(); } else { if (histogram != null) { //* //BACKPROJECTION //* //* powinno działać, ale nie chce - nie wiem czemu, a nie ma nic gorszego od czytania histogramów //* /*Image<Gray, byte>[] channels = imageFrame.Split(); Image<Gray, byte> backProjection = histogram.BackProject<byte>(channels); Image<Gray, byte> mask = hsvFrame.InRange( new Hsv(hue_min, saturation_min, value_min), new Hsv(hue_max, saturation_max, value_max) ); backProjection.And(mask); hsvFrame = backProjection.Convert<Hsv, byte>().Dilate(5).Erode(3);//*/ Hsv Hsv_min = new Hsv(hue_min, saturation_min, value_min); Hsv Hsv_max = new Hsv(hue_max, saturation_max, value_max); Image<Gray, Byte> skin = getSkinOnImage(hsvFrame, Hsv_min, Hsv_max); FindContourAndConvexHull(skin, imageFrame); hand = ComputeHandInfo(imageFrame); if (hand != null) { label16.Text = "" + hand.FingersCount; label15.Text = "" + hand.HandSizeRatio(); label14.Text = "" + hand.Height; label13.Text = "" + hand.Left; label12.Text = "" + hand.Top; } } } addFPS(imageFrame, 10, 30); imageBox1.Image = imageFrame; //imageBox2.Image = hsvFrame; }
/// <summary> /// Convert <see cref="Hsv"/> to <see cref="Color"/>. /// </summary> /// <param name="converter">Converter.</param> /// <param name="hsv">Source <see cref="Hsv"/>.</param> /// <returns>Converted <see cref="Color"/>.</returns> public static Color ToColor(this ColorSpaceConverter converter, Hsv hsv) { return(ToColor(converter.ToRgb(hsv))); }
public static Bgr[] GetRandomPalette(int paletteSize) { Image<Hsv,byte> hsvPalette = new Image<Hsv,byte>(paletteSize,1); Image<Bgr,byte> bgrPalette; Bgr[] palette = new Bgr[paletteSize]; for (int i = 0; i < paletteSize; ++i) { int x = i*25; hsvPalette[0,i] = new Hsv(x % 180, 255*(1-(x/900*0.25)) ,255); } bgrPalette = hsvPalette.Convert<Bgr, byte>(); palette[0] = new Bgr(0, 0, 0); for (int i = 1; i < paletteSize; ++i) { palette[i] = bgrPalette[0,i]; } hsvPalette.Dispose(); bgrPalette.Dispose(); return palette; }
public Bitmap MainHandler(Image <Bgr, Byte> input, int showStep) { Bitmap result = input.ToBitmap(); //TODO Cut out the Motor //Create HSV Image Image <Hsv, Byte> Im_Hsv = input.Convert <Hsv, byte>(); //Histogrammlinearisierung der Satuierung Image <Gray, Byte> Linchan = Im_Hsv[1]; Linchan._EqualizeHist(); Im_Hsv[1] = Linchan; Linchan.Dispose(); if (showStep == 0) { result = Im_Hsv.ToBitmap(); } //Define Color Masks Image <Gray, byte> redcut; Image <Gray, byte> bluecut; Image <Gray, byte> yellcut; //Create Red mask Hsv redu = new Hsv(0, 220, 0); Hsv redo = new Hsv(10, 255, 255); Image <Gray, byte> undercut = Im_Hsv.InRange(redu, redo); redu = new Hsv(160, 220, 0); redo = new Hsv(179, 255, 255); Image <Gray, byte> uppercut = Im_Hsv.InRange(redu, redo); redcut = undercut.Add(uppercut); undercut.Dispose(); uppercut.Dispose(); //Create Blue Mask redu = new Hsv(110, 220, 0); redo = new Hsv(130, 255, 255); bluecut = Im_Hsv.InRange(redu, redo); //Create Yellow Mask redu = new Hsv(25, 130, 0); redo = new Hsv(35, 255, 255); yellcut = Im_Hsv.InRange(redu, redo); //Median on Masks -not necessary Image <Gray, Byte> totalcutMed = redcut.SmoothMedian(3); totalcutMed = totalcutMed.Add(yellcut.SmoothMedian(3)).Add(bluecut.SmoothMedian(3)); //Edge Detection - Sobel Image <Gray, Single> IM_Edge = Im_Hsv.Convert <Gray, Single>(); IM_Edge = IM_Edge.Sobel(1, 0, 3); switch (showStep) { case 1: result = redcut.ToBitmap(); break; case 2: result = yellcut.ToBitmap(); break; case 3: result = bluecut.ToBitmap(); break; case 4: result = redcut.Add(yellcut).Add(bluecut).ToBitmap(); break; case 5: result = totalcutMed.ToBitmap(); break; case 6: result = IM_Edge.ToBitmap(); break; default: result = Im_Hsv.ToBitmap(); break; } return(result); }