public PointF NormalizedCentroid(Segment s)
{
    //Normalize the points relative to the image center, which maps to (0, 0)
    PointF[] normalizedPoints = s.points.Select(p => new PointF(-0.5f + (float)p.X / imageWidth, -0.5f + (float)p.Y / imageHeight)).ToArray();

    //Average the normalized points to get the centroid
    float cX = 0;
    float cY = 0;
    foreach (PointF p in normalizedPoints)
    {
        cX += p.X;
        cY += p.Y;
    }
    cX /= normalizedPoints.Length;
    cY /= normalizedPoints.Length;

    return new PointF(cX, cY);
}
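//A minimal illustrative helper (a sketch, not part of the original pipeline; the name NormalizePoint is hypothetical)
//showing the normalization shared by the methods above and below: the image center maps to (0, 0) and every pixel
//lands in roughly [-0.5, 0.5) on each axis, e.g. pixel (0, 0) maps to (-0.5, -0.5).
private PointF NormalizePoint(Point p)
{
    return new PointF(-0.5f + (float)p.X / imageWidth, -0.5f + (float)p.Y / imageHeight);
}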
//Shared RNG so that rapid repeated calls do not reuse the same time-based seed
private static readonly Random rand = new Random();

public PointF RandomNormalizedInteriorPoint(Segment s)
{
    //Normalize the points relative to the image center (0, 0)
    PointF[] normalizedPoints = s.points.Select(p => new PointF(-0.5f + (float)p.X / imageWidth, -0.5f + (float)p.Y / imageHeight)).ToArray();

    //Pick one of the segment's pixels uniformly at random (note: it may lie on the boundary rather than strictly inside)
    int randIndex = rand.Next(normalizedPoints.Length);
    return normalizedPoints[randIndex];
}
public void ComputeFeatures(Segment s)
{
    //Add the relative size (segment area as a fraction of the image area)
    NamedFeature f = new NamedFeature("RelativeSize");
    f.values.Add(s.points.Count() / (double)(imageWidth * imageHeight));
    s.features.Add(f);

    //Relative centroid
    PointF c = NormalizedCentroid(s);
    s.features.Add(new NamedFeature("RelativeCentroid", new List<double> { c.X, c.Y }));

    //One interior point
    PointF np = RandomNormalizedInteriorPoint(s);
    s.features.Add(new NamedFeature("OneInteriorPoint", new List<double> { np.X, np.Y }));

    //Radial distance of the centroid from the image center
    s.features.Add(new NamedFeature("RadialDistance", new List<double> { Math.Sqrt(c.X * c.X + c.Y * c.Y) }));

    //Normalized discrete compactness
    //http://www.m-hikari.com/imf-password2009/25-28-2009/bribiescaIMF25-28-2009.pdf
    //Find the segment id
    Point sp = s.points.First();
    int sidx = assignments[sp.X, sp.Y];

    //Count the perimeter edges: 4-connected neighbors that belong to another segment or fall outside the image
    int perimeter = 0;
    foreach (Point p in s.points)
    {
        for (int i = -1; i <= 1; i++)
        {
            for (int j = -1; j <= 1; j++)
            {
                if (Math.Abs(i) == Math.Abs(j))
                    continue; //skip diagonals and the center pixel; only 4-connected neighbors count

                if (!Util.InBounds(p.X + i, p.Y + j, imageWidth, imageHeight))
                    perimeter++; //pixels on the image border contribute perimeter edges too
                else if (assignments[p.X + i, p.Y + j] != sidx)
                    perimeter++;
            }
        }
    }
    int n = s.points.Count();
    double CD = (4.0 * n - perimeter) / 2;           //number of contact (shared) edges
    double CDmin = n - 1;                            //minimum contact edges: a 1-pixel-wide strip
    double CDmax = (4 * n - 4 * Math.Sqrt(n)) / 2;   //maximum contact edges: a solid square
    double CDN = (CD - CDmin) / Math.Max(1, CDmax - CDmin);
    s.features.Add(new NamedFeature("NormalizedDiscreteCompactness", new List<double> { CDN }));

    //Add elongation, normalized so that 0 means a square bounding box and values approaching 1 mean a long, thin one
    //http://hal.archives-ouvertes.fr/docs/00/44/60/37/PDF/ARS-Journal-SurveyPatternRecognition.pdf
    PointF[] points = s.points.Select(p => new PointF(p.X, p.Y)).ToArray();
    Emgu.CV.Structure.MCvBox2D box = Emgu.CV.PointCollection.MinAreaRect(points);
    double elongation = 1 - Math.Min(box.size.Width + 1, box.size.Height + 1) / Math.Max(box.size.Width + 1, box.size.Height + 1);
    s.features.Add(new NamedFeature("Elongation", new List<double> { elongation }));

    //Add Hu shape moments, which are invariant to translation, scale, and rotation
    //(the individual moments have no simple intuitive interpretation, and they can be sensitive to noise)
    //See: http://hal.archives-ouvertes.fr/docs/00/44/60/37/PDF/ARS-Journal-SurveyPatternRecognition.pdf (called Invariant Moments)
    using (Bitmap regionBitmap = new Bitmap(imageWidth, imageHeight))
    using (Graphics g = Graphics.FromImage(regionBitmap))
    {
        //Rasterize the segment as a white-on-black mask
        g.FillRectangle(new SolidBrush(Color.Black), 0, 0, imageWidth, imageHeight);
        foreach (Point p in s.points)
            regionBitmap.SetPixel(p.X, p.Y, Color.White);

        using (Emgu.CV.Image<Gray, byte> region = new Emgu.CV.Image<Gray, byte>(regionBitmap))
        {
            MCvMoments moment = region.GetMoments(true);
            MCvHuMoments hu = moment.GetHuMoment();
            s.features.Add(new NamedFeature("HuMoments", new List<double> { hu.hu1, hu.hu2, hu.hu3, hu.hu4, hu.hu5, hu.hu6, hu.hu7 }));
        }
    }
}
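//Optional sanity check for the normalized discrete compactness above (a sketch, not part of the feature pipeline;
//the method name is hypothetical and it assumes System.Collections.Generic and System.Linq are imported).
//It mirrors the 4-neighbor perimeter count in ComputeFeatures, except that any pixel outside the given set is
//treated as non-segment. A solid k x k square should score 1 (CD == CDmax) and a 1-pixel-wide strip should score 0
//(CD == CDmin).
private static double DiscreteCompactnessOfPixelSet(HashSet<Point> pixels)
{
    int[] dx = { 1, -1, 0, 0 };
    int[] dy = { 0, 0, 1, -1 };
    int perimeter = 0;
    foreach (Point p in pixels)
        for (int k = 0; k < 4; k++)
            if (!pixels.Contains(new Point(p.X + dx[k], p.Y + dy[k])))
                perimeter++; //any 4-neighbor outside the set counts as a perimeter edge

    int n = pixels.Count;
    double CD = (4.0 * n - perimeter) / 2;           //contact (shared) edges
    double CDmin = n - 1;                            //1-pixel-wide strip
    double CDmax = (4 * n - 4 * Math.Sqrt(n)) / 2;   //solid square
    return (CD - CDmin) / Math.Max(1, CDmax - CDmin);
}

//Example usage (expected values worked out by hand):
//  var square = new HashSet<Point>(from x in Enumerable.Range(0, 8)
//                                  from y in Enumerable.Range(0, 8)
//                                  select new Point(x, y));
//  DiscreteCompactnessOfPixelSet(square); // n = 64, perimeter = 32, CD = CDmax = 112 -> returns 1.0
//  var strip = new HashSet<Point>(Enumerable.Range(0, 64).Select(x => new Point(x, 0)));
//  DiscreteCompactnessOfPixelSet(strip);  // n = 64, perimeter = 130, CD = CDmin = 63 -> returns 0.0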