// returns a less complex polygon that satisfies the curve tolerance
public static NFP cleanPolygon(NFP polygon)
{
    var p = svgToClipper2(polygon);

    // remove self-intersections and find the biggest polygon that's left
    var simple = ClipperLib.Clipper.SimplifyPolygon(p.ToList(), ClipperLib.PolyFillType.pftNonZero);
    if (simple == null || simple.Count == 0)
    {
        return(null);
    }

    var biggest = simple[0];
    var biggestarea = Math.Abs(ClipperLib.Clipper.Area(biggest));
    for (var i = 1; i < simple.Count; i++)
    {
        var area = Math.Abs(ClipperLib.Clipper.Area(simple[i]));
        if (area > biggestarea)
        {
            biggest = simple[i];
            biggestarea = area;
        }
    }

    // clean up singularities, coincident points and edges
    var clean = ClipperLib.Clipper.CleanPolygon(biggest, 0.01 * Config.curveTolerance * Config.clipperScale);
    if (clean == null || clean.Count == 0)
    {
        return(null);
    }

    return(clipperToSvg(clean));
}
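// Illustrative usage sketch (not part of the original port): a self-intersecting
// "bowtie" is split into two lobes by SimplifyPolygon; cleanPolygon returns the
// largest remaining piece with singular/coincident points removed.
public static NFP CleanPolygonExample()
{
    var bowtie = new NFP();
    bowtie.AddPoint(new SvgPoint(0, 0));
    bowtie.AddPoint(new SvgPoint(10, 10));
    bowtie.AddPoint(new SvgPoint(10, 0));
    bowtie.AddPoint(new SvgPoint(0, 10)); // edges cross at (5, 5)
    return cleanPolygon(bowtie);
}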
// rest of the code doesn't care about point format
// basic distance-based simplification
public static NFP simplifyRadialDist(NFP points, double? sqTolerance)
{
    var prevPoint = points[0];
    var newPoints = new NFP();
    newPoints.AddPoint(prevPoint);

    SvgPoint point = null;
    int i = 1;
    for (var len = points.length; i < len; i++)
    {
        point = points[i];
        if (point.marked || getSqDist(point, prevPoint) > sqTolerance)
        {
            newPoints.AddPoint(point);
            prevPoint = point;
        }
    }

    if (prevPoint != point)
    {
        newPoints.AddPoint(point);
    }

    return(newPoints);
}
// use the clipper library to return an offset to the given polygon. Positive offset expands the polygon, negative contracts
// note that this returns an array of polygons
public static NFP[] polygonOffsetDeepNest(NFP polygon, double offset)
{
    if (offset == 0 || GeometryUtil._almostEqual(offset, 0))
    {
        return(new[] { polygon });
    }

    var p = svgToClipper(polygon).ToList();

    var miterLimit = 4;
    var co = new ClipperLib.ClipperOffset(miterLimit, Config.curveTolerance * Config.clipperScale);
    co.AddPath(p.ToList(), ClipperLib.JoinType.jtMiter, ClipperLib.EndType.etClosedPolygon);

    var newpaths = new List<List<ClipperLib.IntPoint>>();
    co.Execute(ref newpaths, offset * Config.clipperScale);

    var result = new List<NFP>();
    for (var i = 0; i < newpaths.Count; i++)
    {
        result.Add(clipperToSvg(newpaths[i]));
    }

    return(result.ToArray());
}
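// Illustrative usage sketch (not part of the original port): grow a 10x10 part
// by half the configured part spacing so nested parts keep a clearance gap,
// mirroring how NestIterate applies spacing elsewhere. Assumes Config.spacing
// as used in this codebase; a negative offset would contract instead.
public static NFP[] OffsetExample()
{
    var square = new NFP();
    square.AddPoint(new SvgPoint(0, 0));
    square.AddPoint(new SvgPoint(10, 0));
    square.AddPoint(new SvgPoint(10, 10));
    square.AddPoint(new SvgPoint(0, 10));
    return polygonOffsetDeepNest(square, 0.5 * Config.spacing);
}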
// converts a polygon from normal float coordinates to integer coordinates used by clipper, as well as x/y -> X/Y
public static ClipperLib.IntPoint[] svgToClipper(NFP polygon)
{
    var d = _Clipper.ScaleUpPaths(polygon, Config.clipperScale);
    return(d.ToArray());

    // unreachable leftover from an earlier revision, kept for reference:
    //return(polygon.Points.Select(z => new IntPoint((long)z.x, (long)z.y)).ToArray());
}
public static bool pointInPolygon(SvgPoint point, NFP polygon)
{
    // scaling is deliberately coarse to filter out points that lie *on* the polygon
    var p = svgToClipper2(polygon, 1000);
    var pt = new ClipperLib.IntPoint(1000 * point.x, 1000 * point.y);
    return(ClipperLib.Clipper.PointInPolygon(pt, p.ToList()) > 0);
}
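// Illustrative usage sketch (not part of the original port): test points
// against a 10x10 square; (5,5) is strictly inside, (15,5) is outside.
public static void PointInPolygonExample()
{
    var square = new NFP();
    square.AddPoint(new SvgPoint(0, 0));
    square.AddPoint(new SvgPoint(10, 0));
    square.AddPoint(new SvgPoint(10, 10));
    square.AddPoint(new SvgPoint(0, 10));

    bool inside = pointInPolygon(new SvgPoint(5, 5), square);   // true
    bool outside = pointInPolygon(new SvgPoint(15, 5), square); // false
}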
public static SvgPoint getTarget(SvgPoint o, NFP simple, double tol)
{
    List<InrangeItem> inrange = new List<InrangeItem>();
    // find closest points within 2 offset deltas
    for (var j = 0; j < simple.length; j++)
    {
        var s = simple[j];
        var d2 = (o.x - s.x) * (o.x - s.x) + (o.y - s.y) * (o.y - s.y);
        if (d2 < tol * tol)
        {
            inrange.Add(new InrangeItem() { point = s, distance = d2 });
        }
    }

    SvgPoint target = null;
    if (inrange.Count > 0)
    {
        var filtered = inrange.Where((p) => { return(p.point.exact); }).ToList();

        // use exact points when available, normal points when not
        inrange = filtered.Count > 0 ? filtered : inrange;
        inrange = inrange.OrderBy((b) => { return(b.distance); }).ToList();
        target = inrange[0].point;
    }
    else
    {
        // nothing within tolerance: fall back to the globally nearest point
        double? mind = null;
        for (int j = 0; j < simple.length; j++)
        {
            var s = simple[j];
            var d2 = (o.x - s.x) * (o.x - s.x) + (o.y - s.y) * (o.y - s.y);
            if (mind == null || d2 < mind)
            {
                target = s;
                mind = d2;
            }
        }
    }

    return(target);
}
public static int? find(SvgPoint v, NFP p)
{
    for (var i = 0; i < p.length; i++)
    {
        if (GeometryUtil._withinDistance(v, p[i], Config.curveTolerance / 1000))
        {
            return(i);
        }
    }
    return(null);
}
public NFP slice(int v)
{
    var ret = new NFP();
    List<SvgPoint> pp = new List<SvgPoint>();
    for (int i = v; i < length; i++)
    {
        pp.Add(new SvgPoint(this[i].x, this[i].y));
    }
    ret.Points = pp.ToArray();
    return(ret);
}
// simplification using Ramer-Douglas-Peucker algorithm
public static NFP simplifyDouglasPeucker(NFP points, double? sqTolerance)
{
    var last = points.length - 1;

    var simplified = new NFP();
    simplified.AddPoint(points[0]);
    simplifyDPStep(points, 0, last, sqTolerance, simplified);
    simplified.push(points[last]);

    return(simplified);
}
public static NFP GetMinimumBox(NFP vv)
{
    var hull = Background.getHull(new NFP() { Points = vv.Points.Select(z => new SvgPoint(z.x, z.y)).ToArray() });

    double minArea = double.MaxValue;
    List<SvgPoint> rect = new List<SvgPoint>();
    for (int i = 0; i < hull.Length; i++)
    {
        // rotate so the current hull edge is axis-aligned, then measure the axis-aligned box
        var p0 = hull.Points[i];
        var p1 = hull.Points[(i + 1) % hull.Length];
        var dx = p1.x - p0.x;
        var dy = p1.y - p0.y;
        var atan = Math.Atan2(dy, dx);

        List<SvgPoint> dd = new List<SvgPoint>();
        for (int j = 0; j < vv.Length; j++)
        {
            var r = RotatePoint(new SvgPoint(vv[j].x, vv[j].y), 0, 0, -atan);
            dd.Add(r);
        }

        var maxx = dd.Max(z => z.x);
        var maxy = dd.Max(z => z.y);
        var minx = dd.Min(z => z.x);
        var miny = dd.Min(z => z.y);
        var area = (maxx - minx) * (maxy - miny);
        if (area < minArea)
        {
            minArea = area;
            rect.Clear();
            rect.Add(new SvgPoint(minx, miny));
            rect.Add(new SvgPoint(maxx, miny));
            rect.Add(new SvgPoint(maxx, maxy));
            rect.Add(new SvgPoint(minx, maxy));

            // rotate the winning rectangle back into the original frame
            for (int j = 0; j < rect.Count; j++)
            {
                rect[j] = RotatePoint(new SvgPoint(rect[j].x, rect[j].y), 0, 0, atan);
            }
        }
    }

    NFP ret = new NFP();
    ret.Points = rect.ToArray();
    return(ret);
}
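// Illustrative usage sketch (not part of the original port): the minimum-area
// box of a square rotated 45 degrees is the tilted square itself (area 200),
// not the larger axis-aligned bounding box (area 400).
public static NFP MinimumBoxExample()
{
    var diamond = new NFP();
    diamond.AddPoint(new SvgPoint(0, -10));
    diamond.AddPoint(new SvgPoint(10, 0));
    diamond.AddPoint(new SvgPoint(0, 10));
    diamond.AddPoint(new SvgPoint(-10, 0));
    return GetMinimumBox(diamond); // four corners of the tilted rectangle
}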
public void AddRectanglePart(int src, int ww = 50, int hh = 80)
{
    int xx = 0;
    int yy = 0;
    NFP pl = new NFP();

    Polygons.Add(pl);
    pl.source = src;
    pl.Points = new SvgPoint[] { };
    pl.AddPoint(new SvgPoint(xx, yy));
    pl.AddPoint(new SvgPoint(xx + ww, yy));
    pl.AddPoint(new SvgPoint(xx + ww, yy + hh));
    pl.AddPoint(new SvgPoint(xx, yy + hh));
}
// both algorithms combined for awesome performance
public static NFP simplify(NFP points, double? tolerance, bool highestQuality)
{
    if (points.length <= 2)
    {
        return(points);
    }

    var sqTolerance = (tolerance != null) ? (tolerance * tolerance) : 1;

    points = highestQuality ? points : simplifyRadialDist(points, sqTolerance);
    points = simplifyDouglasPeucker(points, sqTolerance);

    return(points);
}
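// Illustrative usage sketch (not part of the original port): collapse a nearly
// collinear point on a jagged path with a tolerance of 1 unit. highestQuality
// = false lets the cheap radial pass run before Douglas-Peucker.
public static NFP SimplifyExample()
{
    var path = new NFP();
    path.AddPoint(new SvgPoint(0, 0));
    path.AddPoint(new SvgPoint(5, 0.01)); // nearly collinear, should be dropped
    path.AddPoint(new SvgPoint(10, 0));
    path.AddPoint(new SvgPoint(10, 10));
    return simplify(path, 1.0, false);
}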
static NFP boundingBox(NFP offset)
{
    NFP ret = new NFP();
    var maxx = offset.Points.Max(z => z.x);
    var maxy = offset.Points.Max(z => z.y);
    var minx = offset.Points.Min(z => z.x);
    var miny = offset.Points.Min(z => z.y);
    ret.AddPoint(new SvgPoint(minx, miny));
    ret.AddPoint(new SvgPoint(maxx, miny));
    ret.AddPoint(new SvgPoint(maxx, maxy));
    ret.AddPoint(new SvgPoint(minx, maxy));
    return(ret);
}
// 2 secs
public static ClipperLib.IntPoint[] ScaleUpPaths(NFP p, double scale = 1)
{
    List<ClipperLib.IntPoint> ret = new List<ClipperLib.IntPoint>();
    for (int i = 0; i < p.Points.Count(); i++)
    {
        //p.Points[i] = new SvgNestPort.SvgPoint((float)Math.Round(p.Points[i].x * scale), (float)Math.Round(p.Points[i].y * scale));
        ret.Add(new ClipperLib.IntPoint(
            (long)Math.Round((decimal)p.Points[i].x * (decimal)scale),
            (long)Math.Round((decimal)p.Points[i].y * (decimal)scale)));
    }
    return(ret.ToArray());
}
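// Illustrative sketch (not part of the original port): scaling (1.25, 2.5) by
// 10000 yields the integer Clipper point (12500, 25000). The rounding above
// goes through decimal to avoid double rounding artifacts on large coordinates.
public static void ScaleUpExample()
{
    var p = new NFP();
    p.AddPoint(new SvgPoint(1.25, 2.5));
    var scaled = ScaleUpPaths(p, 10000); // scaled[0] == IntPoint(12500, 25000)
}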
public static NFP clone(NFP p)
{
    var newp = new NFP();
    for (var i = 0; i < p.length; i++)
    {
        newp.AddPoint(new SvgPoint(p[i].x, p[i].y));
    }
    return(newp);
}
public static IntPoint[] toClipperCoordinates(NFP polygon)
{
    var clone = new List<IntPoint>();
    for (var i = 0; i < polygon.length; i++)
    {
        clone.Add(new IntPoint(polygon[i].x, polygon[i].y));
    }
    return(clone.ToArray());
}
// offset tree recursively
public static void offsetTree(NFP t, double offset, SvgNestConfig config, bool? inside = null)
{
    var simple = t;
    simple = simplifyFunction(t, (inside == null) ? false : inside.Value);
    var offsetpaths = new NFP[] { simple };
    if (offset > 0)
    {
        offsetpaths = polygonOffsetDeepNest(simple, offset);
    }

    if (offsetpaths.Count() > 0)
    {
        List<SvgPoint> rett = new List<SvgPoint>();
        rett.AddRange(offsetpaths[0].Points);
        rett.AddRange(t.Points.Skip(t.length)); // Skip(t.length) contributes nothing; kept to mirror the JS splice below
        t.Points = rett.ToArray();

        // replace array items in place
        //Array.prototype.splice.apply(t, [0, t.length].concat(offsetpaths[0]));
    }

    if (simple.children != null && simple.children.Count > 0)
    {
        if (t.children == null)
        {
            t.children = new List<NFP>();
        }
        for (var i = 0; i < simple.children.Count; i++)
        {
            t.children.Add(simple.children[i]);
        }
    }

    if (t.children != null && t.children.Count > 0)
    {
        for (var i = 0; i < t.children.Count; i++)
        {
            // children alternate between outside and inside, so holes get the opposite offset sign
            offsetTree(t.children[i], -offset, config, (inside == null) ? true : (!inside));
        }
    }
}
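// Illustrative usage sketch (not part of the original port): inflate a part
// tree by half the configured part spacing before nesting, as NestIterate does
// elsewhere in this codebase (sheets get the negated offset with inside = true).
public static void OffsetTreeExample(NFP part)
{
    offsetTree(part, 0.5 * Config.spacing, Config);
}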
// returns true if any complex vertices fall outside the simple polygon
public static bool exterior(NFP simple, NFP complex, bool inside)
{
    // find all protruding vertices
    for (var i = 0; i < complex.length; i++)
    {
        var v = complex[i];
        if (!inside && !pointInPolygon(v, simple) && find(v, simple) == null)
        {
            return(true);
        }
        if (inside && pointInPolygon(v, simple) && find(v, simple) != null)
        {
            return(true);
        }
    }
    return(false);
}
public static NFP cleanPolygon2(NFP polygon)
{
    var p = svgToClipper(polygon);

    // remove self-intersections and find the biggest polygon that's left
    var simple = ClipperLib.Clipper.SimplifyPolygon(p.ToList(), ClipperLib.PolyFillType.pftNonZero);
    if (simple == null || simple.Count == 0)
    {
        return(null);
    }

    var biggest = simple[0];
    var biggestarea = Math.Abs(ClipperLib.Clipper.Area(biggest));
    for (var i = 1; i < simple.Count; i++)
    {
        var area = Math.Abs(ClipperLib.Clipper.Area(simple[i]));
        if (area > biggestarea)
        {
            biggest = simple[i];
            biggestarea = area;
        }
    }

    // clean up singularities, coincident points and edges
    var clean = ClipperLib.Clipper.CleanPolygon(biggest, 0.01 * Config.curveTolerance * Config.clipperScale);
    if (clean == null || clean.Count == 0)
    {
        return(null);
    }

    var cleaned = clipperToSvg(clean);

    // remove duplicate endpoints
    var start = cleaned[0];
    var end = cleaned[cleaned.length - 1];
    if (start == end || (GeometryUtil._almostEqual(start.x, end.x)
        && GeometryUtil._almostEqual(start.y, end.y)))
    {
        cleaned.Points = cleaned.Points.Take(cleaned.Points.Count() - 1).ToArray();
    }

    return(cleaned);
}
/// <summary>
/// Clip the subject so it stays inside the clipBounds.
/// </summary>
/// <param name="subject">Polygon to clip.</param>
/// <param name="clipBounds">Boundary the subject must stay within.</param>
/// <param name="clipperScale">Scale used to convert the result back to nest coordinates.</param>
/// <returns>The intersection, or the unchanged subject if clipping fails.</returns>
internal static NFP ClipSubject(NFP subject, NFP clipBounds, double clipperScale)
{
    var clipperSubject = Background.innerNfpToClipperCoordinates(new NFP[] { subject }, SvgNest.Config);
    var clipperClip = Background.innerNfpToClipperCoordinates(new NFP[] { clipBounds }, SvgNest.Config);

    var clipper = new Clipper();
    clipper.AddPaths(clipperClip.Select(z => z.ToList()).ToList(), PolyType.ptClip, true);
    clipper.AddPaths(clipperSubject.Select(z => z.ToList()).ToList(), PolyType.ptSubject, true);

    List<List<IntPoint>> finalNfp = new List<List<IntPoint>>();
    if (clipper.Execute(ClipType.ctIntersection, finalNfp, PolyFillType.pftNonZero, PolyFillType.pftNonZero)
        && finalNfp != null && finalNfp.Count > 0)
    {
        return(Background.toNestCoordinates(finalNfp[0].ToArray(), clipperScale));
    }

    return(subject);
}
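// Illustrative usage sketch (not part of the original port): clamp one square
// to another; the result is their overlapping region (the 5..10 x 5..10 square).
internal static NFP ClipSubjectExample()
{
    var subject = new NFP();
    subject.AddPoint(new SvgPoint(5, 5));
    subject.AddPoint(new SvgPoint(15, 5));
    subject.AddPoint(new SvgPoint(15, 15));
    subject.AddPoint(new SvgPoint(5, 15));

    var bounds = new NFP();
    bounds.AddPoint(new SvgPoint(0, 0));
    bounds.AddPoint(new SvgPoint(10, 0));
    bounds.AddPoint(new SvgPoint(10, 10));
    bounds.AddPoint(new SvgPoint(0, 10));

    return ClipSubject(subject, bounds, SvgNest.Config.clipperScale);
}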
public NFP ImportFromRawDetail(RawDetail raw, int src)
{
    NFP po = null;
    List<NFP> nfps = new List<NFP>();
    foreach (var item in raw.Outers)
    {
        var nn = new NFP();
        nfps.Add(nn);
        foreach (var pitem in item.Points)
        {
            nn.AddPoint(new SvgPoint(pitem.X, pitem.Y));
        }
    }

    if (nfps.Any())
    {
        // the largest outline becomes the part; all other outlines become its children (holes)
        var tt = nfps.OrderByDescending(z => z.Area).First();
        po = tt;
        po.Name = raw.Name;

        foreach (var r in nfps)
        {
            if (r == tt)
            {
                continue;
            }
            if (po.children == null)
            {
                po.children = new List<NFP>();
            }
            po.children.Add(r);
        }

        po.source = src;
        Polygons.Add(po);
    }
    return(po);
}
public static void simplifyDPStep(NFP points, int first, int last, double? sqTolerance, NFP simplified)
{
    var maxSqDist = sqTolerance;
    var index = -1;
    var marked = false;
    for (var i = first + 1; i < last; i++)
    {
        var sqDist = getSqSegDist(points[i], points[first], points[last]);
        if (sqDist > maxSqDist)
        {
            index = i;
            maxSqDist = sqDist;
        }
        /*if(points[i].marked && maxSqDist <= sqTolerance){
            index = i;
            marked = true;
        }*/
    }

    if (maxSqDist > sqTolerance || marked)
    {
        if (index - first > 1)
        {
            simplifyDPStep(points, first, index, sqTolerance, simplified);
        }
        simplified.push(points[index]);
        if (last - index > 1)
        {
            simplifyDPStep(points, index, last, sqTolerance, simplified);
        }
    }
}
public NFP ToNfp()
{
    NFP po = null;
    List<NFP> nfps = new List<NFP>();
    foreach (var item in Outers)
    {
        var nn = new NFP();
        nfps.Add(nn);
        foreach (var pitem in item.Points)
        {
            nn.AddPoint(new SvgPoint(pitem.X, pitem.Y));
        }
    }

    if (nfps.Any())
    {
        var tt = nfps.OrderByDescending(z => z.Area).First();
        po = tt;
        po.Name = Name;

        foreach (var r in nfps)
        {
            if (r == tt)
            {
                continue;
            }
            if (po.children == null)
            {
                po.children = new List<NFP>();
            }
            po.children.Add(r);
        }
    }
    return(po);
}
public static NFP cloneTree(NFP tree)
{
    NFP newtree = new NFP();
    foreach (var t in tree.Points)
    {
        newtree.AddPoint(new SvgPoint(t.x, t.y) { exact = t.exact });
    }

    if (tree.children != null && tree.children.Count > 0)
    {
        newtree.children = new List<NFP>();
        foreach (var c in tree.children)
        {
            newtree.children.Add(cloneTree(c));
        }
    }
    return(newtree);
}
public static NFP simplifyFunction(NFP polygon, bool inside)
{
    var tolerance = 4 * Config.curveTolerance;

    // give special treatment to line segments above this length (squared)
    var fixedTolerance = 40 * Config.curveTolerance * 40 * Config.curveTolerance;
    int i, j;

    if (Config.simplify)
    {
        /*
        // use convex hull
        var hull = new ConvexHullGrahamScan();
        for(var i=0; i<polygon.length; i++){
            hull.addPoint(polygon[i].x, polygon[i].y);
        }

        return hull.getHull();*/
        var hull = Background.getHull(polygon);
        if (hull != null)
        {
            return(hull);
        }
        else
        {
            return(polygon);
        }
    }

    var cleaned = cleanPolygon2(polygon);
    if (cleaned != null && cleaned.length > 1)
    {
        polygon = cleaned;
    }
    else
    {
        return(polygon);
    }

    // polygon to polyline
    var copy = polygon.slice(0);
    copy.push(copy[0]);

    // mark all segments greater than ~0.25 in to be kept
    // the DP simplification algo doesn't care about the accuracy of long lines, only the absolute distance of each point
    // we care a great deal
    for (i = 0; i < copy.length - 1; i++)
    {
        var p1 = copy[i];
        var p2 = copy[i + 1];
        var sqd = (p2.x - p1.x) * (p2.x - p1.x) + (p2.y - p1.y) * (p2.y - p1.y);
        if (sqd > fixedTolerance)
        {
            p1.marked = true;
            p2.marked = true;
        }
    }

    var simple = Simplify.simplify(copy, tolerance, true);

    // now a polygon again
    //simple.pop();
    simple.Points = simple.Points.Take(simple.Points.Count() - 1).ToArray();

    // could be dirty again (self intersections and/or coincident points)
    simple = cleanPolygon2(simple);

    // simplification process reduced poly to a line or point
    if (simple == null)
    {
        simple = polygon;
    }

    var offsets = polygonOffsetDeepNest(simple, inside ? -tolerance : tolerance);

    NFP offset = null;
    double offsetArea = 0;
    List<NFP> holes = new List<NFP>();
    for (i = 0; i < offsets.Length; i++)
    {
        var area = GeometryUtil.polygonArea(offsets[i]);
        if (offset == null || area < offsetArea)
        {
            offset = offsets[i];
            offsetArea = area;
        }
        if (area > 0)
        {
            holes.Add(offsets[i]);
        }
    }

    // mark any points that are exact
    for (i = 0; i < simple.length; i++)
    {
        var seg = new NFP();
        seg.AddPoint(simple[i]);
        seg.AddPoint(simple[i + 1 == simple.length ? 0 : i + 1]);

        var index1 = find(seg[0], polygon);
        var index2 = find(seg[1], polygon);

        if (index1 + 1 == index2 || index2 + 1 == index1
            || (index1 == 0 && index2 == polygon.length - 1)
            || (index2 == 0 && index1 == polygon.length - 1))
        {
            seg[0].exact = true;
            seg[1].exact = true;
        }
    }

    var numshells = 4;
    NFP[] shells = new NFP[numshells];
    for (j = 1; j < numshells; j++)
    {
        var delta = j * (tolerance / numshells);
        delta = inside ? -delta : delta;
        var shell = polygonOffsetDeepNest(simple, delta);
        if (shell.Count() > 0)
        {
            shells[j] = shell.First();
        }
        else
        {
            //shells[j] = shell;
        }
    }

    if (offset == null)
    {
        return(polygon);
    }

    // selective reversal of offset
    for (i = 0; i < offset.length; i++)
    {
        var o = offset[i];
        var target = getTarget(o, simple, 2 * tolerance);

        // reverse point offset and try to find exterior points
        var test = clone(offset);
        test.Points[i] = new SvgPoint(target.x, target.y);
        if (!exterior(test, polygon, inside))
        {
            o.x = target.x;
            o.y = target.y;
        }
        else
        {
            // a shell is an intermediate offset between simple and offset
            for (j = 1; j < numshells; j++)
            {
                if (shells[j] != null)
                {
                    var shell = shells[j];
                    var delta = j * (tolerance / numshells);
                    target = getTarget(o, shell, 2 * delta);
                    test = clone(offset);
                    test.Points[i] = new SvgPoint(target.x, target.y);
                    if (!exterior(test, polygon, inside))
                    {
                        o.x = target.x;
                        o.y = target.y;
                        break;
                    }
                }
            }
        }
    }

    // straighten long lines
    // a rounded rectangle would still have issues at this point, as the long sides won't line up straight
    var straightened = false;
    for (i = 0; i < offset.length; i++)
    {
        var p1 = offset[i];
        var p2 = offset[i + 1 == offset.length ? 0 : i + 1];

        var sqd = (p2.x - p1.x) * (p2.x - p1.x) + (p2.y - p1.y) * (p2.y - p1.y);
        if (sqd < fixedTolerance)
        {
            continue;
        }

        for (j = 0; j < simple.length; j++)
        {
            var s1 = simple[j];
            var s2 = simple[j + 1 == simple.length ? 0 : j + 1];

            // note: mirrors the upstream port; this recomputes the same squared length as sqd above
            var sqds = (p2.x - p1.x) * (p2.x - p1.x) + (p2.y - p1.y) * (p2.y - p1.y);
            if (sqds < fixedTolerance)
            {
                continue;
            }

            if ((GeometryUtil._almostEqual(s1.x, s2.x) || GeometryUtil._almostEqual(s1.y, s2.y)) // we only really care about vertical and horizontal lines
                && GeometryUtil._withinDistance(p1, s1, 2 * tolerance)
                && GeometryUtil._withinDistance(p2, s2, 2 * tolerance)
                && (!GeometryUtil._withinDistance(p1, s1, Config.curveTolerance / 1000)
                    || !GeometryUtil._withinDistance(p2, s2, Config.curveTolerance / 1000)))
            {
                p1.x = s1.x;
                p1.y = s1.y;
                p2.x = s2.x;
                p2.y = s2.y;
                straightened = true;
            }
        }
    }

    //if(straightened){
    var Ac = _Clipper.ScaleUpPaths(offset, 10000000);
    var Bc = _Clipper.ScaleUpPaths(polygon, 10000000);

    var combined = new List<List<IntPoint>>();
    var clipper = new ClipperLib.Clipper();
    clipper.AddPath(Ac.ToList(), ClipperLib.PolyType.ptSubject, true);
    clipper.AddPath(Bc.ToList(), ClipperLib.PolyType.ptSubject, true);

    // the line straightening may have made the offset smaller than the simplified
    if (clipper.Execute(ClipperLib.ClipType.ctUnion, combined, ClipperLib.PolyFillType.pftNonZero, ClipperLib.PolyFillType.pftNonZero))
    {
        double? largestArea = null;
        for (i = 0; i < combined.Count; i++)
        {
            var n = Background.toNestCoordinates(combined[i].ToArray(), 10000000);
            var sarea = -GeometryUtil.polygonArea(n);
            if (largestArea == null || largestArea < sarea)
            {
                offset = n;
                largestArea = sarea;
            }
        }
    }
    //}

    cleaned = cleanPolygon2(offset);
    if (cleaned != null && cleaned.length > 1)
    {
        offset = cleaned;
    }

    // mark any points that are exact (for line merge detection)
    for (i = 0; i < offset.length; i++)
    {
        var seg = new SvgPoint[] { offset[i], offset[i + 1 == offset.length ? 0 : i + 1] };

        var index1 = find(seg[0], polygon);
        var index2 = find(seg[1], polygon);

        if (index1 == null)
        {
            index1 = 0;
        }
        if (index2 == null)
        {
            index2 = 0;
        }

        if (index1 + 1 == index2 || index2 + 1 == index1
            || (index1 == 0 && index2 == polygon.length - 1)
            || (index2 == 0 && index1 == polygon.length - 1))
        {
            seg[0].exact = true;
            seg[1].exact = true;
        }
    }

    if (!inside && holes != null && holes.Count > 0)
    {
        offset.children = holes;
    }

    return(offset);
}
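// Illustrative usage sketch (not part of the original port): simplify a part
// outline for nesting. inside = false treats the polygon as an outer boundary
// (offset outward); inside = true is used for holes (offset inward).
public static NFP SimplifyFunctionExample(NFP part)
{
    return simplifyFunction(part, false);
}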
// iteratively update the solution
public void NestIterate()
{
    List<NFP> lsheets = new List<NFP>();
    List<NFP> lpoly = new List<NFP>();

    for (int i = 0; i < Polygons.Count; i++)
    {
        Polygons[i].id = i;
    }
    for (int i = 0; i < Sheets.Count; i++)
    {
        Sheets[i].id = i;
    }

    foreach (var item in Polygons)
    {
        NFP clone = new NFP();
        clone.id = item.id;
        clone.source = item.source;
        clone.Points = item.Points.Select(z => new SvgPoint(z.x, z.y) { exact = z.exact }).ToArray();
        if (item.children != null)
        {
            clone.children = new List<NFP>();
            foreach (var citem in item.children)
            {
                clone.children.Add(new NFP());
                var l = clone.children.Last();
                l.id = citem.id;
                l.source = citem.source;
                l.Points = citem.Points.Select(z => new SvgPoint(z.x, z.y) { exact = z.exact }).ToArray();
            }
        }
        lpoly.Add(clone);
    }

    foreach (var item in Sheets)
    {
        NFP clone = new NFP();
        clone.id = item.id;
        clone.source = item.source;
        clone.Points = item.Points.Select(z => new SvgPoint(z.x, z.y) { exact = z.exact }).ToArray();
        if (item.children != null)
        {
            clone.children = new List<NFP>();
            foreach (var citem in item.children)
            {
                clone.children.Add(new NFP());
                var l = clone.children.Last();
                l.id = citem.id;
                l.source = citem.source;
                l.Points = citem.Points.Select(z => new SvgPoint(z.x, z.y) { exact = z.exact }).ToArray();
            }
        }
        lsheets.Add(clone);
    }

    if (offsetTreePhase)
    {
        var grps = lpoly.GroupBy(z => z.source).ToArray();
        if (Background.UseParallel)
        {
            Parallel.ForEach(grps, (item) =>
            {
                SvgNest.offsetTree(item.First(), 0.5 * SvgNest.Config.spacing, SvgNest.Config);
                foreach (var zitem in item)
                {
                    zitem.Points = item.First().Points.ToArray();
                }
            });
        }
        else
        {
            foreach (var item in grps)
            {
                SvgNest.offsetTree(item.First(), 0.5 * SvgNest.Config.spacing, SvgNest.Config);
                foreach (var zitem in item)
                {
                    zitem.Points = item.First().Points.ToArray();
                }
            }
        }

        foreach (var item in lsheets)
        {
            SvgNest.offsetTree(item, -0.5 * SvgNest.Config.spacing, SvgNest.Config, true);
        }
    }

    List<NestItem> partsLocal = new List<NestItem>();
    var p1 = lpoly.GroupBy(z => z.source).Select(z => new NestItem()
    {
        Polygon = z.First(),
        IsSheet = false,
        Quanity = z.Count()
    });
    var p2 = lsheets.GroupBy(z => z.source).Select(z => new NestItem()
    {
        Polygon = z.First(),
        IsSheet = true,
        Quanity = z.Count()
    });

    partsLocal.AddRange(p1);
    partsLocal.AddRange(p2);

    int srcc = 0;
    foreach (var item in partsLocal)
    {
        item.Polygon.source = srcc++;
    }

    Nest.launchWorkers(partsLocal.ToArray());
    var plcpr = Nest.nests.First();

    if (current == null || plcpr.fitness < current.fitness)
    {
        AssignPlacement(plcpr);
    }

    Iterations++;
}
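// Illustrative driver sketch (hypothetical helper, not part of the original
// port): run a fixed number of nesting iterations on this instance; NestIterate
// only assigns a placement when the fitness improves, so the best result sticks.
public void RunNestExample(int iterations = 10)
{
    for (int i = 0; i < iterations; i++)
    {
        NestIterate();
    }
}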
// converts a polygon from normal float coordinates to integer coordinates used by clipper, as well as x/y -> X/Y
public static IntPoint[] svgToClipper2(NFP polygon, double? scale = null)
{
    var d = _Clipper.ScaleUpPaths(polygon, scale == null ? Config.clipperScale : scale.Value);
    return(d.ToArray());
}
public static NFP simplifyFunction(NFP polygon, bool inside)
{
    return(simplifyFunction(polygon, inside, SvgNest.Config));
}