/// <summary>
/// Generates normals using k-nearest neighbours.
/// Returns the original point set unchanged when no normal estimator is configured.
/// </summary>
public static PointSet GenerateNormals(this PointSet self, ImportConfig config)
{
    if (config.EstimateNormals == null) return self;

    var totalNodes = self.Root.Value.CountNodes();
    var processed = 0L;

    // Invoked once per processed node; reports progress every 100 nodes to avoid
    // flooding the progress callback.
    void onNodeProcessed()
    {
        config.CancellationToken.ThrowIfCancellationRequested();
        var n = Interlocked.Increment(ref processed);
        if (config.Verbose) Console.Write($"[Normals] {n}/{totalNodes}\r");
        if (n % 100 == 0) config.ProgressCallback(n / (double)totalNodes);
    }

    var withNormals = self.GenerateNormals(onNodeProcessed, config);
    config.ProgressCallback(1.0);
    return withNormals;
}
/// <summary>
/// Returns new octree with LOD data created.
/// Returns the original point set unchanged when it has no root node.
/// </summary>
public static PointSet GenerateLod(this PointSet self, ImportConfig config)
{
    if (self.Root == null) return self;

    var nodeCount = self.Root?.Value?.CountNodes(true) ?? 0;
    var loddedNodesCount = 0L;
    var result = self.GenerateLod(config.Key, () =>
    {
        config.CancellationToken.ThrowIfCancellationRequested();
        var i = Interlocked.Increment(ref loddedNodesCount);
        if (config.Verbose) Console.Write($"[Lod] {i}/{nodeCount}\r");
        // BUGFIX: report the locally captured counter 'i' (the value returned by
        // Interlocked.Increment) instead of re-reading the shared 'loddedNodesCount':
        // the bare read is not atomic for a 64-bit field on 32-bit platforms and was
        // inconsistent with the other progress callbacks in this file.
        if (i % 100 == 0) config.ProgressCallback(i / (double)nodeCount);
    }, config.CancellationToken);

    // NOTE(review): blocking on the async result is unavoidable here because the
    // public signature returns PointSet; changing it to Task<PointSet> would break callers.
    result.Wait();
    config.ProgressCallback(1.0);
    return result.Result;
}
/// <summary>
/// Returns new octree with LOD data created.
/// No-op when <c>config.CreateOctreeLod</c> is disabled.
/// </summary>
public static PointSet GenerateLod(this PointSet self, ImportConfig config)
{
    if (config.CreateOctreeLod == false) return self;

    var totalNodes = self.Root.Value.CountNodes();
    var processed = 0L;

    // Invoked once per lodded node; throttles progress reporting to every 100 nodes.
    void onNodeLodded()
    {
        config.CancellationToken.ThrowIfCancellationRequested();
        var n = Interlocked.Increment(ref processed);
        if (config.Verbose) Console.Write($"[Lod] {n}/{totalNodes}\r");
        if (n % 100 == 0) config.ProgressCallback(n / (double)totalNodes);
    }

    var lodded = self.GenerateLod(config.Key, onNodeLodded, config.MaxDegreeOfParallelism, config.CancellationToken);
    config.ProgressCallback(1.0);
    return lodded;
}
/// <summary>
/// Imports sequence of chunks.
/// Applies the optional per-chunk preprocessing configured in <paramref name="config"/>
/// (min-dist filtering, deduplication, reprojection, normal estimation), then reduces
/// everything to a single stored PointSet, optionally with LOD data.
/// </summary>
public static PointSet Chunks(IEnumerable<Chunk> chunks, ImportConfig config)
{
    // Optional per-chunk preprocessing; all steps are applied lazily via Select.
    if (config.MinDist > 0.0)
    {
        chunks = chunks.Select(x => x.ImmutableFilterSequentialMinDistL1(config.MinDist));
    }

    if (config.DeduplicateChunks)
    {
        chunks = chunks.Select(x => x.ImmutableDeduplicate());
    }

    if (config.Reproject != null || config.EstimateNormals != null)
    {
        Chunk map(Chunk x, CancellationToken ct)
        {
            if (config.Reproject != null) x = x.WithPositions(config.Reproject(x.Positions));
            if (config.EstimateNormals != null) x = x.WithNormals(config.EstimateNormals(x.Positions));
            return x;
        }

        chunks = chunks.MapParallel(map, config.MaxDegreeOfParallelism, null, config.CancellationToken);
    }

    // Reduce all chunks to a single point set (first 66% of reported progress).
    var final = chunks.MapReduce(config.WithRandomKey().WithProgressCallback(x => config.ProgressCallback(x * 0.66)));

    // Optionally create LOD data (remaining 34% of reported progress).
    if (config.CreateOctreeLod)
    {
        final = final.GenerateLod(config.WithRandomKey().WithProgressCallback(x => config.ProgressCallback(0.66 + x * 0.34)));
    }

    // Store final point set under the specified key (or a random key when none is specified).
    var key = config.Key ?? Guid.NewGuid().ToString();
    final = new PointSet(config.Storage, key, final?.Root?.Value?.Id, config.OctreeSplitLimit);
    config.Storage.Add(key, final, config.CancellationToken);
    return final;
}
/// <summary>
/// Imports sequence of chunks.
/// Deduplicates, merges small chunks, optionally filters by minimum distance and
/// reprojects positions, then map/reduces everything to a single PointSet with LOD data.
/// </summary>
/// <exception cref="ArgumentNullException">When <paramref name="chunks"/> or <paramref name="config"/> is null.</exception>
/// <exception cref="InvalidOperationException">When the generated LOD root is not retrievable from storage.</exception>
public static PointSet Chunks(IEnumerable<Chunk> chunks, ImportConfig config)
{
    // BUGFIX: the original used 'config?.ProgressCallback(0.0)' (implying config may be
    // null) but then dereferenced 'config' unconditionally on every following line, so a
    // null config crashed with an unhelpful NullReferenceException. Fail fast instead.
    if (chunks == null) throw new ArgumentNullException(nameof(chunks));
    if (config == null) throw new ArgumentNullException(nameof(config));

    config.ProgressCallback(0.0);

    // deduplicate points
    chunks = chunks.Select(x => x.ImmutableDeduplicate(config.Verbose));

    // merge small chunks
    chunks = MergeSmall(config.MaxChunkPointCount, chunks);

    // filter minDist
    if (config.MinDist > 0.0)
    {
        chunks = config.NormalizePointDensityGlobal
            ? chunks.Select(x => x.ImmutableFilterMinDistByCell(new Cell(x.BoundingBox), config.ParseConfig))
            : chunks.Select(x => x.ImmutableFilterSequentialMinDistL1(config.MinDist));
    }

    // merge small chunks again (filtering may have shrunk chunks below the threshold)
    chunks = MergeSmall(config.MaxChunkPointCount, chunks);

    // EXPERIMENTAL
    //Report.BeginTimed("unmix");
    //chunks = chunks.ImmutableUnmixOutOfCore(@"T:\tmp", 1, config);
    //Report.End();

    // reproject positions
    if (config.Reproject != null)
    {
        Chunk map(Chunk x, CancellationToken ct)
        {
            if (config.Reproject != null)
            {
                var ps = config.Reproject(x.Positions);
                x = x.WithPositions(ps);
            }
            return x;
        }

        chunks = chunks.MapParallel(map, config.MaxDegreeOfParallelism, null, config.CancellationToken);
    }

    // reduce all chunks to single PointSet (progress 0.01 .. 0.66)
    Report.BeginTimed("map/reduce");
    var final = chunks.MapReduce(config.WithRandomKey().WithProgressCallback(x => config.ProgressCallback(0.01 + x * 0.65)));
    Report.EndTimed();

    // create LOD data (progress 0.66 .. 1.0)
    Report.BeginTimed("generate lod");
    final = final.GenerateLod(config.WithRandomKey().WithProgressCallback(x => config.ProgressCallback(0.66 + x * 0.34)));
    if (final.Root != null && config.Storage.GetPointCloudNode(final.Root.Value.Id) == null)
    {
        throw new InvalidOperationException("Invariant 4d633e55-bf84-45d7-b9c3-c534a799242e.");
    }
    Report.End();

    // create final point set with specified key (or random key when no key is specified)
    var key = config.Key ?? Guid.NewGuid().ToString();
#pragma warning disable CS0618 // Type or member is obsolete
    final = new PointSet(config.Storage, key, final?.Root?.Value?.Id, config.OctreeSplitLimit);
#pragma warning restore CS0618 // Type or member is obsolete
    config.Storage.Add(key, final);
    return final;
}
/// <summary>
/// Maps a sequence of point chunks to point sets, which are then reduced to one single point set.
/// MAP builds one temporary octree per chunk in parallel; REDUCE pairwise-merges the
/// resulting point sets until a single octree remains.
/// </summary>
public static PointSet MapReduce(this IEnumerable<Chunk> chunks, ImportConfig config)
{
    var totalChunkCount = 0;
    var totalPointCountInChunks = 0L;
    // First half of overall progress is attributed to the MAP phase.
    Action<double> progress = x => config.ProgressCallback(x * 0.5);

    #region MAP: create one PointSet for each chunk

    var pointsets = chunks
        .MapParallel((chunk, ct2) =>
        {
            Interlocked.Add(ref totalPointCountInChunks, chunk.Count);
            // Total chunk count is unknown up front, so report a value that
            // asymptotically approaches 1.0 as more chunks are processed.
            progress(Math.Sqrt(1.0 - 1.0 / Interlocked.Increment(ref totalChunkCount)));

            var builder = InMemoryPointSet.Build(chunk, config.OctreeSplitLimit);
            var root = builder.ToPointSetNode(config.Storage, isTemporaryImportNode: true);
            var id = $"Aardvark.Geometry.PointSet.{Guid.NewGuid()}.json";
            return new PointSet(config.Storage, id, root.Id, config.OctreeSplitLimit);
        },
        config.MaxDegreeOfParallelism, null, config.CancellationToken
        )
        .ToList();

    if (config.Verbose)
    {
        Console.WriteLine($"[MapReduce] pointsets : {pointsets.Count}");
        Console.WriteLine($"[MapReduce] totalPointCountInChunks: {totalPointCountInChunks}");
    }

    #endregion

    #region REDUCE: pairwise octree merge until a single (final) octree remains

    // Second half of overall progress is attributed to the REDUCE phase.
    progress = x => config.ProgressCallback(0.5 + x * 0.5);
    var i = 0;
    // Per-merge fractional completion, keyed by merge id; used to smooth progress.
    var fractionalProgress = new Dictionary<int, double>();
    var totalPointsToMerge = pointsets.Sum(x => x.PointCount);
    if (config.Verbose) Console.WriteLine($"[MapReduce] totalPointsToMerge: {totalPointsToMerge}");

    var totalPointSetsCount = pointsets.Count;
    if (totalPointSetsCount == 0)
    {
        // BUGFIX: the original stored the empty point set under 'config.Key', which may
        // be null even though the PointSet itself was constructed with a random fallback
        // key. Store it under the key it was actually constructed with.
        var emptyKey = config.Key ?? Guid.NewGuid().ToString();
        var empty = new PointSet(config.Storage, emptyKey);
        config.Storage.Add(emptyKey, empty);
        return empty;
    }

    var doneCount = 0;
    var parts = new HashSet<PointSet>(pointsets);
    var final = pointsets.MapReduceParallel((first, second, ct2) =>
    {
        // Each input may take part in exactly one merge; anything else is a logic error.
        lock (parts)
        {
            if (!parts.Remove(first)) throw new InvalidOperationException("map reduce error");
            if (!parts.Remove(second)) throw new InvalidOperationException("map reduce error");
        }

        var id = Interlocked.Increment(ref i);
        var firstPlusSecondPointCount = first.PointCount + second.PointCount;
        var lastN = 0L;
        var merged = first.Merge(second, n =>
        {
            if (n > lastN)
            {
                lastN = n;
                var p = 0.0;
                lock (fractionalProgress)
                {
                    fractionalProgress[id] = n / (double)firstPlusSecondPointCount;
                    p = 1.0 / (totalPointSetsCount - (doneCount + fractionalProgress.Values.Sum()));
                }
                progress(p);
            }
        },
        config.WithCancellationToken(ct2)
        );

        lock (fractionalProgress)
        {
            fractionalProgress.Remove(id);
            Interlocked.Increment(ref doneCount);
        }

        lock (parts) { parts.Add(merged); }
        config.Storage.Add(merged.Id, merged);

        if (config.Verbose)
        {
            Console.WriteLine($"[MapReduce] merged " +
                $"{formatCell(first.Root.Value.Cell)} + {formatCell(second.Root.Value.Cell)} -> {formatCell(merged.Root.Value.Cell)} " +
                $"({first.Root.Value.PointCountTree:N0} + {second.Root.Value.PointCountTree:N0} -> {merged.Root.Value.PointCountTree:N0})"
                );
        }
        if (merged.Root.Value.PointCountTree == 0) throw new InvalidOperationException();
        return merged;
    },
    config.MaxDegreeOfParallelism
    );

    if (config.Verbose) Console.WriteLine($"[MapReduce] everything merged");
    config.CancellationToken.ThrowIfCancellationRequested();

    #endregion

    // NOTE(review): 'config.Key' may be null here (no ?? fallback as in the empty case
    // above) — presumably callers always supply a key on this path; verify.
    config.Storage.Add(config.Key, final);
    config.ProgressCallback(1.0);
    return final;

    string formatCell(Cell c) => c.IsCenteredAtOrigin ? $"[centered, {c.Exponent}]" : c.ToString();
}
/// <summary>
/// Maps a sequence of point chunks to point sets, which are then reduced to one single point set.
/// MAP builds one octree per chunk in parallel; REDUCE pairwise-merges the resulting
/// point sets until a single octree remains.
/// </summary>
public static PointSet MapReduce(this IEnumerable<Chunk> chunks, ImportConfig config)
{
    var totalChunkCount = 0;
    var totalPointCountInChunks = 0L;
    // First half of overall progress is attributed to the MAP phase.
    Action<double> progress = x => config.ProgressCallback(x * 0.5);

    #region MAP: create one PointSet for each chunk

    var pointsets = chunks
        .MapParallel((chunk, ct2) =>
        {
            Interlocked.Add(ref totalPointCountInChunks, chunk.Count);
            // Total chunk count is unknown up front, so report a value that
            // asymptotically approaches 1.0 as more chunks are processed.
            progress(1.0 - 1.0 / Interlocked.Increment(ref totalChunkCount));

            var builder = InMemoryPointSet.Build(chunk, config.OctreeSplitLimit);
            var root = builder.ToPointSetCell(config.Storage, ct: ct2);
            var id = $"Aardvark.Geometry.PointSet.{Guid.NewGuid()}.json";
            return new PointSet(config.Storage, id, root.Id, config.OctreeSplitLimit);
        },
        config.MaxDegreeOfParallelism, null, config.CancellationToken
        )
        .ToList();

    if (config.Verbose)
    {
        Console.WriteLine($"[MapReduce] pointsets : {pointsets.Count}");
        Console.WriteLine($"[MapReduce] totalPointCountInChunks: {totalPointCountInChunks}");
    }

    #endregion

    #region REDUCE: pairwise octree merge until a single (final) octree remains

    // Second half of overall progress is attributed to the REDUCE phase.
    progress = x => config.ProgressCallback(0.5 + x * 0.5);
    var i = 0;
    var totalPointsToMerge = pointsets.Sum(x => x.PointCount);
    if (config.Verbose) Console.WriteLine($"[MapReduce] totalPointsToMerge: {totalPointsToMerge}");

    var totalPointSetsCount = pointsets.Count;
    if (totalPointSetsCount == 0)
    {
        // BUGFIX: the original stored the empty point set under 'config.Key', which may
        // be null even though the PointSet itself was constructed with a random fallback
        // key. Store it under the key it was actually constructed with.
        var emptyKey = config.Key ?? Guid.NewGuid().ToString();
        var empty = new PointSet(config.Storage, emptyKey);
        config.Storage.Add(emptyKey, empty, config.CancellationToken);
        return empty;
    }

    var final = pointsets.MapReduceParallel((first, second, ct2) =>
    {
        progress(Interlocked.Increment(ref i) / (double)totalPointSetsCount);
        var merged = first.Merge(second, ct2);
        config.Storage.Add(merged.Id, merged, ct2);
        if (config.Verbose)
        {
            Console.WriteLine($"[MapReduce] merged " +
                $"{first.Root.Value.Cell} + {second.Root.Value.Cell} -> {merged.Root.Value.Cell} " +
                $"({first.Root.Value.PointCountTree} + {second.Root.Value.PointCountTree} -> {merged.Root.Value.PointCountTree})"
                );
        }
        if (merged.Root.Value.PointCountTree == 0) throw new InvalidOperationException();
        return merged;
    },
    config.MaxDegreeOfParallelism
    );

    if (config.Verbose) Console.WriteLine($"[MapReduce] everything merged");
    config.CancellationToken.ThrowIfCancellationRequested();

    #endregion

    // NOTE(review): 'config.Key' may be null here (no ?? fallback as in the empty case
    // above) — presumably callers always supply a key on this path; verify.
    config.Storage.Add(config.Key, final, config.CancellationToken);
    config.ProgressCallback(1.0);
    return final;
}