/// <summary>
/// Creates a dispatcher writing tree data under <paramref name="outputPath"/>.
/// Purges node/index files left over from a previous import, prepares the
/// output and temporary directories, and allocates the reusable point buffers.
/// </summary>
/// <param name="outputPath">Directory into which tree files are written.</param>
/// <param name="settings">Import settings (chunk size etc.) used by this dispatcher.</param>
public NodeProcessorDispatcher(string outputPath, TreeImportSettings settings)
{
    this.outputPath = outputPath;
    Settings = settings;

    // Extensions of files produced by a previous import run that must be removed.
    var staleExtensions = new List<string>
    {
        TreeUtility.NodeFileExtension,
        TreeUtility.IndexFileExtension
    };

    if (Directory.Exists(outputPath))
    {
        var staleFiles = Directory
            .EnumerateFiles(outputPath, "*.*", SearchOption.TopDirectoryOnly)
            .Where(path => Path.GetExtension(path) != null &&
                           staleExtensions.Contains(Path.GetExtension(path).ToLowerInvariant()))
            .ToList();

        foreach (var staleFile in staleFiles)
        {
            File.Delete(staleFile);
        }
    }

    Directory.CreateDirectory(outputPath);
    outputTmpPath = Path.Combine(outputPath, "tmp");
    Directory.CreateDirectory(outputTmpPath);

    // One chunk-sized buffer plus one buffer large enough for the biggest
    // batch a single managed array can hold.
    points = new PointCloudPoint[settings.chunkSize];
    var maxArraySize = TreeUtility.CalculateMaxArraySize(UnsafeUtility.SizeOf<PointCloudPoint>());
    PublicMaxSizeBuffer = new PointCloudPoint[maxArraySize];
}
/// <summary>
/// Builds a vertical histogram of all points in the LAS file, processing the
/// file in memory-mapped batches to bound memory usage.
/// </summary>
/// <param name="bounds">Bounds defining the histogram's vertical range.</param>
/// <returns>Histogram accumulated over all points in the file.</returns>
public override PointCloudVerticalHistogram GenerateHistogram(PointCloudBounds bounds)
{
    var fileName = Path.GetFileName(FilePath);
    long currentOffset = header.PointDataOffset;
    long processed = 0;
    long count = header.PointDataCount;

    // LAS 1.4 moved the point count to a 64-bit header field; the legacy
    // 32-bit field is zero in that case, so fall back to the long variant.
    if (header.VersionMajor > 1 || header.VersionMajor == 1 && header.VersionMinor >= 4)
    {
        if (count == 0)
        {
            count = (long)header.PointDataCountLong;
        }
    }

    var result = new PointCloudVerticalHistogram(bounds);

    using (var file = MemoryMappedFile.CreateFromFile(FilePath ?? throw new Exception("Input file not found."), FileMode.Open))
    {
        var batchIndex = 0;
        // Batches are capped so a single managed array can hold one batch.
        var maxArraySize = TreeUtility.CalculateMaxArraySize(header.PointDataSize);
        var totalBatchCount = Mathf.CeilToInt((float)count / maxArraySize);

        while (processed < count)
        {
            var batchCount = Math.Min(maxArraySize, count - processed);
            var batchSize = batchCount * header.PointDataSize;

            // Map only the current batch's byte range into memory.
            using (var view = file.CreateViewAccessor(currentOffset, batchSize, MemoryMappedFileAccess.Read))
            {
                unsafe
                {
                    batchIndex++;
                    var progressBarTitle = $"Generating histogram ({fileName}, batch {batchIndex.ToString()}/{totalBatchCount.ToString()})";
                    var hst = PointImportJobs.GenerateHistogramLas(view, batchCount, header, bounds, progressBarTitle);
                    // Merge this batch's histogram into the aggregate result.
                    result.AddData(hst.regions);
                }
            }

            processed += batchCount;
            currentOffset += batchSize;
        }
    }

    return(result);
}
///<inheritdoc/>
public override bool ConvertPoints(NodeProcessorDispatcher target, TransformationData transformationData)
{
    var fileName = Path.GetFileName(FilePath);
    long currentOffset = header.PointDataOffset;
    long processed = 0;
    long count = header.PointDataCount;

    // LAS 1.4 moved the point count to a 64-bit header field; the legacy
    // 32-bit field is zero in that case, so fall back to the long variant.
    if (header.VersionMajor > 1 || header.VersionMajor == 1 && header.VersionMinor >= 4)
    {
        if (count == 0)
        {
            count = (long)header.PointDataCountLong;
        }
    }

    // Guard against a missing input path with an explicit message, consistent
    // with GenerateHistogram.
    using (var file = MemoryMappedFile.CreateFromFile(FilePath ?? throw new Exception("Input file not found."), FileMode.Open))
    {
        var batchIndex = 0;
        // Batches are capped so a single managed array can hold one batch.
        var maxArraySize = TreeUtility.CalculateMaxArraySize(header.PointDataSize);
        var totalBatchCount = Mathf.CeilToInt((float)count / maxArraySize);

        while (processed < count)
        {
            var batchCount = Math.Min(maxArraySize, count - processed);
            var batchSize = batchCount * header.PointDataSize;

            // Map only the current batch's byte range into memory.
            using (var view = file.CreateViewAccessor(currentOffset, batchSize, MemoryMappedFileAccess.Read))
            {
                batchIndex++;
                var progressBarTitle = $"Converting ({fileName}, batch {batchIndex.ToString()}/{totalBatchCount.ToString()})";
                // Reuse the dispatcher's preallocated buffer to avoid per-batch allocations.
                var targetBuffer = target.PublicMaxSizeBuffer;
                PointImportJobs.ConvertLasData(view, targetBuffer, header.PointDataSize, (int)batchCount, ref header,
                    transformationData, progressBarTitle);
                target.AddChunk(targetBuffer, (int)batchCount);
            }

            processed += batchCount;
            currentOffset += batchSize;
        }
    }

    return(true);
}
/// <summary>
/// Default <see cref="GenerateHistogram"/> implementation for formats using <see cref="DefaultHeaderData"/>.
/// Processes the file in memory-mapped batches to bound memory usage.
/// </summary>
/// <param name="headerData">Data extracted from file header.</param>
/// <param name="bounds">Bounds defining the histogram's vertical range.</param>
/// <returns>Histogram accumulated over all points in the file.</returns>
protected PointCloudVerticalHistogram GenerateHistogramDefault(DefaultHeaderData headerData, PointCloudBounds bounds)
{
    var fileName = Path.GetFileName(FilePath);
    long currentOffset = headerData.DataOffset;
    long processed = 0;
    var result = new PointCloudVerticalHistogram(bounds);

    using (var file = MemoryMappedFile.CreateFromFile(FilePath ?? throw new Exception("Input file not found."), FileMode.Open))
    {
        var batchIndex = 0;
        // Batches are capped so a single managed array can hold one batch.
        var maxArraySize = TreeUtility.CalculateMaxArraySize(headerData.DataStride);
        var totalBatchCount = Mathf.CeilToInt((float)headerData.DataCount / maxArraySize);

        while (processed < headerData.DataCount)
        {
            var batchCount = Math.Min(maxArraySize, headerData.DataCount - processed);
            var batchSize = batchCount * headerData.DataStride;

            // Map only the current batch's byte range into memory.
            using (var view = file.CreateViewAccessor(currentOffset, batchSize, MemoryMappedFileAccess.Read))
            {
                unsafe
                {
                    batchIndex++;
                    // Fixed: title previously said "Calculating bounds", copied
                    // from CalculateBoundsDefault — this method builds a histogram.
                    var progressBarTitle = $"Generating histogram ({fileName}, batch {batchIndex.ToString()}/{totalBatchCount.ToString()})";
                    var batchHistogram = PointImportJobs.GenerateHistogram(view, batchCount, headerData.DataStride,
                        headerData.Elements, bounds, progressBarTitle);
                    // Merge this batch's histogram into the aggregate result.
                    result.AddData(batchHistogram.regions);
                }
            }

            processed += batchCount;
            currentOffset += batchSize;
        }
    }

    return(result);
}
/// <summary>
/// Default <see cref="ConvertPoints"/> implementation for formats using <see cref="DefaultHeaderData"/>.
/// Processes the file in memory-mapped batches to bound memory usage.
/// </summary>
/// <param name="headerData">Data extracted from file header.</param>
/// <param name="target">Target processor dispatcher to which transformed points should be passed.</param>
/// <param name="transformationData">Data used for transformation of the points.</param>
/// <returns>True if conversion finished successfully, false otherwise.</returns>
protected bool ConvertPointsDefault(DefaultHeaderData headerData, NodeProcessorDispatcher target, TransformationData transformationData)
{
    var fileName = Path.GetFileName(FilePath);
    long currentOffset = headerData.DataOffset;
    long processed = 0;

    // Guard against a missing input path with an explicit message, consistent
    // with GenerateHistogramDefault.
    using (var file = MemoryMappedFile.CreateFromFile(FilePath ?? throw new Exception("Input file not found."), FileMode.Open))
    {
        var batchIndex = 0;
        // Batches are capped so a single managed array can hold one batch.
        var maxArraySize = TreeUtility.CalculateMaxArraySize(headerData.DataStride);
        var totalBatchCount = Mathf.CeilToInt((float)headerData.DataCount / maxArraySize);

        while (processed < headerData.DataCount)
        {
            var batchCount = Math.Min(maxArraySize, headerData.DataCount - processed);
            var batchSize = batchCount * headerData.DataStride;

            // Map only the current batch's byte range into memory.
            using (var view = file.CreateViewAccessor(currentOffset, batchSize, MemoryMappedFileAccess.Read))
            {
                batchIndex++;
                var progressBarTitle = $"Converting ({fileName}, batch {batchIndex.ToString()}/{totalBatchCount.ToString()})";
                // Reuse the dispatcher's preallocated buffer to avoid per-batch allocations.
                var targetBuffer = target.PublicMaxSizeBuffer;
                PointImportJobs.ConvertData(view, targetBuffer, headerData.Elements, transformationData,
                    headerData.DataStride, batchCount, progressBarTitle);
                target.AddChunk(targetBuffer, (int)batchCount);
            }

            processed += batchCount;
            currentOffset += batchSize;
        }
    }

    return(true);
}
/// <summary>
/// Default <see cref="CalculateBounds"/> implementation for formats using <see cref="DefaultHeaderData"/>.
/// Processes the file in memory-mapped batches to bound memory usage.
/// </summary>
/// <param name="headerData">Data extracted from file header.</param>
/// <returns>Bounds of points in given file.</returns>
protected PointCloudBounds CalculateBoundsDefault(DefaultHeaderData headerData)
{
    var fileName = Path.GetFileName(FilePath);
    long currentOffset = headerData.DataOffset;
    long processed = 0;
    var result = PointCloudBounds.Empty;

    // Guard against a missing input path with an explicit message, consistent
    // with GenerateHistogramDefault.
    using (var file = MemoryMappedFile.CreateFromFile(FilePath ?? throw new Exception("Input file not found."), FileMode.Open))
    {
        var batchIndex = 0;
        // Batches are capped so a single managed array can hold one batch.
        var maxArraySize = TreeUtility.CalculateMaxArraySize(headerData.DataStride);
        var totalBatchCount = Mathf.CeilToInt((float)headerData.DataCount / maxArraySize);

        while (processed < headerData.DataCount)
        {
            var batchCount = Math.Min(maxArraySize, headerData.DataCount - processed);
            var batchSize = batchCount * headerData.DataStride;

            // Map only the current batch's byte range into memory.
            using (var view = file.CreateViewAccessor(currentOffset, batchSize, MemoryMappedFileAccess.Read))
            {
                batchIndex++;
                var progressBarTitle = $"Calculating bounds ({fileName}, batch {batchIndex.ToString()}/{totalBatchCount.ToString()})";
                var batchBounds = PointImportJobs.CalculateBounds(view, batchCount, headerData.DataStride,
                    headerData.Elements, progressBarTitle);
                // Grow the aggregate bounds to cover this batch.
                result.Encapsulate(batchBounds);
            }

            processed += batchCount;
            currentOffset += batchSize;
        }
    }

    return(result);
}
/// <summary>
/// Converts raw point records from a memory-mapped view into <paramref name="target"/>,
/// applying the given transformation via a parallel job while showing an editor
/// progress bar.
/// </summary>
/// <param name="accessor">View over the raw source point records.</param>
/// <param name="target">Preallocated output buffer; must hold at least <paramref name="count"/> points.</param>
/// <param name="elements">Layout of the elements in a single source point record.</param>
/// <param name="transformationData">Transformation applied to each point.</param>
/// <param name="stride">Size in bytes of a single source point record.</param>
/// <param name="count">Number of points to convert; clamped to the maximum array size.</param>
/// <param name="progressBarTitle">Optional custom title for the progress bar.</param>
public static void ConvertData(MemoryMappedViewAccessor accessor, PointCloudPoint[] target,
    List <PointElement> elements, TransformationData transformationData, int stride, long count,
    string progressBarTitle = null)
{
    var maxArraySize = TreeUtility.CalculateMaxArraySize(stride);
    if (count > maxArraySize)
    {
        Debug.LogWarning(
            $"Too many points ({count:n0}), truncating to {maxArraySize:n0}");
        count = maxArraySize;
    }

    if (target.Length < count)
    {
        // Fixed: message previously had an unbalanced parenthesis.
        throw new Exception($"Target buffer is too small (length: {target.Length}, required: {count})");
    }

    unsafe
    {
        byte *sourcePtr = null;
        accessor.SafeMemoryMappedViewHandle.AcquirePointer(ref sourcePtr);
        try
        {
            fixed(PointCloudPoint *targetPtr = target)
            {
                // Per-thread progress counters, one slot per potential job worker.
                var counts = new NativeArray <int>(JobsUtility.MaxJobThreadCount, Allocator.TempJob);
                try
                {
                    var job = new PointCloudConvertJob()
                    {
                        X = GetInputAccess(PointElementName.X, elements, sourcePtr, stride),
                        Y = GetInputAccess(PointElementName.Y, elements, sourcePtr, stride),
                        Z = GetInputAccess(PointElementName.Z, elements, sourcePtr, stride),
                        Color = GetColorAccess(elements, sourcePtr, stride),
                        Output = targetPtr,
                        Transform = transformationData.TransformationMatrix,
                        OutputCenterX = transformationData.OutputCenterX,
                        OutputCenterY = transformationData.OutputCenterY,
                        OutputCenterZ = transformationData.OutputCenterZ,
                        OutputScaleX = transformationData.OutputScaleX,
                        OutputScaleY = transformationData.OutputScaleY,
                        OutputScaleZ = transformationData.OutputScaleZ,
                        Counts = (int *)counts.GetUnsafePtr(),
                        ThreadIndex = 0,
                    };

                    var h = job.Schedule((int)count, 65536);

                    // Poll for completion, updating the progress bar from the
                    // per-thread counters.
                    while (!h.IsCompleted)
                    {
                        System.Threading.Thread.Sleep(100);
                        var processed = counts.Sum();
                        var progress = (float)((double)processed / count);
                        EditorUtility.DisplayProgressBar(
                            string.IsNullOrEmpty(progressBarTitle) ? $"Applying transformation" : progressBarTitle,
                            $"{processed:N0} points", progress);
                    }
                }
                finally
                {
                    EditorUtility.ClearProgressBar();
                    counts.Dispose();
                }
            }
        }
        finally
        {
            accessor.SafeMemoryMappedViewHandle.ReleasePointer();
        }
    }
}
/// <summary>
/// Generates a vertical histogram for a batch of LAS point records using a
/// parallel job, showing an editor progress bar while the job runs.
/// </summary>
/// <param name="accessor">View over the raw LAS point records.</param>
/// <param name="count">Number of points in the batch; clamped to the maximum array size.</param>
/// <param name="header">LAS header providing record stride, scale and offset.</param>
/// <param name="bounds">Bounds defining the histogram's vertical range.</param>
/// <param name="progressBarTitle">Optional custom title for the progress bar.</param>
/// <returns>Histogram accumulated over all points in the batch.</returns>
public static PointCloudVerticalHistogram GenerateHistogramLas(MemoryMappedViewAccessor accessor, long count,
    LasPointProcessor.LasHeader header, PointCloudBounds bounds, string progressBarTitle = null)
{
    var maxArraySize = TreeUtility.CalculateMaxArraySize(header.PointDataSize);
    if (count > maxArraySize)
    {
        Debug.LogWarning(
            $"Too many points ({count:n0}), truncating to {maxArraySize:n0}");
        count = maxArraySize;
    }

    unsafe
    {
        byte *ptr = null;
        accessor.SafeMemoryMappedViewHandle.AcquirePointer(ref ptr);
        try
        {
            // Per-thread scratch: progress counters and partial histograms,
            // one slot per potential job worker thread.
            var counts = new NativeArray <int>(JobsUtility.MaxJobThreadCount, Allocator.TempJob);
            var histograms = new NativeArray <PointCloudVerticalHistogram>(
                JobsUtility.MaxJobThreadCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
            for (var i = 0; i < histograms.Length; i++)
            {
                histograms[i] = new PointCloudVerticalHistogram(bounds);
            }

            try
            {
                var job = new PointCloudCreateHistogramLasJob()
                {
                    Input = ptr,
                    Stride = header.PointDataSize,
                    InputScaleX = header.ScaleX,
                    InputScaleY = header.ScaleY,
                    InputScaleZ = header.ScaleZ,
                    InputOffsetX = header.OffsetX,
                    InputOffsetY = header.OffsetY,
                    InputOffsetZ = header.OffsetZ,
                    Histogram = (PointCloudVerticalHistogram *)histograms.GetUnsafePtr(),
                    Counts = (int *)counts.GetUnsafePtr(),
                    ThreadIndex = 0,
                };

                var h = job.Schedule((int)count, 65536);

                // Poll for completion, updating the progress bar from the
                // per-thread counters.
                while (!h.IsCompleted)
                {
                    System.Threading.Thread.Sleep(100);
                    var processed = counts.Sum();
                    var progress = (float)((double)processed / count);
                    EditorUtility.DisplayProgressBar(
                        string.IsNullOrEmpty(progressBarTitle) ? "Generating histogram" : progressBarTitle,
                        $"{processed:N0} points", progress);
                }

                // Merge the per-thread partial histograms into a single result.
                var result = new PointCloudVerticalHistogram(bounds);
                foreach (var hst in histograms)
                {
                    result.AddData(hst.regions);
                }

                return(result);
            }
            finally
            {
                EditorUtility.ClearProgressBar();
                histograms.Dispose();
                counts.Dispose();
            }
        }
        finally
        {
            accessor.SafeMemoryMappedViewHandle.ReleasePointer();
        }
    }
}
/// <summary>
/// Calculates the bounds of a batch of point records using a parallel job,
/// showing an editor progress bar while the job runs.
/// </summary>
/// <param name="accessor">View over the raw point records.</param>
/// <param name="count">Number of points in the batch; clamped to the maximum array size.</param>
/// <param name="stride">Size in bytes of a single point record.</param>
/// <param name="elements">Layout of the elements in a single point record.</param>
/// <param name="progressBarTitle">Optional custom title for the progress bar.</param>
/// <returns>Bounds encapsulating all points in the batch.</returns>
public static PointCloudBounds CalculateBounds(MemoryMappedViewAccessor accessor, long count, int stride,
    List <PointElement> elements, string progressBarTitle = null)
{
    var maxArraySize = TreeUtility.CalculateMaxArraySize(stride);
    if (count > maxArraySize)
    {
        Debug.LogWarning(
            $"Too many points ({count:n0}), truncating to {maxArraySize:n0}");
        count = maxArraySize;
    }

    unsafe
    {
        byte *ptr = null;
        accessor.SafeMemoryMappedViewHandle.AcquirePointer(ref ptr);
        try
        {
            // Per-thread scratch: progress counters and partial bounds, one
            // slot per potential job worker thread.
            var counts = new NativeArray <int>(JobsUtility.MaxJobThreadCount, Allocator.TempJob);
            var bounds = new NativeArray <PointCloudBounds>(JobsUtility.MaxJobThreadCount, Allocator.TempJob,
                NativeArrayOptions.UninitializedMemory);
            for (var i = 0; i < bounds.Length; i++)
            {
                bounds[i] = PointCloudBounds.Empty;
            }

            try
            {
                var job = new PointCloudGetBoundsJob()
                {
                    X = GetInputAccess(PointElementName.X, elements, ptr, stride),
                    Y = GetInputAccess(PointElementName.Y, elements, ptr, stride),
                    Z = GetInputAccess(PointElementName.Z, elements, ptr, stride),
                    Bounds = (PointCloudBounds *)bounds.GetUnsafePtr(),
                    Counts = (int *)counts.GetUnsafePtr(),
                    ThreadIndex = 0,
                };

                var h = job.Schedule((int)count, 65536);

                // Poll for completion, updating the progress bar from the
                // per-thread counters.
                while (!h.IsCompleted)
                {
                    System.Threading.Thread.Sleep(100);
                    var processed = counts.Sum();
                    var progress = (float)((double)processed / count);
                    EditorUtility.DisplayProgressBar(
                        string.IsNullOrEmpty(progressBarTitle) ? "Calculating bounds" : progressBarTitle,
                        $"{processed:N0} points", progress);
                }

                // Merge the per-thread partial bounds, skipping untouched slots.
                var result = PointCloudBounds.Empty;
                foreach (var b in bounds)
                {
                    if (b.IsValid)
                    {
                        result.Add(b.MinX, b.MinY, b.MinZ);
                        result.Add(b.MaxX, b.MaxY, b.MaxZ);
                    }
                }

                return(result);
            }
            finally
            {
                EditorUtility.ClearProgressBar();
                bounds.Dispose();
                counts.Dispose();
            }
        }
        finally
        {
            accessor.SafeMemoryMappedViewHandle.ReleasePointer();
        }
    }
}