/// <summary>
/// Starts tree building process with given settings.
/// </summary>
public static void BuildNodeTree(TreeImportSettings settings)
{
    // Build a processor per input file; unsupported formats yield null and are skipped.
    var processors = new List<PointProcessor>();
    foreach (var inputFile in settings.inputFiles)
    {
        var processor = CreateProcessor(inputFile);
        if (processor != null)
            processors.Add(processor);
    }

    if (processors.Count == 0)
    {
        Debug.LogError("All of given point cloud files are invalid or unsupported.");
        return;
    }

    // Derive the combined source bounds and the transformation that maps them into Unity space.
    var sourceBounds = CalculateBounds(processors);
    var transformation = new TransformationData(sourceBounds, settings);
    var targetBounds = sourceBounds.GetUnityBounds(settings);
    var matrix = transformation.TransformationMatrix;
    // Center is a position (full 3x4 transform), extents are a direction (no translation).
    targetBounds.center = matrix.MultiplyPoint3x4(targetBounds.center);
    targetBounds.extents = matrix.MultiplyVector(targetBounds.extents);

    NodeProcessorDispatcher dispatcher;
    try
    {
        EditorUtility.DisplayProgressBar("Creating dispatcher", "Preparing target directory...", 0f);
        dispatcher = new NodeProcessorDispatcher(settings.outputPath, settings);
    }
    finally
    {
        // Always clear the progress bar, even if dispatcher creation throws.
        EditorUtility.ClearProgressBar();
    }

    // Feed every file's transformed points into the dispatcher; conversion is cancelable.
    foreach (var processor in processors)
    {
        if (!processor.ConvertPoints(dispatcher, transformation))
        {
            Debug.Log("Import cancelled.");
            return;
        }
    }

    if (dispatcher.ProcessPoints(targetBounds))
        Debug.Log("Octree build finished successfully.");
    else
        Debug.Log("Octree build failed.");
}
///<inheritdoc/>
public override bool ConvertPoints(NodeProcessorDispatcher target, TransformationData transformationData)
{
    var fileName = Path.GetFileName(FilePath);
    long currentOffset = header.PointDataOffset;
    long processed = 0;
    long count = header.PointDataCount;

    // LAS 1.4+ stores the point count in a 64-bit field; the legacy 32-bit
    // field is zero when the file holds more points than it can express.
    if (header.VersionMajor > 1 || header.VersionMajor == 1 && header.VersionMinor >= 4)
    {
        if (count == 0)
            count = (long) header.PointDataCountLong;
    }

    // Map the file read-only: the (FilePath, FileMode.Open) overload defaults to
    // ReadWrite access and would fail on read-only input files even though the
    // data is never written.
    using (var file = MemoryMappedFile.CreateFromFile(FilePath, FileMode.Open, null, 0,
               MemoryMappedFileAccess.Read))
    {
        var batchIndex = 0;
        // Integer ceiling division; the previous float-based Mathf.CeilToInt
        // loses precision (and overflows int) for very large point counts.
        var totalBatchCount =
            (count + TreeUtility.MaxPointCountPerArray - 1) / TreeUtility.MaxPointCountPerArray;

        while (processed < count)
        {
            var batchCount = Math.Min(TreeUtility.MaxPointCountPerArray, count - processed);
            var batchSize = batchCount * header.PointDataSize;

            // View only the current batch window to keep the address-space footprint bounded.
            using (var view = file.CreateViewAccessor(currentOffset, batchSize, MemoryMappedFileAccess.Read))
            {
                batchIndex++;
                var progressBarTitle =
                    $"Converting ({fileName}, batch {batchIndex.ToString()}/{totalBatchCount.ToString()})";
                var targetBuffer = target.PublicMaxSizeBuffer;
                PointImportJobs.ConvertLasData(view, targetBuffer, header.PointDataSize, (int) batchCount,
                    ref header, transformationData, progressBarTitle);
                target.AddChunk(targetBuffer, (int) batchCount);
            }

            processed += batchCount;
            currentOffset += batchSize;
        }
    }

    return true;
}
/// <summary>
/// Default <see cref="ConvertPoints"/> implementation for formats using <see cref="DefaultHeaderData"/>.
/// </summary>
/// <param name="headerData">Data extracted from file header.</param>
/// <param name="target">Target processor dispatcher to which transformed points should be passed.</param>
/// <param name="transformationData">Data used for transformation of the points.</param>
/// <returns>True if conversion finished successfully, false otherwise.</returns>
protected bool ConvertPointsDefault(DefaultHeaderData headerData, NodeProcessorDispatcher target,
    TransformationData transformationData)
{
    var fileName = Path.GetFileName(FilePath);
    long currentOffset = headerData.DataOffset;
    long processed = 0;

    // Map the file read-only: the (FilePath, FileMode.Open) overload defaults to
    // ReadWrite access and would fail on read-only input files even though the
    // data is never written.
    using (var file = MemoryMappedFile.CreateFromFile(FilePath, FileMode.Open, null, 0,
               MemoryMappedFileAccess.Read))
    {
        var batchIndex = 0;
        var maxArraySize = TreeUtility.CalculateMaxArraySize(headerData.DataStride);
        // Integer ceiling division; the previous float-based Mathf.CeilToInt
        // loses precision (and overflows int) for very large point counts.
        var totalBatchCount = (headerData.DataCount + maxArraySize - 1) / maxArraySize;

        while (processed < headerData.DataCount)
        {
            var batchCount = Math.Min(maxArraySize, headerData.DataCount - processed);
            var batchSize = batchCount * headerData.DataStride;

            // View only the current batch window to keep the address-space footprint bounded.
            using (var view = file.CreateViewAccessor(currentOffset, batchSize, MemoryMappedFileAccess.Read))
            {
                batchIndex++;
                var progressBarTitle =
                    $"Converting ({fileName}, batch {batchIndex.ToString()}/{totalBatchCount.ToString()})";
                var targetBuffer = target.PublicMaxSizeBuffer;
                PointImportJobs.ConvertData(view, targetBuffer, headerData.Elements, transformationData,
                    headerData.DataStride, batchCount, progressBarTitle);
                target.AddChunk(targetBuffer, (int) batchCount);
            }

            processed += batchCount;
            currentOffset += batchSize;
        }
    }

    return true;
}
/// <summary>
/// Starts tree building process with given settings.
/// </summary>
public static bool BuildNodeTree(TreeImportSettings settings)
{
    // Build a processor per input file; unsupported formats yield null and are skipped.
    var processors = new List<PointProcessor>();
    foreach (var inputFile in settings.inputFiles)
    {
        var processor = CreateProcessor(Utility.GetFullPath(inputFile));
        if (processor != null)
            processors.Add(processor);
    }

    if (processors.Count == 0)
    {
        Debug.LogError("All of given point cloud files are invalid or unsupported.");
        return false;
    }

    // Derive the combined source bounds and the transformation that maps them into Unity space.
    var sourceBounds = CalculateBounds(processors);
    var transformation = new TransformationData(sourceBounds, settings);
    var targetBounds = sourceBounds.GetUnityBounds(settings);
    var matrix = transformation.TransformationMatrix;
    // Center is a position (full 3x4 transform), extents are a direction (no translation).
    targetBounds.center = matrix.MultiplyPoint3x4(targetBounds.center);
    targetBounds.extents = matrix.MultiplyVector(targetBounds.extents);

    // Road-only mesh generation additionally needs a height histogram of the source points.
    var importData = settings.generateMesh && settings.roadOnlyMesh
        ? new TreeImportData(targetBounds, GenerateHistogram(processors, sourceBounds))
        : new TreeImportData(targetBounds);

    NodeProcessorDispatcher dispatcher;
    var fullOutputPath = Utility.GetFullPath(settings.outputPath);
    try
    {
        EditorUtility.DisplayProgressBar("Creating dispatcher", "Preparing target directory...", 0f);
        dispatcher = new NodeProcessorDispatcher(fullOutputPath, settings);
    }
    finally
    {
        // Always clear the progress bar, even if dispatcher creation throws.
        EditorUtility.ClearProgressBar();
    }

    // Feed every file's transformed points into the dispatcher; conversion is cancelable.
    foreach (var processor in processors)
    {
        if (!processor.ConvertPoints(dispatcher, transformation))
        {
            Debug.Log("Import cancelled.");
            return false;
        }
    }

    if (!dispatcher.ProcessPoints(importData))
    {
        Debug.Log("Octree build failed.");
        return false;
    }

    dispatcher.GetPointCountResults(out var total, out var used, out var discarded);
    Debug.Log($"Octree build finished successfully.\n" +
              $"Used points: {used}/{total} ({discarded} discarded on low tree levels)");
    return true;
}
/// <inheritdoc/>
public override bool ConvertPoints(NodeProcessorDispatcher target, TransformationData transformationData)
    => ConvertPointsDefault(header, target, transformationData);
///<inheritdoc/>
public override bool ConvertPoints(NodeProcessorDispatcher target, TransformationData transformationData)
{
    var fileName = Path.GetFileName(FilePath);
    var progressTitle = $"Processing {fileName}";

    using (var laz = new Laszip(FilePath))
    {
        long count = laz.Count;
        bool hasColor = laz.HasColor;
        var transform = transformationData.TransformationMatrix;
        // With the 8-bit workaround intensity/color channels are used as-is;
        // otherwise 16-bit channel values are scaled down to 8 bits.
        var channelShift = transformationData.LasRGB8BitWorkaround ? 0 : 8;

        try
        {
            for (long i = 0; i < count; i++)
            {
                // Refresh the (cancelable) progress bar only every 8192 points.
                if (i % (1024 * 8) == 0)
                {
                    var progress = (double) i / count;
                    if (EditorUtility.DisplayCancelableProgressBar(progressTitle, $"{i:N0} points", (float) progress))
                        return false;
                }

                var point = laz.GetNextPoint();
                double x = (point.X + transformationData.OutputCenterX) * transformationData.OutputScaleX;
                double y = (point.Y + transformationData.OutputCenterY) * transformationData.OutputScaleY;
                double z = (point.Z + transformationData.OutputCenterZ) * transformationData.OutputScaleZ;
                // NOTE(review): MultiplyVector applies no translation — presumably the offset is
                // handled by the OutputCenter terms above; confirm against TransformationData.
                var position = transform.MultiplyVector(new Vector3((float) x, (float) y, (float) z));

                var intensity = (byte) (point.Intensity >> channelShift);
                // Packed layout: intensity in the top byte, then B | G | R.
                var color = (uint) (intensity << 24);

                if (hasColor)
                {
                    var r = (byte) (point.Red >> channelShift);
                    var g = (byte) (point.Green >> channelShift);
                    var b = (byte) (point.Blue >> channelShift);
                    color |= (uint) ((b << 16) | (g << 8) | r);
                }
                else
                {
                    // No RGB data — fall back to an intensity-based grayscale color.
                    color |= (uint) ((intensity << 16) | (intensity << 8) | intensity);
                }

                target.AddPoint(new PointCloudPoint
                {
                    Position = position,
                    Color = color,
                });
            }
        }
        finally
        {
            // Always clear the progress bar, whether finished, cancelled or faulted.
            EditorUtility.ClearProgressBar();
        }
    }

    return true;
}
/// <summary>
/// Converts points in file assigned to this processor using given transformation data and feeds them to target dispatcher.
/// </summary>
/// <remarks>
/// Implementations may return false when the user cancels the conversion (e.g. via the editor progress bar).
/// </remarks>
/// <param name="target">Target processor dispatcher to which transformed points should be passed.</param>
/// <param name="transformationData">Data used for transformation of the points.</param>
/// <returns>True if conversion finished successfully, false otherwise.</returns>
public abstract bool ConvertPoints(NodeProcessorDispatcher target, TransformationData transformationData);