Example #1
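Override of GenerateHistogram for LAS input: the file is read in memory-mapped batches and each batch is folded into a single vertical histogram for the supplied bounds.

        /// <summary>
        /// Generates a vertical histogram of the LAS point data within the given bounds.
        /// </summary>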
        public override PointCloudVerticalHistogram GenerateHistogram(PointCloudBounds bounds)
        {
            var fileName = Path.GetFileName(FilePath);

            long currentOffset = header.PointDataOffset;
            long processed     = 0;

            long count = header.PointDataCount;

            // LAS 1.4 moves the full record count to a 64-bit header field and may leave
            // the legacy 32-bit count at zero, so fall back to the 64-bit value.
            if (header.VersionMajor > 1 || (header.VersionMajor == 1 && header.VersionMinor >= 4))
            {
                if (count == 0)
                {
                    count = (long)header.PointDataCountLong;
                }
            }

            var result = new PointCloudVerticalHistogram(bounds);

            using (var file = MemoryMappedFile.CreateFromFile(FilePath ?? throw new Exception("Input file not found."), FileMode.Open))
            {
                var batchIndex = 0;

                var maxArraySize    = TreeUtility.CalculateMaxArraySize(header.PointDataSize);
                var totalBatchCount = Mathf.CeilToInt((float)count / maxArraySize);

                while (processed < count)
                {
                    var batchCount = Math.Min(maxArraySize, count - processed);
                    var batchSize  = batchCount * header.PointDataSize;

                    using (var view = file.CreateViewAccessor(currentOffset, batchSize, MemoryMappedFileAccess.Read))
                    {
                        unsafe
                        {
                            batchIndex++;
                            var progressBarTitle =
                                $"Generating histogram ({fileName}, batch {batchIndex.ToString()}/{totalBatchCount.ToString()})";

                            var hst = PointImportJobs.GenerateHistogramLas(view, batchCount, header, bounds, progressBarTitle);
                            result.AddData(hst.regions);
                        }
                    }

                    processed     += batchCount;
                    currentOffset += batchSize;
                }
            }

            return result;
        }
Example #2
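Override of ConvertPoints for LAS input: each memory-mapped batch is converted into the dispatcher's PublicMaxSizeBuffer and passed on via AddChunk.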
        /// <inheritdoc/>
        public override bool ConvertPoints(NodeProcessorDispatcher target, TransformationData transformationData)
        {
            var fileName = Path.GetFileName(FilePath);

            long currentOffset = header.PointDataOffset;
            long processed     = 0;

            long count = header.PointDataCount;

            // Same LAS 1.4 fallback as in Example #1: use the 64-bit record count when the legacy field is zero.
            if (header.VersionMajor > 1 || (header.VersionMajor == 1 && header.VersionMinor >= 4))
            {
                if (count == 0)
                {
                    count = (long)header.PointDataCountLong;
                }
            }

            using (var file = MemoryMappedFile.CreateFromFile(FilePath, FileMode.Open))
            {
                var batchIndex = 0;

                var totalBatchCount =
                    Mathf.CeilToInt((float)count / TreeUtility.MaxPointCountPerArray);

                while (processed < count)
                {
                    var batchCount = Math.Min(TreeUtility.MaxPointCountPerArray, count - processed);
                    var batchSize  = batchCount * header.PointDataSize;

                    using (var view = file.CreateViewAccessor(currentOffset, batchSize, MemoryMappedFileAccess.Read))
                    {
                        batchIndex++;
                        var progressBarTitle =
                            $"Converting ({fileName}, batch {batchIndex.ToString()}/{totalBatchCount.ToString()})";
                        var targetBuffer = target.PublicMaxSizeBuffer;

                        PointImportJobs.ConvertLasData(view, targetBuffer, header.PointDataSize, (int)batchCount,
                                                       ref header, transformationData, progressBarTitle);

                        target.AddChunk(targetBuffer, (int)batchCount);
                    }

                    processed     += batchCount;
                    currentOffset += batchSize;
                }
            }

            return true;
        }
Example #3
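Shared helper that builds the vertical histogram for any format described by DefaultHeaderData, using the same memory-mapped batching as the LAS-specific override in Example #1.

        /// <summary>
        /// Default <see cref="GenerateHistogram"/> implementation for formats using <see cref="DefaultHeaderData"/>.
        /// </summary>
        /// <param name="headerData">Data extracted from file header.</param>
        /// <param name="bounds">Bounds of the point cloud, used to initialize the histogram.</param>
        /// <returns>Vertical histogram of points in given file.</returns>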
        protected PointCloudVerticalHistogram GenerateHistogramDefault(DefaultHeaderData headerData, PointCloudBounds bounds)
        {
            var fileName = Path.GetFileName(FilePath);

            long currentOffset = headerData.DataOffset;
            long processed     = 0;

            var result = new PointCloudVerticalHistogram(bounds);

            using (var file = MemoryMappedFile.CreateFromFile(FilePath ?? throw new Exception("Input file not found."), FileMode.Open))
            {
                var batchIndex = 0;

                var maxArraySize    = TreeUtility.CalculateMaxArraySize(headerData.DataStride);
                var totalBatchCount = Mathf.CeilToInt((float)headerData.DataCount / maxArraySize);

                while (processed < headerData.DataCount)
                {
                    var batchCount = Math.Min(maxArraySize, headerData.DataCount - processed);
                    var batchSize  = batchCount * headerData.DataStride;

                    using (var view = file.CreateViewAccessor(currentOffset, batchSize, MemoryMappedFileAccess.Read))
                    {
                        unsafe
                        {
                            batchIndex++;
                            var progressBarTitle =
                                $"Generating histogram ({fileName}, batch {batchIndex.ToString()}/{totalBatchCount.ToString()})";

                            var batchHistogram = PointImportJobs.GenerateHistogram(view, batchCount,
                                                                                   headerData.DataStride, headerData.Elements, bounds, progressBarTitle);

                            result.AddData(batchHistogram.regions);
                        }
                    }

                    processed     += batchCount;
                    currentOffset += batchSize;
                }
            }

            return result;
        }
Example #4
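Shared ConvertPoints helper for DefaultHeaderData-based formats; it mirrors Example #2 but converts through the generic element layout from the header instead of the LAS-specific path.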
        /// <summary>
        /// Default <see cref="ConvertPoints"/> implementation for formats using <see cref="DefaultHeaderData"/>.
        /// </summary>
        /// <param name="headerData">Data extracted from file header.</param>
        /// <param name="target">Target processor dispatcher to which transformed points should be passed.</param>
        /// <param name="transformationData">Data used for transformation of the points.</param>
        /// <returns>True if conversion finished successfully, false otherwise.</returns>
        protected bool ConvertPointsDefault(DefaultHeaderData headerData, NodeProcessorDispatcher target,
                                            TransformationData transformationData)
        {
            var fileName = Path.GetFileName(FilePath);

            long currentOffset = headerData.DataOffset;
            long processed     = 0;

            using (var file = MemoryMappedFile.CreateFromFile(FilePath, FileMode.Open))
            {
                var batchIndex = 0;

                var maxArraySize    = TreeUtility.CalculateMaxArraySize(headerData.DataStride);
                var totalBatchCount =
                    Mathf.CeilToInt((float)headerData.DataCount / maxArraySize);

                while (processed < headerData.DataCount)
                {
                    var batchCount = Math.Min(maxArraySize, headerData.DataCount - processed);
                    var batchSize  = batchCount * headerData.DataStride;

                    using (var view = file.CreateViewAccessor(currentOffset, batchSize, MemoryMappedFileAccess.Read))
                    {
                        batchIndex++;
                        var progressBarTitle =
                            $"Converting ({fileName}, batch {batchIndex.ToString()}/{totalBatchCount.ToString()})";
                        var targetBuffer = target.PublicMaxSizeBuffer;

                        PointImportJobs.ConvertData(view, targetBuffer, headerData.Elements, transformationData,
                                                    headerData.DataStride, batchCount, progressBarTitle);

                        target.AddChunk(targetBuffer, (int)batchCount);
                    }

                    processed     += batchCount;
                    currentOffset += batchSize;
                }
            }

            return true;
        }
Example #5
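Shared CalculateBounds helper for DefaultHeaderData-based formats; the bounds of each batch are encapsulated into a running PointCloudBounds result.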
        /// <summary>
        /// Default <see cref="CalculateBounds"/> implementation for formats using <see cref="DefaultHeaderData"/>.
        /// </summary>
        /// <param name="headerData">Data extracted from file header.</param>
        /// <returns>Bounds of points in given file.</returns>
        protected PointCloudBounds CalculateBoundsDefault(DefaultHeaderData headerData)
        {
            var fileName = Path.GetFileName(FilePath);

            long currentOffset = headerData.DataOffset;
            long processed     = 0;

            var result = PointCloudBounds.Empty;

            using (var file = MemoryMappedFile.CreateFromFile(FilePath, FileMode.Open))
            {
                var batchIndex = 0;

                var totalBatchCount =
                    Mathf.CeilToInt((float)headerData.DataCount / TreeUtility.MaxPointCountPerArray);

                while (processed < headerData.DataCount)
                {
                    var batchCount = Math.Min(TreeUtility.MaxPointCountPerArray, headerData.DataCount - processed);
                    var batchSize  = batchCount * headerData.DataStride;

                    using (var view = file.CreateViewAccessor(currentOffset, batchSize, MemoryMappedFileAccess.Read))
                    {
                        batchIndex++;
                        var progressBarTitle =
                            $"Calculating bounds ({fileName}, batch {batchIndex.ToString()}/{totalBatchCount.ToString()})";

                        var batchBounds = PointImportJobs.CalculateBounds(view, batchCount,
                                                                          headerData.DataStride, headerData.Elements, progressBarTitle);

                        result.Encapsulate(batchBounds);
                    }

                    processed     += batchCount;
                    currentOffset += batchSize;
                }
            }

            return result;
        }