/// <summary>
/// Creates an analysis result from the measured tile density, Z statistics,
/// source quantization, and the sparse grid-index segments used for reads.
/// </summary>
/// <param name="density">Measured per-tile point density.</param>
/// <param name="statistics">Computed Z-value statistics.</param>
/// <param name="quantization">Quantization of the analyzed source.</param>
/// <param name="gridIndex">Sparse grid regions for segmented enumeration (may be null).</param>
public PointCloudAnalysisResult(PointCloudTileDensity density, Statistics statistics, SQuantization3D quantization, List<PointCloudBinarySourceEnumeratorSparseGridRegion> gridIndex)
{
	Density = density;
	Statistics = statistics;
	Quantization = quantization;
	GridIndex = gridIndex;
}
/// <summary>
/// Estimates tile density and Z statistics for the source in a single pass,
/// then derives a sparse grid index sized to the given segment length.
/// </summary>
/// <param name="source">Quantized binary point source to analyze.</param>
/// <param name="maxSegmentLength">Maximum byte length of a read segment.</param>
/// <param name="tileCounts">Grid that accumulates per-tile point counts.</param>
/// <param name="progressManager">Progress reporting and cancellation.</param>
/// <returns>Combined density, statistics, and grid-index result.</returns>
private static PointCloudAnalysisResult QuantEstimateDensity(IPointCloudBinarySource source, int maxSegmentLength, SQuantizedExtentGrid<int> tileCounts, ProgressManager progressManager)
{
	var extent = source.Extent;
	var quantizedExtent = source.QuantizedExtent;

	// 1024 buckets over the quantized Z range for histogram-based statistics.
	var statsMapping = new ScaledStatisticsMapping(quantizedExtent.MinZ, quantizedExtent.RangeZ, 1024);
	// NOTE(review): confirm whether GridCounter holds resources needing disposal.
	var gridCounter = new GridCounter(source, tileCounts);

	// A single pass over the source blocks feeds both the tile counter and
	// the statistics mapping.
	using (var process = progressManager.StartProcess("QuantEstimateDensity"))
	{
		var group = new ChunkProcessSet(gridCounter, statsMapping);
		group.Process(source.GetBlockEnumerator(process));
	}

	var stats = statsMapping.ComputeStatistics(extent.MinZ, extent.RangeZ);
	var density = new PointCloudTileDensity(tileCounts, source.Quantization);
	var gridIndexSegments = gridCounter.GetGridIndex(density, maxSegmentLength);

	return new PointCloudAnalysisResult(density, stats, source.Quantization, gridIndexSegments);
}
/// <summary>
/// Builds a tile set from measured per-tile point counts, creating only the
/// valid (non-empty) tiles in tile order and assigning each its data offset.
/// </summary>
/// <param name="source">Source providing extent and quantization.</param>
/// <param name="density">Measured density (point/tile/valid-tile counts).</param>
/// <param name="tileCounts">Per-cell point counts, indexed [row, col].</param>
/// <param name="lowResCounts">Per-cell low-resolution point counts.</param>
public PointCloudTileSet(IPointCloudBinarySource source, PointCloudTileDensity density, SQuantizedExtentGrid<int> tileCounts, Grid<int> lowResCounts)
{
	Extent = source.Extent;
	Quantization = source.Quantization;
	QuantizedExtent = source.QuantizedExtent;
	Density = density;

	Cols = tileCounts.SizeX;
	Rows = tileCounts.SizeY;
	TileSizeX = tileCounts.CellSizeX;
	TileSizeY = tileCounts.CellSizeY;
	//TileSize = tileCounts.CellSize;

	PointCount = density.PointCount;
	TileCount = density.TileCount;
	ValidTileCount = density.ValidTileCount;
	LowResCount = 0;

	m_tileIndex = CreateTileIndex(ValidTileCount);
	m_tiles = new PointCloudTile[density.ValidTileCount];

	// create valid tiles (in order)
	long offset = 0;
	int validTileIndex = 0;
	foreach (var tile in GetTileOrdering(Rows, Cols))
	{
		int pointCount = tileCounts.Data[tile.Row, tile.Col];
		if (pointCount > 0)
		{
			var lowResCount = lowResCounts.Data[tile.Row, tile.Col];
			// The running LowResCount (total BEFORE this tile's contribution)
			// is passed to the tile, so the statement order below is significant.
			m_tiles[validTileIndex] = new PointCloudTile(this, tile.Col, tile.Row, validTileIndex, offset, pointCount, LowResCount, lowResCount);
			m_tileIndex.Add(tile.Index, validTileIndex);
			++validTileIndex;
			// Only full-res points advance the sequential tile offset;
			// low-res points are accumulated separately.
			offset += (pointCount - lowResCount);
			LowResCount += lowResCount;
		}
	}
}
/// <summary>
/// Deserializes a tile set: reads grid dimensions, extent, quantization, and
/// density, then reconstructs valid tiles from the dense per-cell counts.
/// NOTE(review): read order must mirror the corresponding writer — confirm
/// against the serialization method (not visible in this chunk).
/// </summary>
/// <param name="reader">Reader positioned at the start of tile-set data.</param>
public PointCloudTileSet(BinaryReader reader)
{
	Rows = reader.ReadUInt16();
	Cols = reader.ReadUInt16();
	TileSizeY = reader.ReadInt32();
	TileSizeX = reader.ReadInt32();
	TileCount = Rows * Cols;

	Extent = reader.ReadExtent3D();
	Quantization = reader.ReadSQuantization3D();
	QuantizedExtent = Quantization.Convert(Extent);
	Density = reader.ReadTileDensity();

	PointCount = Density.PointCount;
	ValidTileCount = Density.ValidTileCount;
	LowResCount = 0;

	m_tileIndex = CreateTileIndex(ValidTileCount);
	m_tiles = new PointCloudTile[ValidTileCount];

	// fill in valid tiles (dense)
	long pointOffset = 0;
	var i = 0;
	foreach (var tile in GetTileOrdering(Rows, Cols))
	{
		// Counts are stored densely: a pair is read for EVERY cell,
		// including empty ones, to keep the stream aligned with tile order.
		var pointCount = reader.ReadInt32();
		var lowResCount = reader.ReadInt32();
		if (pointCount > 0)
		{
			// Running LowResCount (before this tile) is passed to the tile;
			// statement order below is significant.
			m_tiles[i] = new PointCloudTile(this, tile.Col, tile.Row, i, pointOffset, pointCount, LowResCount, lowResCount);
			m_tileIndex.Add(tile.Index, i);
			pointOffset += (pointCount - lowResCount);
			LowResCount += lowResCount;
			++i;
		}
	}
}
/// <summary>
/// Creates an analysis result with no grid index (no segmented read plan).
/// Delegates to the full constructor with a null index.
/// </summary>
/// <param name="density">Measured per-tile point density.</param>
/// <param name="statistics">Computed Z-value statistics.</param>
/// <param name="quantization">Quantization of the analyzed source.</param>
public PointCloudAnalysisResult(PointCloudTileDensity density, Statistics statistics, SQuantization3D quantization)
	: this(density, statistics, quantization, null)
{
}
/// <summary>
/// Analyzes the source, then writes its points into a tiled output file
/// segment by segment: each grid-index segment is filtered/sorted into the
/// segment buffer and streamed out tile by tile, with low-res points held
/// back and appended after all segments.
/// </summary>
/// <param name="tiledFile">Destination LAS file (provides path and point data offset).</param>
/// <param name="segmentBuffer">Reusable buffer sized for one index segment.</param>
/// <param name="progressManager">Progress reporting and cancellation.</param>
/// <returns>The tile source describing the written file.</returns>
public PointCloudTileSource TilePointFileIndex(LASFile tiledFile, BufferInstance segmentBuffer, ProgressManager progressManager)
{
	var stopwatch = new Stopwatch();
	stopwatch.Start();

	var analysis = AnalyzePointFile(segmentBuffer.Length, progressManager);
	var quantizedExtent = m_source.QuantizedExtent;
	var tileCounts = analysis.Density.GetTileCountsForInitialization();

	// Output size: header/offset region plus the full point data.
	var fileSize = tiledFile.PointDataOffset + (m_source.PointSizeBytes * m_source.Count);
	AttemptFastAllocate(tiledFile.FilePath, fileSize);

	var lowResPointCountMax = PROPERTY_MAX_LOWRES_POINTS.Value;
	// NOTE(review): lowResBuffer is acquired but not explicitly released in
	// this method — confirm BufferManager ownership semantics (keyed by m_id).
	var lowResBuffer = BufferManager.AcquireBuffer(m_id, lowResPointCountMax * m_source.PointSizeBytes);
	var lowResWrapper = new PointBufferWrapper(lowResBuffer, m_source.PointSizeBytes, lowResPointCountMax);

	// Budget low-res points evenly across probable valid tiles.
	// NOTE(review): validTiles == 0 would throw DivideByZeroException here —
	// confirm the analysis always yields at least one valid cell.
	var validTiles = analysis.GridIndex.Sum(r => r.GridRange.ValidCells);
	var lowResPointsPerTile = lowResPointCountMax / validTiles;
	var lowResTileSize = (ushort)Math.Sqrt(lowResPointsPerTile);

	var lowResGrid = Grid<int>.Create(lowResTileSize, lowResTileSize, true, -1);
	var lowResCounts = tileCounts.Copy<int>();

	using (var outputStream = StreamManager.OpenWriteStream(tiledFile.FilePath, fileSize, tiledFile.PointDataOffset))
	{
		var i = 0;
		foreach (var segment in analysis.GridIndex)
		{
			progressManager.Log("~ Processing Index Segment {0}/{1}", ++i, analysis.GridIndex.Count);

			var sparseSegment = m_source.CreateSparseSegment(segment);
			var sparseSegmentWrapper = new PointBufferWrapper(segmentBuffer, sparseSegment);

			var tileRegionFilter = new TileRegionFilter(tileCounts, quantizedExtent, segment.GridRange);

			// this call will fill the buffer with points, add the counts, and sort
			QuantTilePointsIndexed(sparseSegment, sparseSegmentWrapper, tileRegionFilter, tileCounts, lowResWrapper, lowResGrid, lowResCounts, progressManager);
			var segmentFilteredPointCount = tileRegionFilter.GetCellOrdering().Sum(t => tileCounts.Data[t.Row, t.Col]);
			var segmentFilteredBytes = segmentFilteredPointCount * sparseSegmentWrapper.PointSizeBytes;

			// write out the buffer
			using (var process = progressManager.StartProcess("WriteIndexSegment"))
			{
				var segmentBufferIndex = 0;
				foreach (var tile in segment.GridRange.GetCellOrdering())
				{
					var tileCount = tileCounts.Data[tile.Row, tile.Col];
					if (tileCount > 0)
					{
						// Low-res points are excluded here; they are written
						// once, after all segments complete.
						var tileSize = (tileCount - lowResCounts.Data[tile.Row, tile.Col]) * sparseSegmentWrapper.PointSizeBytes;
						outputStream.Write(sparseSegmentWrapper.Data, segmentBufferIndex, tileSize);
						segmentBufferIndex += tileSize;
						if (!process.Update((float)segmentBufferIndex / segmentFilteredBytes))
						{
							break;
						}
					}
				}
			}
			if (progressManager.IsCanceled())
			{
				break;
			}
		}

		// write low-res
		var lowResActualPointCount = lowResCounts.Data.Cast<int>().Sum();
		outputStream.Write(lowResWrapper.Data, 0, lowResActualPointCount * lowResWrapper.PointSizeBytes);
	}

	var actualDensity = new PointCloudTileDensity(tileCounts, m_source.Quantization);
	var tileSet = new PointCloudTileSet(m_source, actualDensity, tileCounts, lowResCounts);
	var tileSource = new PointCloudTileSource(tiledFile, tileSet, analysis.Statistics);

	// Only a fully completed write is marked clean.
	if (!progressManager.IsCanceled())
	{
		tileSource.IsDirty = false;
	}

	tileSource.WriteHeader();

	return (tileSource);
}
/// <summary>
/// Partitions the tile ordering into segments whose estimated point counts
/// fit in <paramref name="maxSegmentLength"/> bytes, and maps each segment to
/// sorted, coalesced ranges of source chunk indices for sequential reads.
/// </summary>
/// <param name="density">Measured density used to size the actual tile grid.</param>
/// <param name="maxSegmentLength">Maximum byte length of a segment buffer.</param>
/// <returns>One sparse grid region per segment, in tile order.</returns>
public List<PointCloudBinarySourceEnumeratorSparseGridRegion> GetGridIndex(PointCloudTileDensity density, int maxSegmentLength)
{
	var maxSegmentPointCount = (maxSegmentLength / m_source.PointSizeBytes);

	// update index cells
	// Invert the per-chunk tile lists into a per-tile set of chunk indices.
	var indexGrid = (SQuantizedExtentGrid<GridIndexCell>)m_grid.Copy<GridIndexCell>();
	for (var i = 0; i < m_chunkTiles.Count; i++)
	{
		foreach (var tileIndex in m_chunkTiles[i])
		{
			var coord = new PointCloudTileCoord(tileIndex);
			var indexCell = indexGrid.Data[coord.Row, coord.Col];
			if (indexCell == null)
			{
				indexCell = new GridIndexCell();
				indexGrid.Data[coord.Row, coord.Col] = indexCell;
			}
			indexCell.Add(i);
		}
	}
	indexGrid.CorrectCountOverflow();

	var actualGrid = density.CreateTileCountsForInitialization(m_source);

	var regionSourcesBySegment = new List<List<Range>>();
	var tilesPerSegment = new List<GridRange>();
	var pointDataBytes = m_source.PointSizeBytes * m_source.Count;

	var tileOrder = PointCloudTileSet.GetTileOrdering(actualGrid.SizeY, actualGrid.SizeX).ToArray();
	var tileOrderIndex = 0;
	while (tileOrderIndex < tileOrder.Length)
	{
		// Greedily extend the current segment until the point budget is hit.
		var segmentTilesFromEstimation = new HashSet<int>();
		var segmentChunks = new HashSet<int>();

		var startTile = tileOrder[tileOrderIndex];
		var startTileIndex = new GridCoord(actualGrid.Def, startTile.Row, startTile.Col);

		var segmentProbableValidTileCount = 0;
		var segmentPointCount = 0;

		while (tileOrderIndex < tileOrder.Length)
		{
			var tile = tileOrder[tileOrderIndex];

			// unfortunately, I cannot check the actual counts, because they have not been measured
			// instead, I can count the estimated area, and undershoot

			// get unique tiles/chunks
			var uniqueEstimatedCoords = indexGrid
				.GetCellCoordsInScaledRange(tile.Col, tile.Row, actualGrid)
				.Where(c => !segmentTilesFromEstimation.Contains(c.Index))
				.ToList();

			// count probable valid tiles for low-res estimation
			if (uniqueEstimatedCoords.Count > 0)
			{
				var uniquePointCount = uniqueEstimatedCoords.Sum(c => m_grid.Data[c.Row, c.Col]);

				// NOTE(review): if a single tile's estimate alone exceeds the
				// budget, this breaks without advancing tileOrderIndex and the
				// outer loop retries the same tile — potential infinite loop.
				// Confirm segment buffers always exceed any one tile estimate.
				if (segmentPointCount + uniquePointCount > maxSegmentPointCount)
					break;

				var uniqueChunks = uniqueEstimatedCoords
					.Select(c => indexGrid.Data[c.Row, c.Col])
					.SelectMany(c => c.Chunks)
					.ToHashSet(segmentChunks);

				// no longer limiting the chunks per segment to fit buffer.
				// this means that the full chunk data will almost certainly
				// *not* fit in the segment buffer, but the filtered data will.

				// merge tiles/chunks
				foreach (var coord in uniqueEstimatedCoords)
					segmentTilesFromEstimation.Add(coord.Index);

				foreach (var index in uniqueChunks)
					segmentChunks.Add(index);

				segmentPointCount += uniquePointCount;
				++segmentProbableValidTileCount;
			}
			++tileOrderIndex;
		}

		if (segmentChunks.Count > 0)
		{
			var endTile = tileOrder[tileOrderIndex - 1];
			var endTileIndex = new GridCoord(actualGrid.Def, endTile.Row, endTile.Col);

			// group by sequential regions
			// Coalesce sorted chunk indices into contiguous runs.
			var sortedCellList = segmentChunks.ToArray();
			Array.Sort(sortedCellList);
			var regions = new List<Range>();
			var sequenceStartIndex = 0;
			while (sequenceStartIndex < sortedCellList.Length)
			{
				// find incremental sequence
				int i = sequenceStartIndex;
				++i;
				while (i < sortedCellList.Length && (sortedCellList[i] == sortedCellList[i - 1] + 1))
					++i;
				regions.Add(new Range(sortedCellList[sequenceStartIndex], i - sequenceStartIndex));
				sequenceStartIndex = i;
			}

			regionSourcesBySegment.Add(regions);
			tilesPerSegment.Add(new GridRange(startTileIndex, endTileIndex, segmentProbableValidTileCount));
		}
	}

	var chunkRangeSumForAllSegments = regionSourcesBySegment.Sum(r => r.Sum(r2 => r2.Count));
	Context.WriteLine("chunkRangeSumForAllSegments: {0}", chunkRangeSumForAllSegments);
	Context.WriteLine(" ratio: {0}", (double)chunkRangeSumForAllSegments * BufferManager.BUFFER_SIZE_BYTES / pointDataBytes);

	return regionSourcesBySegment.Select((t, i) => new PointCloudBinarySourceEnumeratorSparseGridRegion(t, tilesPerSegment[i], m_maxPointCountPerChunk)).ToList();
}
/// <summary>
/// Analyzes the source, then writes its points into a tiled output file
/// segment by segment: each grid-index segment is filtered/sorted into the
/// segment buffer and streamed out tile by tile, with low-res points held
/// back and appended after all segments.
/// </summary>
/// <param name="tiledFile">Destination LAS file (provides path and point data offset).</param>
/// <param name="segmentBuffer">Reusable buffer sized for one index segment.</param>
/// <param name="progressManager">Progress reporting and cancellation.</param>
/// <returns>The tile source describing the written file.</returns>
public PointCloudTileSource TilePointFileIndex(LASFile tiledFile, BufferInstance segmentBuffer, ProgressManager progressManager)
{
	var stopwatch = new Stopwatch();
	stopwatch.Start();

	var analysis = AnalyzePointFile(segmentBuffer.Length, progressManager);
	var quantizedExtent = m_source.QuantizedExtent;
	var tileCounts = analysis.Density.GetTileCountsForInitialization();

	// Output size: header/offset region plus the full point data.
	var fileSize = tiledFile.PointDataOffset + (m_source.PointSizeBytes * m_source.Count);
	AttemptFastAllocate(tiledFile.FilePath, fileSize);

	var lowResPointCountMax = PROPERTY_MAX_LOWRES_POINTS.Value;
	// NOTE(review): lowResBuffer is acquired but not explicitly released in
	// this method — confirm BufferManager ownership semantics (keyed by m_id).
	var lowResBuffer = BufferManager.AcquireBuffer(m_id, lowResPointCountMax * m_source.PointSizeBytes);
	var lowResWrapper = new PointBufferWrapper(lowResBuffer, m_source.PointSizeBytes, lowResPointCountMax);

	// Budget low-res points evenly across probable valid tiles.
	// NOTE(review): validTiles == 0 would throw DivideByZeroException here —
	// confirm the analysis always yields at least one valid cell.
	var validTiles = analysis.GridIndex.Sum(r => r.GridRange.ValidCells);
	var lowResPointsPerTile = lowResPointCountMax / validTiles;
	var lowResTileSize = (ushort)Math.Sqrt(lowResPointsPerTile);

	var lowResGrid = Grid<int>.Create(lowResTileSize, lowResTileSize, true, -1);
	var lowResCounts = tileCounts.Copy<int>();

	using (var outputStream = StreamManager.OpenWriteStream(tiledFile.FilePath, fileSize, tiledFile.PointDataOffset))
	{
		var i = 0;
		foreach (var segment in analysis.GridIndex)
		{
			progressManager.Log("~ Processing Index Segment {0}/{1}", ++i, analysis.GridIndex.Count);

			var sparseSegment = m_source.CreateSparseSegment(segment);
			var sparseSegmentWrapper = new PointBufferWrapper(segmentBuffer, sparseSegment);

			var tileRegionFilter = new TileRegionFilter(tileCounts, quantizedExtent, segment.GridRange);

			// this call will fill the buffer with points, add the counts, and sort
			QuantTilePointsIndexed(sparseSegment, sparseSegmentWrapper, tileRegionFilter, tileCounts, lowResWrapper, lowResGrid, lowResCounts, progressManager);
			var segmentFilteredPointCount = tileRegionFilter.GetCellOrdering().Sum(t => tileCounts.Data[t.Row, t.Col]);
			var segmentFilteredBytes = segmentFilteredPointCount * sparseSegmentWrapper.PointSizeBytes;

			// write out the buffer
			using (var process = progressManager.StartProcess("WriteIndexSegment"))
			{
				var segmentBufferIndex = 0;
				foreach (var tile in segment.GridRange.GetCellOrdering())
				{
					var tileCount = tileCounts.Data[tile.Row, tile.Col];
					if (tileCount > 0)
					{
						// Low-res points are excluded here; they are written
						// once, after all segments complete.
						var tileSize = (tileCount - lowResCounts.Data[tile.Row, tile.Col]) * sparseSegmentWrapper.PointSizeBytes;
						outputStream.Write(sparseSegmentWrapper.Data, segmentBufferIndex, tileSize);
						segmentBufferIndex += tileSize;
						if (!process.Update((float)segmentBufferIndex / segmentFilteredBytes))
							break;
					}
				}
			}
			if (progressManager.IsCanceled())
				break;
		}

		// write low-res
		var lowResActualPointCount = lowResCounts.Data.Cast<int>().Sum();
		outputStream.Write(lowResWrapper.Data, 0, lowResActualPointCount * lowResWrapper.PointSizeBytes);
	}

	var actualDensity = new PointCloudTileDensity(tileCounts, m_source.Quantization);
	var tileSet = new PointCloudTileSet(m_source, actualDensity, tileCounts, lowResCounts);
	var tileSource = new PointCloudTileSource(tiledFile, tileSet, analysis.Statistics);

	// Only a fully completed write is marked clean.
	if (!progressManager.IsCanceled())
		tileSource.IsDirty = false;

	tileSource.WriteHeader();

	return tileSource;
}
/// <summary>
/// Estimates tile density and Z statistics for the source in a single pass,
/// then derives a sparse grid index sized to the given segment length.
/// </summary>
/// <param name="source">Quantized binary point source to analyze.</param>
/// <param name="maxSegmentLength">Maximum byte length of a read segment.</param>
/// <param name="tileCounts">Grid that accumulates per-tile point counts.</param>
/// <param name="progressManager">Progress reporting and cancellation.</param>
/// <returns>Combined density, statistics, and grid-index result.</returns>
private static PointCloudAnalysisResult QuantEstimateDensity(IPointCloudBinarySource source, int maxSegmentLength, SQuantizedExtentGrid<int> tileCounts, ProgressManager progressManager)
{
	var extent = source.Extent;
	var quantizedExtent = source.QuantizedExtent;

	// 1024 buckets over the quantized Z range for histogram-based statistics.
	var statsMapping = new ScaledStatisticsMapping(quantizedExtent.MinZ, quantizedExtent.RangeZ, 1024);
	// NOTE(review): confirm whether GridCounter holds resources needing disposal.
	var gridCounter = new GridCounter(source, tileCounts);

	// A single pass over the source blocks feeds both the tile counter and
	// the statistics mapping.
	using (var process = progressManager.StartProcess("QuantEstimateDensity"))
	{
		var group = new ChunkProcessSet(gridCounter, statsMapping);
		group.Process(source.GetBlockEnumerator(process));
	}

	var stats = statsMapping.ComputeStatistics(extent.MinZ, extent.RangeZ);
	var density = new PointCloudTileDensity(tileCounts, source.Quantization);
	var gridIndexSegments = gridCounter.GetGridIndex(density, maxSegmentLength);

	return new PointCloudAnalysisResult(density, stats, source.Quantization, gridIndexSegments);
}
/// <summary>
/// Partitions the tile ordering into segments whose estimated point counts
/// fit in <paramref name="maxSegmentLength"/> bytes, and maps each segment to
/// sorted, coalesced ranges of source chunk indices for sequential reads.
/// </summary>
/// <param name="density">Measured density used to size the actual tile grid.</param>
/// <param name="maxSegmentLength">Maximum byte length of a segment buffer.</param>
/// <returns>One sparse grid region per segment, in tile order.</returns>
public List<PointCloudBinarySourceEnumeratorSparseGridRegion> GetGridIndex(PointCloudTileDensity density, int maxSegmentLength)
{
	var maxSegmentPointCount = (maxSegmentLength / m_source.PointSizeBytes);

	// update index cells
	// Invert the per-chunk tile lists into a per-tile set of chunk indices.
	var indexGrid = (SQuantizedExtentGrid<GridIndexCell>)m_grid.Copy<GridIndexCell>();
	for (var i = 0; i < m_chunkTiles.Count; i++)
	{
		foreach (var tileIndex in m_chunkTiles[i])
		{
			var coord = new PointCloudTileCoord(tileIndex);
			var indexCell = indexGrid.Data[coord.Row, coord.Col];
			if (indexCell == null)
			{
				indexCell = new GridIndexCell();
				indexGrid.Data[coord.Row, coord.Col] = indexCell;
			}
			indexCell.Add(i);
		}
	}
	indexGrid.CorrectCountOverflow();

	var actualGrid = density.CreateTileCountsForInitialization(m_source);

	var regionSourcesBySegment = new List<List<Range>>();
	var tilesPerSegment = new List<GridRange>();
	var pointDataBytes = m_source.PointSizeBytes * m_source.Count;

	var tileOrder = PointCloudTileSet.GetTileOrdering(actualGrid.SizeY, actualGrid.SizeX).ToArray();
	var tileOrderIndex = 0;
	while (tileOrderIndex < tileOrder.Length)
	{
		// Greedily extend the current segment until the point budget is hit.
		var segmentTilesFromEstimation = new HashSet<int>();
		var segmentChunks = new HashSet<int>();

		var startTile = tileOrder[tileOrderIndex];
		var startTileIndex = new GridCoord(actualGrid.Def, startTile.Row, startTile.Col);

		var segmentProbableValidTileCount = 0;
		var segmentPointCount = 0;

		while (tileOrderIndex < tileOrder.Length)
		{
			var tile = tileOrder[tileOrderIndex];

			// unfortunately, I cannot check the actual counts, because they have not been measured
			// instead, I can count the estimated area, and undershoot

			// get unique tiles/chunks
			var uniqueEstimatedCoords = indexGrid
				.GetCellCoordsInScaledRange(tile.Col, tile.Row, actualGrid)
				.Where(c => !segmentTilesFromEstimation.Contains(c.Index))
				.ToList();

			// count probable valid tiles for low-res estimation
			if (uniqueEstimatedCoords.Count > 0)
			{
				var uniquePointCount = uniqueEstimatedCoords.Sum(c => m_grid.Data[c.Row, c.Col]);

				// NOTE(review): if a single tile's estimate alone exceeds the
				// budget, this breaks without advancing tileOrderIndex and the
				// outer loop retries the same tile — potential infinite loop.
				// Confirm segment buffers always exceed any one tile estimate.
				if (segmentPointCount + uniquePointCount > maxSegmentPointCount)
				{
					break;
				}

				var uniqueChunks = uniqueEstimatedCoords
					.Select(c => indexGrid.Data[c.Row, c.Col])
					.SelectMany(c => c.Chunks)
					.ToHashSet(segmentChunks);

				// no longer limiting the chunks per segment to fit buffer.
				// this means that the full chunk data will almost certainly
				// *not* fit in the segment buffer, but the filtered data will.

				// merge tiles/chunks
				foreach (var coord in uniqueEstimatedCoords)
				{
					segmentTilesFromEstimation.Add(coord.Index);
				}

				foreach (var index in uniqueChunks)
				{
					segmentChunks.Add(index);
				}

				segmentPointCount += uniquePointCount;
				++segmentProbableValidTileCount;
			}
			++tileOrderIndex;
		}

		if (segmentChunks.Count > 0)
		{
			var endTile = tileOrder[tileOrderIndex - 1];
			var endTileIndex = new GridCoord(actualGrid.Def, endTile.Row, endTile.Col);

			// group by sequential regions
			// Coalesce sorted chunk indices into contiguous runs.
			var sortedCellList = segmentChunks.ToArray();
			Array.Sort(sortedCellList);
			var regions = new List<Range>();
			var sequenceStartIndex = 0;
			while (sequenceStartIndex < sortedCellList.Length)
			{
				// find incremental sequence
				int i = sequenceStartIndex;
				++i;
				while (i < sortedCellList.Length && (sortedCellList[i] == sortedCellList[i - 1] + 1))
				{
					++i;
				}
				regions.Add(new Range(sortedCellList[sequenceStartIndex], i - sequenceStartIndex));
				sequenceStartIndex = i;
			}

			regionSourcesBySegment.Add(regions);
			tilesPerSegment.Add(new GridRange(startTileIndex, endTileIndex, segmentProbableValidTileCount));
		}
	}

	var chunkRangeSumForAllSegments = regionSourcesBySegment.Sum(r => r.Sum(r2 => r2.Count));
	Context.WriteLine("chunkRangeSumForAllSegments: {0}", chunkRangeSumForAllSegments);
	Context.WriteLine(" ratio: {0}", (double)chunkRangeSumForAllSegments * BufferManager.BUFFER_SIZE_BYTES / pointDataBytes);

	return (regionSourcesBySegment.Select((t, i) => new PointCloudBinarySourceEnumeratorSparseGridRegion(t, tilesPerSegment[i], m_maxPointCountPerChunk)).ToList());
}