/// <summary>
/// Opens <paramref name="path"/> for unbuffered (write-through, no OS cache) sequential writing.
/// The file length is rounded up to a sector boundary because unbuffered I/O requires
/// sector-aligned transfers; the real length is restored on close when
/// <paramref name="truncateOnClose"/> is set (see Dispose).
/// </summary>
/// <param name="path">Target file path.</param>
/// <param name="length">Logical file length in bytes.</param>
/// <param name="startPosition">Byte offset at which writing begins.</param>
/// <param name="truncateOnClose">Whether to truncate the file back to its logical length on close.</param>
public FileStreamUnbufferedSequentialWrite(string path, long length, long startPosition, bool truncateOnClose)
{
    m_path = path;
    m_id = IdentityManager.AcquireIdentity(string.Format("{0}:{1}", this.GetType().Name, m_path));
    // pinned buffer so its address stays stable for unbuffered writes
    m_buffer = BufferManager.AcquireBuffer(m_id, true);
    m_sectorSize = PathUtil.GetDriveSectorSize(m_path);
    m_length = length;
    // round the length up to the next sector boundary (assumes sector size is a power of two)
    m_lengthAligned = (m_length + (m_sectorSize - 1)) & (~(long)(m_sectorSize - 1));
    m_truncateOnClose = truncateOnClose;

    const FileMode mode = FileMode.OpenOrCreate;
    const FileAccess access = FileAccess.Write;
    const FileShare share = FileShare.None;
    // FileFlagNoBuffering is the Win32 FILE_FLAG_NO_BUFFERING value, which FileOptions does not define
    const FileOptions options = (FileFlagNoBuffering | FileOptions.WriteThrough | FileOptions.SequentialScan);

    m_stream = new FileStream(m_path, mode, access, share, BUFFER_SIZE, options);
    m_stream.SetLength(m_lengthAligned);

    // align startPosition up to a sector boundary, then step back one full sector;
    // a negative result means startPosition falls within the first sector.
    // NOTE(review): for a startPosition exactly on a sector boundary this lands one
    // whole sector back with m_bufferIndex == m_sectorSize — presumably the write
    // path accounts for that; confirm against FlushInternal/Write.
    long startPositionAligned = ((startPosition + (m_sectorSize - 1)) & (~(long)(m_sectorSize - 1))) - m_sectorSize;
    if (startPositionAligned >= 0)
    {
        m_stream.Seek(startPositionAligned, SeekOrigin.Begin);
    }
    else
    {
        startPositionAligned = 0;
    }
    // offset of startPosition within the current sector-aligned window
    m_bufferIndex = (int)(startPosition - startPositionAligned);
}
/// <summary>
/// Flushes pending data, releases the stream and buffer and, when requested,
/// truncates the file back to its actual data length (the stream was opened with a
/// sector-aligned length for unbuffered I/O, so it may be longer than the real data).
/// </summary>
/// <param name="disposing">
/// true when called from an explicit Dispose(); false when called from a finalizer.
/// </param>
protected override void Dispose(bool disposing)
{
    // Fix: the original ran all of this unconditionally. Managed cleanup (FlushInternal,
    // stream/buffer disposal, opening a new FileStream to truncate) must only run on the
    // explicit Dispose() path — during finalization those managed objects may already
    // have been finalized. Behavior on the normal Dispose() path is unchanged.
    if (disposing)
    {
        FlushInternal();

        // capture the logical position before the stream is torn down
        long position = Position;

        if (m_stream != null)
        {
            m_stream.Dispose();
            m_stream = null;
        }

        if (m_buffer != null)
        {
            m_buffer.Dispose();
            m_buffer = null;
        }

        // set the correct length
        // this really isn't necessary for intermediate files (segments)
        if (m_truncateOnClose)
        {
            long length = m_actualLength > 0 ? m_actualLength : position;
            using (var stream = new FileStream(m_path, FileMode.Open, FileAccess.Write, FileShare.None, 8, FileOptions.WriteThrough))
            {
                stream.SetLength(length);
            }
        }
    }
}
/// <summary>
/// Opens <paramref name="path"/> for unbuffered (write-through, no OS cache) sequential writing.
/// The file length is padded up to a sector multiple because unbuffered I/O requires
/// sector-aligned transfers; when <paramref name="truncateOnClose"/> is set, the real
/// length is restored when the instance is disposed.
/// </summary>
/// <param name="path">Target file path.</param>
/// <param name="length">Logical file length in bytes.</param>
/// <param name="startPosition">Byte offset at which writing begins.</param>
/// <param name="truncateOnClose">Whether to truncate the file back to its logical length on close.</param>
public FileStreamUnbufferedSequentialWrite(string path, long length, long startPosition, bool truncateOnClose)
{
    m_path = path;
    m_id = IdentityManager.AcquireIdentity(string.Format("{0}:{1}", this.GetType().Name, m_path));
    // pinned buffer so its address stays stable for unbuffered writes
    m_buffer = BufferManager.AcquireBuffer(m_id, true);
    m_sectorSize = PathUtil.GetDriveSectorSize(m_path);
    m_length = length;
    // round the length up to the next sector boundary (assumes sector size is a power of two)
    m_lengthAligned = (m_length + (m_sectorSize - 1)) & (~(long)(m_sectorSize - 1));
    m_truncateOnClose = truncateOnClose;

    const FileMode mode = FileMode.OpenOrCreate;
    const FileAccess access = FileAccess.Write;
    const FileShare share = FileShare.None;
    // FileFlagNoBuffering carries the Win32 FILE_FLAG_NO_BUFFERING value (not defined by FileOptions)
    const FileOptions options = (FileFlagNoBuffering | FileOptions.WriteThrough | FileOptions.SequentialScan);

    m_stream = new FileStream(m_path, mode, access, share, BUFFER_SIZE, options);
    m_stream.SetLength(m_lengthAligned);

    // align startPosition up to a sector boundary, then step back one full sector;
    // negative means startPosition lies inside the first sector.
    // NOTE(review): for a startPosition exactly on a sector boundary this yields
    // m_bufferIndex == m_sectorSize — presumably handled downstream; confirm.
    long startPositionAligned = ((startPosition + (m_sectorSize - 1)) & (~(long)(m_sectorSize - 1))) - m_sectorSize;
    if (startPositionAligned >= 0)
        m_stream.Seek(startPositionAligned, SeekOrigin.Begin);
    else
        startPositionAligned = 0;
    // offset of startPosition within the current sector-aligned window
    m_bufferIndex = (int)(startPosition - startPositionAligned);
}
/// <summary>
/// Hands out a buffer of exactly <paramref name="size"/> bytes for the given identity,
/// preferring a previously released buffer of that size over a fresh allocation.
/// Optionally pins the buffer so unsafe code can take its address.
/// </summary>
public static BufferInstance AcquireBuffer(Identity id, int size, bool pin)
{
    // make sure size is reasonable
    BufferInstance buffer = null;
    lock (typeof(BufferManager))
    {
        var pool = GetAvailableBuffers(size, false);
        if (pool != null && pool.Count > 0)
        {
            // recycle a pooled buffer of the requested size
            buffer = pool.Pop();
        }
        else
        {
            // nothing pooled at this size; allocate and register a new one
            var bytes = new byte[size];
            buffer = new BufferInstance(bytes);
            c_bufferMapping.Add(bytes, buffer);
        }
        // record ownership so releases can be validated
        c_usedBuffers.Add(buffer, id);
    }

    // pin outside the lock to keep the critical section short
    if (pin && !buffer.Pinned)
        buffer.PinBuffer();

    return buffer;
}
/// <summary>
/// Initializes a new instance of the <see cref="PointCloudBinarySourceCompositeEnumerator"/> class
/// over a caller-supplied buffer. No process is attached, so the enumerator can be
/// managed by an enclosing composite.
/// </summary>
/// <param name="sources">The sources to enumerate.</param>
/// <param name="buffer">The buffer shared by the child enumerators.</param>
public PointCloudBinarySourceCompositeEnumerator(IEnumerable<IPointCloudBinarySourceEnumerable> sources, BufferInstance buffer)
{
    m_sources = sources.ToArray();
    m_buffer = buffer;
    m_process = null;
    // total point count across every child source
    m_points = m_sources.Sum(source => source.Count);
    Reset();
}
/// <summary>
/// Initializes a new instance of the <see cref="PointCloudBinarySourceCompositeEnumerator"/> class
/// whose buffer is acquired (pinned) from the supplied progress process.
/// </summary>
/// <param name="sources">The sources to enumerate.</param>
/// <param name="process">The process that owns buffer acquisition and progress reporting.</param>
public PointCloudBinarySourceCompositeEnumerator(IEnumerable<IPointCloudBinarySourceEnumerable> sources, ProgressManagerProcess process)
{
    m_sources = sources.ToArray();
    m_process = process;
    // the process supplies a pinned buffer in this variant
    m_buffer = m_process.AcquireBuffer(true);
    // total point count across every child source
    m_points = m_sources.Sum(source => source.Count);
    Reset();
}
/// <summary>
/// Wraps one tile's worth of data in the supplied buffer, exposing start and end
/// pointers over the tile's stored bytes.
/// </summary>
/// <param name="tile">The tile whose data occupies the buffer.</param>
/// <param name="buffer">The (pinned) buffer holding the tile data.</param>
public PointCloudTileSourceEnumeratorChunk(PointCloudTile tile, BufferInstance buffer)
{
    m_tile = tile;
    m_buffer = buffer;
    // point record size comes from the tile's owning source
    m_pointSizeBytes = tile.TileSet.TileSource.PointSizeBytes;
    // pointer window spans exactly the tile's stored bytes
    m_dataPtr = buffer.DataPtr;
    m_dataEndPtr = m_dataPtr + tile.StorageSize;
}
/// <summary>
/// Describes a chunk of points inside a shared buffer: a byte window starting at
/// <paramref name="byteIndex"/> of <paramref name="byteLength"/> bytes, interpreted as
/// whole point records.
/// </summary>
public PointBufferWrapperChunk(int index, BufferInstance buffer, int byteIndex, int byteLength, short pointSizeBytes, float progress)
{
    m_index = index;
    m_buffer = buffer;
    m_pointSizeBytes = pointSizeBytes;
    m_progress = progress;

    // derive the point count from the byte window
    m_bytesRead = byteLength;
    m_pointsRead = m_bytesRead / m_pointSizeBytes;

    // pointer window into the shared buffer
    m_dataPtr = buffer.DataPtr + byteIndex;
    m_dataEndPtr = m_dataPtr + m_bytesRead;
}
/// <summary>
/// Copy-constructs a chunk that shares the source chunk's buffer and start pointer
/// but narrows the window to <paramref name="pointCount"/> points.
/// </summary>
public PointBufferWrapperChunk(PointBufferWrapperChunk chunk, int pointCount)
{
    // share the source chunk's buffer, identity, and start pointer
    m_buffer = chunk.m_buffer;
    m_index = chunk.m_index;
    m_pointSizeBytes = chunk.m_pointSizeBytes;
    m_progress = chunk.m_progress;
    m_dataPtr = chunk.m_dataPtr;

    // shrink the readable window to the requested number of points
    m_pointsRead = pointCount;
    m_bytesRead = m_pointsRead * m_pointSizeBytes;
    m_dataEndPtr = m_dataPtr + m_bytesRead;
}
/// <summary>
/// Initializes a new instance of the <see cref="PointCloudBinarySourceEnumerator"/> class
/// over a caller-supplied buffer. No process is attached, so the enumerator can be
/// managed by an enclosing composite.
/// </summary>
/// <param name="source">The sequential point source.</param>
/// <param name="buffer">The buffer used for block reads.</param>
public PointCloudBinarySourceEnumerator(IPointCloudBinarySourceSequentialEnumerable source, BufferInstance buffer)
{
    m_source = source;
    m_stream = m_source.GetStreamReader();
    m_buffer = buffer;
    m_process = null;

    var pointSize = m_source.PointSizeBytes;
    // byte offset just past the last point record
    m_endPosition = m_source.PointDataOffset + m_source.Count * pointSize;
    // largest buffer span holding a whole number of points
    m_usableBytesPerBuffer = (m_buffer.Length / pointSize) * pointSize;

    Reset();
}
/// <summary>
/// Initializes a new instance of the <see cref="PointCloudBinarySourceEnumerator"/> class
/// whose buffer is acquired (pinned) from the supplied progress process.
/// </summary>
/// <param name="source">The sequential point source.</param>
/// <param name="process">The process that owns buffer acquisition and progress reporting.</param>
public PointCloudBinarySourceEnumerator(IPointCloudBinarySourceSequentialEnumerable source, ProgressManagerProcess process)
{
    m_source = source;
    m_stream = m_source.GetStreamReader();
    // the process supplies a pinned buffer in this variant
    m_buffer = process.AcquireBuffer(true);
    m_process = process;

    var pointSize = m_source.PointSizeBytes;
    // byte offset just past the last point record
    m_endPosition = m_source.PointDataOffset + m_source.Count * pointSize;
    // largest buffer span holding a whole number of points
    m_usableBytesPerBuffer = (m_buffer.Length / pointSize) * pointSize;

    Reset();
}
/// <summary>
/// Opens <paramref name="path"/> for unbuffered (no OS cache) sequential reading,
/// positioned at <paramref name="startPosition"/>.
/// </summary>
/// <param name="path">File to read.</param>
/// <param name="startPosition">Byte offset at which reading begins.</param>
public FileStreamUnbufferedSequentialRead(string path, long startPosition)
{
    m_path = path;
    m_id = IdentityManager.AcquireIdentity(string.Format("{0}:{1}", GetType().Name, m_path));
    // pinned buffer with a stable address for unbuffered reads
    m_buffer = BufferManager.AcquireBuffer(m_id, true);
    m_sectorSize = PathUtil.GetDriveSectorSize(m_path);
    m_bufferValidSize = m_buffer.Length;

    const FileMode mode = FileMode.Open;
    const FileAccess access = FileAccess.Read;
    const FileShare share = FileShare.Read;
    // FileFlagNoBuffering carries the Win32 FILE_FLAG_NO_BUFFERING value (not defined by FileOptions)
    const FileOptions options = (FileFlagNoBuffering | FileOptions.WriteThrough | FileOptions.SequentialScan);

    m_stream = new FileStream(m_path, mode, access, share, BUFFER_SIZE, options);
    // secondary stream opened WITHOUT the no-buffering flag — presumably used for reads
    // that cannot be sector-aligned (e.g. the unaligned tail of the file);
    // NOTE(review): confirm against the read/Seek implementation
    m_streamEnd = new FileStream(m_path, mode, access, share, BUFFER_SIZE, FileOptions.WriteThrough);

    Seek(startPosition);
}
/// <summary>
/// Initializes a tile source over an indexed LAS file: wires the tile set back to this
/// source, caches z statistics in both real and quantized form, reserves a buffer for the
/// low-resolution point subset, and persists the tile set and statistics into the file's
/// extended VLRs.
/// </summary>
/// <param name="file">The tiled LAS file backing this source.</param>
/// <param name="tileSet">The tile layout/counts for the file.</param>
/// <param name="zStats">Z statistics for the point data (unquantized).</param>
public PointCloudTileSource(LASFile file, PointCloudTileSet tileSet, Statistics zStats)
    : base(file, tileSet.PointCount, tileSet.Extent, file.Header.Quantization, file.Header.OffsetToPointData, (short)file.Header.PointDataRecordLength)
{
    m_file = file;
    m_id = IdentityManager.AcquireIdentity(GetType().Name);
    m_tileSet = tileSet;
    // back-reference so tiles can reach their owning source
    m_tileSet.TileSource = this;
    m_statisticsZ = zStats;
    m_statisticsQuantizedZ = zStats.ConvertToQuantized(Quantization);
    // sized to hold every low-resolution point for the whole source
    m_lowResBuffer = BufferManager.AcquireBuffer(m_id, tileSet.LowResCount * PointSizeBytes);

    // persist the tile set and statistics as "Jacere" EVLR records 0 and 1
    m_file.UpdateEVLR(new LASRecordIdentifier("Jacere", 0), TileSet);
    m_file.UpdateEVLR(new LASRecordIdentifier("Jacere", 1), StatisticsZ);
}
/// <summary>
/// Returns <paramref name="buffer"/> to the size-keyed pool of available buffers.
/// Throws if the buffer is not currently tracked as in use.
/// </summary>
public static void ReleaseBuffer(BufferInstance buffer)
{
    lock (typeof(BufferManager))
    {
        if (!c_usedBuffers.ContainsKey(buffer))
        {
            throw new Exception("attempted to release a buffer that is not in use");
        }

        c_usedBuffers.Remove(buffer);
        // push onto the stack for this buffer size (created on demand)
        GetAvailableBuffers(buffer.Length, true).Push(buffer);
    }

    // unpin outside the lock to keep the critical section short
    if (UNPIN_ON_RELEASE && buffer.Pinned)
    {
        buffer.UnpinBuffer();
    }
}
/// <summary>
/// Releases the streams and buffer, unless this instance wraps a shared stream
/// whose lifetime is owned by the StreamManager.
/// </summary>
public void Dispose()
{
    // shared streams are cleaned up by the StreamManager, not by us
    if (StreamManager.IsSharedStream(this))
        return;

    if (m_stream != null)
    {
        m_stream.Dispose();
        m_stream = null;
    }

    if (m_streamEnd != null)
    {
        m_streamEnd.Dispose();
        m_streamEnd = null;
    }

    if (m_buffer != null)
    {
        m_buffer.Dispose();
        m_buffer = null;
    }
}
/// <summary>
/// Gets a buffer of exactly <paramref name="size"/> bytes for <paramref name="id"/>,
/// reusing a pooled buffer of that size when one is available; optionally pins it
/// so unsafe code can take its address.
/// </summary>
/// <param name="id">Identity recorded as the buffer's owner.</param>
/// <param name="size">Requested buffer size in bytes.</param>
/// <param name="pin">Whether to pin the buffer before returning it.</param>
/// <returns>A tracked <see cref="BufferInstance"/> of the requested size.</returns>
public static BufferInstance AcquireBuffer(Identity id, int size, bool pin)
{
    // make sure size is reasonable
    BufferInstance buffer = null;
    // NOTE(review): typeof(BufferManager) is a publicly reachable lock target;
    // kept because every BufferManager member visible here synchronizes on it
    lock (typeof(BufferManager))
    {
        Stack<BufferInstance> availableBuffers = GetAvailableBuffers(size, false);
        if (availableBuffers != null && availableBuffers.Count > 0)
        {
            // reuse a previously released buffer of the same size
            buffer = availableBuffers.Pop();
        }
        else
        {
            // nothing pooled at this size: allocate and register a new buffer
            var b = new byte[size];
            buffer = new BufferInstance(b);
            c_bufferMapping.Add(b, buffer);
        }
        // track ownership so ReleaseBuffer can validate the handoff
        c_usedBuffers.Add(buffer, id);
    }
    // pin outside the lock to keep the critical section short
    if (pin && !buffer.Pinned)
        buffer.PinBuffer();
    return buffer;
}
/// <summary>
/// Reads the given tile's raw points into <paramref name="inputBuffer"/> and rasterizes
/// them into <paramref name="quantizedGrid"/>, keeping the maximum quantized Z per cell,
/// then converts the result into the unquantized <paramref name="grid"/>.
/// </summary>
/// <param name="tile">Tile to rasterize.</param>
/// <param name="inputBuffer">Pinned scratch buffer large enough for the tile's stored bytes.</param>
/// <param name="grid">Output grid in unquantized (float) units.</param>
/// <param name="quantizedGrid">Working grid in quantized (int) units.</param>
public unsafe void LoadTileGrid(PointCloudTile tile, BufferInstance inputBuffer, Grid<float> grid, Grid<int> quantizedGrid)
{
    Open();
    var quantizedExtent = tile.QuantizedExtent;

    // size of one raster cell, in quantized coordinate units
    double cellSizeX = (double)quantizedExtent.RangeX / grid.SizeX;
    double cellSizeY = (double)quantizedExtent.RangeY / grid.SizeY;

#warning Why did I do FillVal it this way?
    //grid.FillVal = -1.0f;
    grid.Reset();
    quantizedGrid.Reset();

    byte* inputBufferPtr = inputBuffer.DataPtr;

    // ReadTile fills inputBuffer with the tile's raw point records; the returned byte
    // count is intentionally unused here — tile.StorageSize bounds the scan below
    int bytesRead = tile.ReadTile(m_inputStream, inputBuffer.Data);

    byte* pb = inputBufferPtr;
    byte* pbEnd = inputBufferPtr + tile.StorageSize;
    while (pb < pbEnd)
    {
        var p = (SQuantizedPoint3D*)pb;
        pb += PointSizeBytes;

        // map quantized XY into grid cell coordinates.
        // NOTE(review): a point exactly at MaxX/MaxY would index one past the grid
        // edge — presumably the extent/quantization conventions preclude that; confirm
        var pixelX = (int)(((*p).X - quantizedExtent.MinX) / cellSizeX);
        var pixelY = (int)(((*p).Y - quantizedExtent.MinY) / cellSizeY);

        // max val for now, apparently
        if ((*p).Z > quantizedGrid.Data[pixelY, pixelX])
            quantizedGrid.Data[pixelY, pixelX] = (*p).Z;
    }

    quantizedGrid.CorrectMaxOverflow();
    quantizedGrid.CopyToUnquantized(grid, Quantization, Extent);
}
/// <summary>
/// Creates a composite enumerator over this source's children, reading through the
/// caller-supplied buffer.
/// </summary>
public IPointCloudBinarySourceEnumerator GetBlockEnumerator(BufferInstance buffer)
{
    var enumerator = new PointCloudBinarySourceCompositeEnumerator(m_sources, buffer);
    return enumerator;
}
/// <summary>
/// Tiles the source point file into <paramref name="tiledFile"/> using the grid index
/// produced by AnalyzePointFile: each index segment is loaded through
/// <paramref name="segmentBuffer"/>, filtered/counted/sorted into tile order, and written
/// to the output in tile order, while a capped low-resolution subset of points is
/// accumulated and appended last. Returns the resulting tile source; it is marked clean
/// only if the operation was not canceled.
/// </summary>
/// <param name="tiledFile">Destination LAS file for the tiled output.</param>
/// <param name="segmentBuffer">Working buffer; its size also drives the index segmentation.</param>
/// <param name="progressManager">Progress/cancellation reporting.</param>
/// <returns>The tile source describing the newly written file.</returns>
public PointCloudTileSource TilePointFileIndex(LASFile tiledFile, BufferInstance segmentBuffer, ProgressManager progressManager)
{
    // NOTE(review): the stopwatch is started but its elapsed time is never read here
    var stopwatch = new Stopwatch();
    stopwatch.Start();

    var analysis = AnalyzePointFile(segmentBuffer.Length, progressManager);
    var quantizedExtent = m_source.QuantizedExtent;
    var tileCounts = analysis.Density.GetTileCountsForInitialization();

    // pre-size the output file (header + every point record) up front
    var fileSize = tiledFile.PointDataOffset + (m_source.PointSizeBytes * m_source.Count);
    AttemptFastAllocate(tiledFile.FilePath, fileSize);

    // budget for the low-resolution preview subset, spread across the valid tiles
    var lowResPointCountMax = PROPERTY_MAX_LOWRES_POINTS.Value;
    var lowResBuffer = BufferManager.AcquireBuffer(m_id, lowResPointCountMax * m_source.PointSizeBytes);
    var lowResWrapper = new PointBufferWrapper(lowResBuffer, m_source.PointSizeBytes, lowResPointCountMax);

    // NOTE(review): validTiles == 0 would divide by zero below; presumably the
    // analysis always yields at least one valid cell — confirm
    var validTiles = analysis.GridIndex.Sum(r => r.GridRange.ValidCells);
    var lowResPointsPerTile = lowResPointCountMax / validTiles;
    var lowResTileSize = (ushort)Math.Sqrt(lowResPointsPerTile);

    var lowResGrid = Grid<int>.Create(lowResTileSize, lowResTileSize, true, -1);
    var lowResCounts = tileCounts.Copy<int>();

    using (var outputStream = StreamManager.OpenWriteStream(tiledFile.FilePath, fileSize, tiledFile.PointDataOffset))
    {
        var i = 0;
        foreach (var segment in analysis.GridIndex)
        {
            progressManager.Log("~ Processing Index Segment {0}/{1}", ++i, analysis.GridIndex.Count);

            var sparseSegment = m_source.CreateSparseSegment(segment);
            var sparseSegmentWrapper = new PointBufferWrapper(segmentBuffer, sparseSegment);
            var tileRegionFilter = new TileRegionFilter(tileCounts, quantizedExtent, segment.GridRange);

            // this call will fill the buffer with points, add the counts, and sort
            QuantTilePointsIndexed(sparseSegment, sparseSegmentWrapper, tileRegionFilter, tileCounts, lowResWrapper, lowResGrid, lowResCounts, progressManager);
            var segmentFilteredPointCount = tileRegionFilter.GetCellOrdering().Sum(t => tileCounts.Data[t.Row, t.Col]);
            var segmentFilteredBytes = segmentFilteredPointCount * sparseSegmentWrapper.PointSizeBytes;

            // write out the buffer, one tile at a time in cell order
            using (var process = progressManager.StartProcess("WriteIndexSegment"))
            {
                var segmentBufferIndex = 0;
                foreach (var tile in segment.GridRange.GetCellOrdering())
                {
                    var tileCount = tileCounts.Data[tile.Row, tile.Col];
                    if (tileCount > 0)
                    {
                        // low-res points stay in lowResWrapper, so they are excluded
                        // from this tile's payload here and written at the end
                        var tileSize = (tileCount - lowResCounts.Data[tile.Row, tile.Col]) * sparseSegmentWrapper.PointSizeBytes;
                        outputStream.Write(sparseSegmentWrapper.Data, segmentBufferIndex, tileSize);
                        segmentBufferIndex += tileSize;
                        // Update returns false on cancellation
                        if (!process.Update((float)segmentBufferIndex / segmentFilteredBytes))
                            break;
                    }
                }
            }

            if (progressManager.IsCanceled())
                break;
        }

        // write low-res
        var lowResActualPointCount = lowResCounts.Data.Cast<int>().Sum();
        outputStream.Write(lowResWrapper.Data, 0, lowResActualPointCount * lowResWrapper.PointSizeBytes);
    }

    var actualDensity = new PointCloudTileDensity(tileCounts, m_source.Quantization);
    var tileSet = new PointCloudTileSet(m_source, actualDensity, tileCounts, lowResCounts);
    var tileSource = new PointCloudTileSource(tiledFile, tileSet, analysis.Statistics);

    // a canceled run leaves the source flagged dirty
    if (!progressManager.IsCanceled())
        tileSource.IsDirty = false;

    tileSource.WriteHeader();

    return tileSource;
}
/// <summary>
/// Creates a sequential block enumerator over this source, reading through the
/// caller-supplied buffer.
/// </summary>
public IPointCloudBinarySourceEnumerator GetBlockEnumerator(BufferInstance buffer)
{
    var enumerator = new PointCloudBinarySourceEnumerator(this, buffer);
    return enumerator;
}
/// <summary>
/// Returns <paramref name="buffer"/> to the size-keyed pool of available buffers.
/// </summary>
/// <param name="buffer">The buffer to release; must have been acquired via AcquireBuffer.</param>
/// <exception cref="Exception">Thrown when the buffer is not currently tracked as in use.</exception>
public static void ReleaseBuffer(BufferInstance buffer)
{
    lock (typeof(BufferManager))
    {
        if (!c_usedBuffers.ContainsKey(buffer))
            throw new Exception("attempted to release a buffer that is not in use");
        c_usedBuffers.Remove(buffer);
        // push onto the stack for this buffer size (created on demand)
        var bufferList = GetAvailableBuffers(buffer.Length, true);
        bufferList.Push(buffer);
    }
    // unpinning happens outside the lock, after the buffer is already back in the pool;
    // NOTE(review): a concurrent acquirer could briefly observe it unpinned — assumed benign
    if (UNPIN_ON_RELEASE && buffer.Pinned)
        buffer.UnpinBuffer();
}
/// <summary>
/// Block enumeration is not implemented for this source type.
/// </summary>
/// <param name="buffer">Unused.</param>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public IPointCloudBinarySourceEnumerator GetBlockEnumerator(BufferInstance buffer)
{
    throw new NotImplementedException();
}
/// <summary>
/// Releases a batch of buffers back to the pool under a single manager lock.
/// The nested lock taken by ReleaseBuffer is reentrant on the same thread, so
/// holding it here is safe.
/// </summary>
private static void ReleaseBuffers(BufferInstance[] buffers)
{
    lock (typeof(BufferManager))
    {
        for (int i = 0; i < buffers.Length; i++)
            ReleaseBuffer(buffers[i]);
    }
}