/// <summary>
/// Stores <paramref name="chunk"/> and flushes to the output every buffered chunk whose
/// order is next in sequence. Chunks may arrive out of order; they are parked in
/// _sortedDictionary keyed by Order and drained contiguously starting at _chunksWritten.
/// </summary>
/// <param name="chunk">Chunk to write or buffer; its RentedData is disposed once written.</param>
/// <returns>true when every expected chunk has been written (signals _writeCompletedEvent).</returns>
public bool WriteOrStoreChunk(OrderedChunk chunk)
{
    var chunksCount = _getChunksCount();
    _sortedDictionary.Add(chunk.Order, chunk);

    // Drain the run of consecutive chunks starting at the next expected order.
    // TryGetValue avoids the ContainsKey + indexer double lookup of the original.
    while (_sortedDictionary.TryGetValue(_chunksWritten, out var orderedChunk))
    {
        Write(orderedChunk);
        orderedChunk.RentedData.Dispose();
        _sortedDictionary.Remove(_chunksWritten);
        _chunksWritten++;
    }

    Flush();

    var writeEnded = _chunksWritten == chunksCount;
    if (writeEnded)
    {
        _writeCompletedEvent.Set();
    }

    return writeEnded;
}
// Make this chunk's submesh visible; silently does nothing if the chunk
// has no mesh or the mesh has no submesh yet.
void show_chunk(OrderedChunk chunk)
{
    chunk.mesh?.submesh?.SetVisible(true);
}
// Hide this chunk's submesh; silently does nothing if the chunk
// has no mesh or the mesh has no submesh yet.
void hide_chunk(OrderedChunk chunk)
{
    chunk.mesh?.submesh?.SetVisible(false);
}
// update visible triangles for this chunk: shows the subset of the chunk's
// triangles whose ordering scalar is below max_scalar, rebuilding the
// submesh game-object only when that subset's size changed.
void update_triangles(OrderedChunk chunk, float max_scalar)
{
    if (chunk.mesh == null)
    {
        chunk.mesh = new MeshChunk();
    }

    // count triangles in this chunk's ordering range under the threshold
    int count = 0;
    foreach (int idx in chunk.order_range)
    {
        if (tri_ordering[idx].scalar < max_scalar)
        {
            count++;
        }
    }

    // if we have the same count, we can keep the existing submesh.
    // BUG FIX: guard submesh != null — a freshly created MeshChunk has no
    // submesh yet, and the original dereferenced it here unconditionally
    // (NRE when count happened to equal the default current_count).
    if (chunk.mesh.submesh != null && chunk.mesh.current_count == count)
    {
        chunk.mesh.submesh.SetVisible(true);
        return;
    }

    // find subset triangles (assumes tri_ordering is sorted by scalar within
    // the chunk, so the first `count` entries are exactly the visible ones —
    // TODO confirm against the ordering builder)
    int[] triangles = new int[count];
    for (int k = 0; k < count; ++k)
    {
        int idx = chunk.order_range.a + k;
        triangles[k] = tri_ordering[idx].tid;
    }

    // find submesh
    // [TODO] faster variant of this? Also could be computing these in background...
    DSubmesh3 submesh = new DSubmesh3(Mesh, triangles);
    MeshTransforms.VertexNormalOffset(submesh.SubMesh, NormalOffsetDistance);
    fMesh umesh = UnityUtil.DMeshToUnityMesh(submesh.SubMesh, false);

    // create or update GO
    if (chunk.mesh.submesh == null)
    {
        chunk.mesh.submesh = new fMeshGameObject(umesh, true, false);
        if (ChunkMeshMaterial != null)
        {
            chunk.mesh.submesh.SetMaterial(ChunkMeshMaterial);
        }
        if (ChunkMeshParent != null)
        {
            ChunkMeshParent.AddChild(chunk.mesh.submesh, false);
        }
    }
    else
    {
        chunk.mesh.submesh.UpdateMesh(umesh, true, false);
    }

    chunk.mesh.submesh.SetVisible(true);
    chunk.mesh.current_count = count;
}
/// <summary>
/// Gzip-compresses the chunk's payload into a pooled buffer.
/// </summary>
/// <param name="chunk">Chunk whose RentedData is compressed; not disposed here.</param>
/// <returns>Pooled array holding the complete gzip stream and its exact length.</returns>
public override RentedArray<byte> GetProcessedData(OrderedChunk chunk)
{
    var length = ArchiveSizeCalculator.CalculateArchiveMaxSizeInBytes(chunk.RentedData.RentedLength);
    var rentedArray = GzipArrayPool.SharedBytesPool.Rent(length);

    using var compressedStream = new MemoryStream(rentedArray, 0, length, true);
    compressedStream.SetLength(0);

    // BUG FIX: the GZipStream must be fully disposed BEFORE the compressed
    // length is read — Flush() does not emit the gzip footer (CRC32 + size),
    // which is only written on Dispose. The original read Length while the
    // stream was still open, returning a truncated archive. leaveOpen keeps
    // the MemoryStream usable afterwards.
    using (var gZipStream = new GZipStream(compressedStream, CompressionMode.Compress, leaveOpen: true))
    {
        gZipStream.Write(chunk.RentedData.AsBoundedSpan);
    }

    return new RentedArray<byte>(rentedArray, (int)compressedStream.Length, GzipArrayPool.SharedBytesPool);
}
/// <summary>
/// Selects every triangle currently shown by the processed chunks
/// (indices 0 through current_partial_chunk, inclusive).
/// </summary>
public void ComputeSelection(MeshFaceSelection selection)
{
    for (int ci = 0; ci <= current_partial_chunk; ++ci)
    {
        var chunk = ordered_chunks[ci];
        if (chunk.mesh == null)
            continue;

        // the first current_count entries of the chunk's range are visible
        int start = chunk.order_range.a;
        for (int k = 0; k < chunk.mesh.current_count; ++k)
            selection.Select(tri_ordering[start + k].tid);
    }
}
/// <summary>
/// Runs the gzip processor over the chunk, releases its source buffer,
/// queues the processed chunk for ordered writing on the IO-bound queue,
/// and frees one read slot for the producer.
/// </summary>
public void Process(OrderedChunk chunk)
{
    var processed = _gzipProcessor.GetProcessedData(chunk);
    chunk.RentedData.Dispose();

    var outputChunk = new OrderedChunk
    {
        RentedData = processed,
        Order = chunk.Order
    };

    _ioBoundQueue.Enqueue(new Function(
        nameof(IChunkedWriter.WriteOrStoreChunk),
        () => _chunkedWriter.WriteOrStoreChunk(outputChunk)));

    _readSlotsSemaphore.Release();
}
/// <summary>
/// Gzip-decompresses the chunk's payload into a pooled buffer.
/// </summary>
/// <param name="chunk">Chunk whose RentedData holds a complete gzip stream.</param>
/// <returns>Pooled array with the decompressed bytes and their exact length.</returns>
/// <exception cref="InvalidArchiveFormatException">The payload is not valid gzip data.</exception>
public override RentedArray<byte> GetProcessedData(OrderedChunk chunk)
{
    var length = ApplicationConstants.BufferSizeInBytes;
    var rentedBytes = GzipArrayPool.SharedBytesPool.Rent(length);

    // NOTE(review): the output MemoryStream wraps a fixed-size buffer and is
    // not expandable — if a chunk decompresses to more than BufferSizeInBytes,
    // CopyTo throws NotSupportedException, which is NOT caught below. Presumably
    // the writer guarantees chunks never exceed this size — confirm.
    using var decompressedStream = new MemoryStream(rentedBytes, 0, length, true);
    decompressedStream.SetLength(0);

    using var compressedDataStream = new MemoryStream(chunk.RentedData.Array, 0, chunk.RentedData.RentedLength);
    using var gZipStream = new GZipStream(compressedDataStream, CompressionMode.Decompress);

    try
    {
        // CopyTo reads to end of stream; the original's extra Flush() on a
        // decompression stream was a no-op and has been removed.
        gZipStream.CopyTo(decompressedStream);
    }
    catch (InvalidDataException e)
    {
        throw new InvalidArchiveFormatException(UserMessages.ArchiveFormatIsNotSupported, e);
    }

    return new RentedArray<byte>(rentedBytes, (int)decompressedStream.Length, GzipArrayPool.SharedBytesPool);
}
/// <summary>
/// Rebuilds ordered_chunks by partitioning tri_ordering into groups of
/// ChunkSize consecutive entries; each chunk records the ordering-index
/// range and scalar range it covers. Always produces at least one chunk.
/// </summary>
void update_ordered_chunks()
{
    if (ordered_chunks != null)
        discard_ordered_chunks();

    // ceil(length / ChunkSize), with a minimum of one chunk
    int num_chunks = tri_ordering.Length / ChunkSize;
    if (num_chunks == 0 || (tri_ordering.Length % ChunkSize) != 0)
        num_chunks++;

    ordered_chunks = new OrderedChunk[num_chunks];
    ordered_chunks[0] = new OrderedChunk() {
        order_range = Interval1i.Empty, scalar_range = Interval1d.Empty
    };

    for (int k = 0; k < tri_ordering.Length; ++k)
    {
        // chunk index follows directly from position (replaces the original's
        // running per-chunk counter)
        int ci = k / ChunkSize;
        if (ordered_chunks[ci] == null)
        {
            ordered_chunks[ci] = new OrderedChunk() {
                order_range = Interval1i.Empty, scalar_range = Interval1d.Empty
            };
        }
        ordered_chunks[ci].order_range.Contain(k);
        ordered_chunks[ci].scalar_range.Contain(tri_ordering[k].scalar);
    }
}
/// <summary>
/// Writes the chunk's payload bytes directly to the underlying stream
/// (no framing — raw data only).
/// </summary>
protected override void Write(OrderedChunk chunk) =>
    _stream.Write(chunk.RentedData.AsBoundedSpan);
/// <summary>
/// Writes a single <see cref="OrderedChunk"/> to the underlying <see cref="Stream"/>.
/// Implementations choose the on-disk framing (e.g. raw payload vs. length-prefixed).
/// The caller retains ownership of the chunk's buffer and disposes it after this returns.
/// </summary>
/// <param name="chunk">Chunk whose payload is written.</param>
protected abstract void Write(OrderedChunk chunk);
/// <summary>
/// Writes the chunk as a length-prefixed record: the payload's rented
/// length (int) followed by the payload bytes.
/// </summary>
protected override void Write(OrderedChunk chunk)
{
    var payload = chunk.RentedData;
    _binaryWriter.Write(payload.RentedLength);
    _binaryWriter.Write(payload.AsBoundedSpan);
}