/// <summary>
/// Reads the value stored at block coordinates (x, y, z).
/// The block index is remapped into interval space via <c>_blockToInterval</c>, then the
/// sorted interval list is binary-searched for the interval covering that location.
/// NOTE(review): a negative BinarySearch result is treated as "not found" and yields
/// default(T); this assumes Interval&lt;T&gt;.CompareTo matches on containment rather than
/// exact start position — confirm against Interval&lt;T&gt;'s comparer.
/// </summary>
public T Get(int x, int y, int z)
{
    var intervalLocation = _blockToInterval[General.BlockIndex(x, y, z, Width, Height)];
    var probe = new Interval<T>(intervalLocation, default(T));
    var foundIndex = _tree.BinarySearch(probe);
    if (foundIndex < 0)
    {
        return default(T);
    }
    return _tree[foundIndex].Data;
}
/// <summary>
/// Applies every item in <paramref name="updates"/> by converting each position to a
/// block index and delegating to the index-based <c>Update</c> overload, one item at a time.
/// </summary>
public void Update(IEnumerable<BatchUpdateItem<T>> updates)
{
    foreach (var batchItem in updates)
    {
        var p = batchItem.Position;
        var blockIndex = General.BlockIndex(p.x, p.y, p.z, Width, Height);
        Update(blockIndex, batchItem.Item);
    }
}
/// <summary>
/// Applies a batch of updates under a single write lock.
/// </summary>
/// <param name="items">Positions and values to write.</param>
/// <param name="timeout">Milliseconds to wait for the write lock; &lt;= 0 blocks indefinitely.</param>
/// <returns>True when the batch was applied; false on lock timeout, ownership conflict, or unusable mode.</returns>
public bool Update([NotNull] IEnumerable<BatchUpdateItem<T>> items, int timeout = 0)
{
    if (timeout <= 0)
    {
        _lock.EnterWriteLock();
    }
    else if (!_lock.TryEnterWriteLock(timeout))
    {
        Interlocked.Exchange(ref _lastTouched, DateTime.Now.Ticks);
        return false;
    }

    // Claim batch ownership; nested single-item Update calls on this thread skip locking.
    if (Interlocked.CompareExchange(ref _thread, Thread.CurrentThread, null) != null)
    {
        Debug.LogError("Batch update failed.");
        _lock.ExitWriteLock();
        Interlocked.Exchange(ref _lastTouched, DateTime.Now.Ticks);
        return false;
    }

    var success = false;
    try
    {
        switch (_mode)
        {
            case List:
                foreach (var pair in items)
                {
                    // BUGFIX: use the same (Width, Height) overload that Get/GetNeighborhood
                    // read with; the 4-arg overload computed a different index for
                    // non-cubic chunks, so writes and reads disagreed.
                    var index = General.BlockIndex(pair.Position.x, pair.Position.y, pair.Position.z, Width, Height);
                    _array[index] = pair.Item;
                }
                success = true;
                break;
            case Interval:
                _intervalTree.Update(items);
                // Too many interval modifications → schedule a decompress to rebuild.
                if (_intervalTree.Modifications > ConfigManager.Properties.MaxIntervalTreeMods)
                {
                    QueueDecompress();
                }
                success = true;
                break;
            case Empty:
                Debug.LogError("CompressibleArray is empty and cannot update a item.");
                break;
            default:
                Debug.LogError("Batch update failed. Mode is undefined");
                break;
        }
    }
    finally
    {
        // BUGFIX: release ownership and the write lock even if an update throws;
        // previously an exception left the lock held forever.
        Interlocked.Exchange(ref _thread, null);
        _lock.ExitWriteLock();
        Interlocked.Exchange(ref _lastTouched, DateTime.Now.Ticks);
    }
    return success;
}
/// <summary>
/// Writes a single value at (x, y, z) under the write lock.
/// Re-entrant: skips locking when this thread already owns a batch update (<c>_thread</c>).
/// </summary>
/// <param name="timeout">Milliseconds to wait for the write lock; &lt;= 0 blocks indefinitely.</param>
/// <returns>True when the value was written; false on lock timeout or unusable mode.</returns>
public bool Update(int x, int y, int z, T item, int timeout = 0)
{
    var @lock = _thread != Thread.CurrentThread;
    if (@lock)
    {
        if (timeout <= 0)
        {
            _lock.EnterWriteLock();
        }
        else if (!_lock.TryEnterWriteLock(timeout))
        {
            Interlocked.Exchange(ref _lastTouched, DateTime.Now.Ticks);
            return false;
        }
    }

    var success = false;
    try
    {
        switch (_mode)
        {
            case List:
                // BUGFIX: use the (Width, Height) overload so writes land on the same
                // index that Get/GetNeighborhood read from (the 4-arg overload assumed a cube).
                var index = General.BlockIndex(x, y, z, Width, Height);
                _array[index] = item;
                success = true;
                break;
            case Interval:
                _intervalTree.Update(x, y, z, item);
                // Too many interval modifications → schedule a decompress to rebuild.
                if (_intervalTree.Modifications > ConfigManager.Properties.MaxIntervalTreeMods)
                {
                    QueueDecompress();
                }
                success = true;
                break;
            case Empty:
                Debug.LogError("CompressibleArray is empty and cannot update a item.");
                break;
            default:
                Debug.LogError("Update failed. Mode is undefined");
                break;
        }
    }
    finally
    {
        // BUGFIX: release the lock even if the update throws; previously an
        // exception left the write lock held forever.
        if (@lock)
        {
            _lock.ExitWriteLock();
        }
        Interlocked.Exchange(ref _lastTouched, DateTime.Now.Ticks);
    }
    return success;
}
/// <summary>
/// Lazily yields the values in the axis-aligned neighborhood around (cX, cY, cZ),
/// clipped to the chunk bounds. Re-entrant: skips locking when this thread owns a batch.
/// NOTE(review): the loops use an exclusive upper bound (x &lt; cX + radius), so radius = 1
/// covers [c-1, c] per axis, not a symmetric 3x3x3 — confirm this is intended.
/// NOTE(review): as an iterator, the lock is acquired on first MoveNext, not at the call —
/// callers must enumerate promptly.
/// </summary>
public IEnumerable<BatchUpdateItem<T>> GetNeighborhood(int cX, int cY, int cZ, int radius = 1)
{
    var @lock = _thread != Thread.CurrentThread;
    if (@lock)
    {
        _lock.EnterReadLock();
    }
    try
    {
        for (var x = cX - radius; x < cX + radius; ++x)
        {
            for (var y = cY - radius; y < cY + radius; ++y)
            {
                for (var z = cZ - radius; z < cZ + radius; ++z)
                {
                    // Skip coordinates outside the chunk.
                    if (x < 0 || x >= Width || y < 0 || y >= Height || z < 0 || z >= Width)
                    {
                        continue;
                    }
                    switch (_mode)
                    {
                        case List:
                            var index = General.BlockIndex(x, y, z, Width, Height);
                            yield return new BatchUpdateItem<T>(new Vector3Int(x, y, z), _array[index]);
                            break;
                        case Interval:
                            yield return new BatchUpdateItem<T>(new Vector3Int(x, y, z), _intervalTree.Get(x, y, z));
                            break;
                        case Empty:
                            Debug.LogError("CompressibleArray is empty and cannot return a item.");
                            break;
                        default:
                            Debug.LogError("Get failed. Mode is undefined");
                            break;
                    }
                }
            }
        }
    }
    finally
    {
        // BUGFIX: previously the lock release ran only after full enumeration; a caller
        // that abandoned the enumerator early (e.g. LINQ First()) leaked the read lock.
        // finally in an iterator runs on enumerator Dispose as well.
        if (@lock)
        {
            _lock.ExitReadLock();
        }
        Interlocked.Exchange(ref _lastTouched, DateTime.Now.Ticks);
    }
}
/// <summary>
/// Reads the value at (x, y, z) from whichever representation is active (_array or
/// _intervalTree). Re-entrant: skips the read lock when this thread owns a batch update.
/// Returns default(T) (with an error log) when the array is empty or the mode is unknown.
/// </summary>
public T Get(int x, int y, int z)
{
    var @lock = _thread != Thread.CurrentThread;
    if (@lock)
    {
        _lock.EnterReadLock();
    }
    try
    {
        switch (_mode)
        {
            case List:
                return _array[General.BlockIndex(x, y, z, Width, Height)];
            case Interval:
                return _intervalTree.Get(x, y, z);
            case Empty:
                Debug.LogError("CompressibleArray is empty and cannot return a item.");
                return default(T);
            default:
                Debug.LogError("Get failed. Mode is undefined");
                return default(T);
        }
    }
    finally
    {
        // BUGFIX: release the lock even if the lookup throws (e.g. index out of range);
        // previously an exception left the read lock held forever.
        if (@lock)
        {
            _lock.ExitReadLock();
        }
        Interlocked.Exchange(ref _lastTouched, DateTime.Now.Ticks);
    }
}
/// <summary>
/// Builds (or reuses) the bidirectional mapping between interval-space indices and block
/// indices for a chunk of the given width/height. The traversal is a serpentine
/// (boustrophedon) walk — Y runs alternate direction per column, Z runs alternate per X
/// slice — so consecutive interval indices are spatially adjacent blocks, which favors
/// long runs for interval compression. Mappings are cached per (width, height, width) key.
/// </summary>
public ColumnCompressor(int width, int height) {
    _width = width; _height = height;
    // Cache key: chunks are width x height x width.
    var key = new Vector3Int(_width, _height, _width);
    if (!IntervalToBlockMappings.ContainsKey(key)) {
        var count = _width * _height * _width;
        var map = new int[count];       // interval index -> block index
        var inverse = new int[count];   // block index -> interval index
        var index = 0;
        var flipY = false;  // false: first Y pass runs top-down (_height-1 .. 0)
        var flipZ = false;  // false: first Z pass runs forward (0 .. _width-1)
        for (var x = 0; x < _width; x++) {
            // Choose Z direction/limits for this X slice.
            int incZ, z, limZ;
            if (flipZ) { incZ = -1; z = _width - 1; limZ = 0; }
            else { incZ = 1; z = 0; limZ = _width; }
            while ((flipZ && z >= limZ) || (!flipZ && z < limZ)) {
                // Choose Y direction/limits for this column; alternates every column.
                int incY, y, limY;
                if (flipY) { incY = 1; y = 0; limY = _height; }
                else { incY = -1; y = _height - 1; limY = 0; }
                while ((flipY && y < limY) || (!flipY && y >= limY)) {
                    var blockIndex = General.BlockIndex(x, y, z, _width, _height);
                    map[index] = blockIndex;
                    inverse[blockIndex] = index;
                    ++index;
                    y += incY;
                }
                z += incZ;
                flipY = !flipY;  // reverse Y for the next column (serpentine)
            }
            flipZ = !flipZ;  // reverse Z for the next X slice
        }
        // TryAdd: another thread may have built the same mapping concurrently.
        IntervalToBlockMappings.TryAdd(key, map);
        BlockToIntervalMappings.TryAdd(key, inverse);
    }
    IntervalToBlock = IntervalToBlockMappings[key];
    BlockToInterval = BlockToIntervalMappings[key];
}
/// <summary>
/// Writes <paramref name="update"/> at block coordinates (x, y, z) by delegating to the
/// index-based overload. Insertions are slow; runs are not condensed.
/// </summary>
public void Update(int x, int y, int z, T update) => Update(General.BlockIndex(x, y, z, Width, Height), update);
/// <summary>
/// Advances <paramref name="workCoords"/> one step along the given scan order and returns
/// the block index of the new position. Works like an odometer: the first axis in the
/// scan order is incremented modulo <paramref name="chunkSize"/>, and a wrap-around
/// carries into the next axis.
/// </summary>
/// <exception cref="ArgumentOutOfRangeException">Unknown scan direction.</exception>
private static int GetNextIndex(ScanDirection direction, int chunkSize, ref Vector3Int workCoords)
{
    // Coordinates as an indexable triple: 0 = x, 1 = y, 2 = z.
    var coords = new[] { workCoords.x, workCoords.y, workCoords.z };

    // Axis order per direction, fastest-varying axis first.
    int[] axisOrder;
    switch (direction)
    {
        case ScanDirection.Xyz: axisOrder = new[] { 0, 1, 2 }; break;
        case ScanDirection.Xzy: axisOrder = new[] { 0, 2, 1 }; break;
        case ScanDirection.Yxz: axisOrder = new[] { 1, 0, 2 }; break;
        case ScanDirection.Yzx: axisOrder = new[] { 1, 2, 0 }; break;
        case ScanDirection.Zxy: axisOrder = new[] { 2, 0, 1 }; break;
        case ScanDirection.Zyx: axisOrder = new[] { 2, 1, 0 }; break;
        default: throw new ArgumentOutOfRangeException(nameof(direction));
    }

    // Odometer increment: carry into the next axis only when this one wrapped
    // (new value strictly less than the previous value).
    foreach (var axis in axisOrder)
    {
        var previous = coords[axis];
        coords[axis] = (previous + 1) % chunkSize;
        if (coords[axis] >= previous)
        {
            break;
        }
    }

    workCoords = new Vector3Int(coords[0], coords[1], coords[2]);
    return General.BlockIndex(workCoords, chunkSize);
}
/// <summary>
/// Builds (or reuses cached) mappings between interval-space indices and block indices for
/// every compression ordering: one Hilbert-curve mapping plus one linear mapping per
/// ScanDirection. Inverse mappings (block -> interval) are derived from each forward
/// mapping. Results are cached per chunk size.
/// </summary>
public BlockCompressor(int chunkSize) {
    _chunkSize = chunkSize;
    if (!IntervalToBlockMappings.ContainsKey(_chunkSize)) {
        var count = _chunkSize * _chunkSize * _chunkSize;
        var mappingFunctions = new Dictionary<CompressionFlag, int[]>();
        // Bits per axis needed to address chunkSize positions on the Hilbert curve.
        var bitsPerAxis = (int)Math.Ceiling(Math.Log(chunkSize, 2));
        // Hilbert ordering: curve position -> block index.
        var hilbertToBlockIndex = new int[count];
        for (uint index = 0; index < count; ++index) {
            var arr = HilbertCurve.HilbertAxes(index, bitsPerAxis);
            var blockIndex = General.BlockIndex(arr.x, arr.y, arr.z, _chunkSize);
            hilbertToBlockIndex[index] = blockIndex;
        }
        mappingFunctions[CompressionFlag.Hilbert] = hilbertToBlockIndex;
        // Linear orderings: walk the chunk in each scan direction.
        // NOTE(review): GetNextIndex advances workCoords BEFORE i = 0 is stored, so the
        // origin (0,0,0) receives i = count - 1 and the mapping is rotated by one step
        // relative to starting at the origin — confirm this offset is intended.
        foreach (ScanDirection scanDirection in Enum.GetValues(typeof(ScanDirection))) {
            var workCoords = new Vector3Int();
            var mapping = new int[count];  // block index -> scan position
            for (var i = 0; i < count; ++i) {
                mapping[GetNextIndex(scanDirection, _chunkSize, ref workCoords)] = i;
            }
            // Translate the scan direction to its compression flag.
            CompressionFlag compressionFlag;
            switch (scanDirection) {
                case ScanDirection.Xyz: compressionFlag = CompressionFlag.LinearXyz; break;
                case ScanDirection.Xzy: compressionFlag = CompressionFlag.LinearXzy; break;
                case ScanDirection.Yxz: compressionFlag = CompressionFlag.LinearYxz; break;
                case ScanDirection.Yzx: compressionFlag = CompressionFlag.LinearYzx; break;
                case ScanDirection.Zxy: compressionFlag = CompressionFlag.LinearZxy; break;
                case ScanDirection.Zyx: compressionFlag = CompressionFlag.LinearZyx; break;
                default: throw new ArgumentOutOfRangeException();
            }
            mappingFunctions[compressionFlag] = mapping;
        }
        IntervalToBlockMappings[chunkSize] = mappingFunctions;
        // Build the inverse (block -> interval) of every forward mapping.
        BlockToIntervalMappings[chunkSize] = new Dictionary<CompressionFlag, int[]>();
        foreach (var pair in IntervalToBlockMappings[chunkSize]) {
            var mappingFunction = pair.Value;
            var inverseFunction = new int[mappingFunction.Length];
            for (int i = 0; i < mappingFunction.Length; i++) {
                inverseFunction[mappingFunction[i]] = i;
            }
            BlockToIntervalMappings[chunkSize][pair.Key] = inverseFunction;
        }
    }
    _intervalToBlock = IntervalToBlockMappings[chunkSize];
    _blockToInterval = BlockToIntervalMappings[chunkSize];
}