/// <summary>
/// Read bytes from stream into buffer slice
/// </summary>
private void ReadStream(Stream stream, long position, BufferSlice buffer)
{
    // can't test "Length" on an out-of-date stream
    // ENSURE(position + PAGE_SIZE <= stream.Length, "can't read beyond file length");

    stream.Position = position;

    stream.Read(buffer.Array, buffer.Offset, buffer.Count);
}
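A hypothetical call site for ReadStream (PAGE_SIZE, BufferPool, and BufferSlice all appear elsewhere in this section; the contiguous-page assumption, where page N begins at N * PAGE_SIZE, is mine, not quoted from the source):

// hypothetical usage sketch: read page `pageID` into an 8K slice,
// assuming pages are stored back-to-back (position = pageID * PAGE_SIZE)
var bytes = BufferPool.Rent(PAGE_SIZE);
var slice = new BufferSlice(bytes, 0, PAGE_SIZE);

this.ReadStream(stream, (long)pageID * PAGE_SIZE, slice);

BufferPool.Return(bytes);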
public EnginePragmas(BufferSlice buffer, HeaderPage headerPage)
    : this(headerPage)
{
    foreach (var pragma in _pragmas.Values)
    {
        pragma.Read(buffer);
    }

    _isDirty = false;
}
private async Task InitializeAsync()
{
    var read = await _source.MoveNextAsync();

    if (!read)
    {
        // empty source: mark EOF and don't touch Current
        _isEOF = true;
        return;
    }

    _current = _source.Current;
}
/// <summary>
/// Write buffer container data into disk
/// </summary>
public void Write(long position, BufferSlice buffer)
{
    var writer = _pool.Value.Writer;

    // there is only a single writer instance, so it must be locked to ensure
    // that only one thread writes at a time
    lock (writer)
    {
        writer.Position = position;

        writer.Write(buffer.Array, buffer.Offset, _containerSize);
    }
}
/// <summary>
/// Initialize reader based on Stream (if data was persisted to disk) or Buffer (if all data fits in a single container)
/// </summary>
public void InitializeReader(Stream stream, BufferSlice buffer, bool utcDate)
{
    if (stream != null)
    {
        _reader = new BufferReader(this.GetSourceFromStream(stream), utcDate);
    }
    else
    {
        _reader = new BufferReader(buffer, utcDate);
    }

    this.MoveNext();
}
public SortService(SortDisk disk, int order, EnginePragmas pragmas)
{
    _disk = disk;
    _order = order;
    _pragmas = pragmas;
    _containerSize = disk.ContainerSize;

    _reader = new Lazy<Stream>(() => _disk.GetReader());

    var bytes = BufferPool.Rent(disk.ContainerSize);

    _buffer = new BufferSlice(bytes, 0, _containerSize);
}
/// <summary>
/// Create a fake index node used only in Virtual Index runner
/// </summary>
public IndexNode(BsonDocument doc)
{
    _page = null;
    _segment = new BufferSlice(new byte[0], 0, 0);

    this.Slot = 0;
    this.Position = new PageAddress(0, 0);
    this.Level = 0;
    this.DataBlock = PageAddress.Empty;

    // index node key IS the document itself
    this.Key = doc;
}
public void UpdateBuffer(BufferSlice buffer)
{
    if (_isDirty == false) return;

    foreach (var pragma in _pragmas)
    {
        pragma.Value.Write(buffer);
    }

    _isDirty = false;
}
/// <summary>
/// Read new DataBlock from filled page segment
/// </summary>
public DataBlock(DataPage page, byte index, BufferSlice segment)
{
    _page = page;
    _segment = segment;

    this.Position = new PageAddress(page.PageID, index);

    // byte 00: Extend
    this.Extend = segment.ReadBool(P_EXTEND);

    // bytes 01-05: NextBlock (PageID, Index)
    this.NextBlock = segment.ReadPageAddress(P_NEXT_BLOCK);

    // bytes 06-EOL: Buffer
    this.Buffer = segment.Slice(P_BUFFER, segment.Count - P_BUFFER);
}
/// <summary>
/// Get 8k buffer slices inside file container
/// </summary>
private IEnumerable<BufferSlice> GetSourceFromStream(Stream stream)
{
    var bytes = BufferPool.Rent(PAGE_SIZE);
    var buffer = new BufferSlice(bytes, 0, PAGE_SIZE);

    while (_readPosition < _size)
    {
        stream.Position = this.Position + _readPosition;

        // re-fill the same rented array on every iteration
        stream.Read(bytes, 0, PAGE_SIZE);

        _readPosition += PAGE_SIZE;

        yield return buffer;
    }

    BufferPool.Return(bytes);
}
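Because GetSourceFromStream yields the same BufferSlice instance on every iteration (the rented array is overwritten by each Read), a consumer has to finish with one slice before asking for the next. A minimal consumption sketch, where ProcessPage is a placeholder rather than a real API:

// hypothetical consumer: ProcessPage is a placeholder
foreach (var slice in this.GetSourceFromStream(stream))
{
    // must complete before MoveNext() re-fills the shared array
    ProcessPage(slice);
}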
/// <summary>
/// Create new DataBlock and fill into buffer
/// </summary>
public DataBlock(DataPage page, byte index, BufferSlice segment, bool extend, PageAddress nextBlock)
{
    _page = page;
    _segment = segment;

    this.Position = new PageAddress(page.PageID, index);
    this.NextBlock = nextBlock;
    this.Extend = extend;

    // byte 00: Extend
    segment.Write(extend, P_EXTEND);

    // bytes 01-05: NextBlock (can be updated in "UpdateNextBlock")
    segment.Write(nextBlock, P_NEXT_BLOCK);

    // bytes 06-EOL: Buffer
    this.Buffer = segment.Slice(P_BUFFER, segment.Count - P_BUFFER);

    page.IsDirty = true;
}
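Read together, the two DataBlock constructors pin down the segment layout. A sketch of the offset constants, deduced from the byte comments above (the literal values, including a 5-byte PageAddress inferred from the 01-05 range, are assumptions rather than quotes from the source):

// inferred data-block segment layout
private const int P_EXTEND = 0;      // byte 00: Extend flag (bool)
private const int P_NEXT_BLOCK = 1;  // bytes 01-05: NextBlock (PageID + Index)
private const int P_BUFFER = 6;      // bytes 06-EOL: payload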
/// <summary>
/// Read index node from page segment (lazy-load)
/// </summary>
public IndexNode(IndexPage page, byte index, BufferSlice segment)
{
    _page = page;
    _segment = segment;

    this.Position = new PageAddress(page.PageID, index);
    this.Slot = segment.ReadByte(P_SLOT);
    this.Level = segment.ReadByte(P_LEVEL);
    this.DataBlock = segment.ReadPageAddress(P_DATA_BLOCK);
    this.NextNode = segment.ReadPageAddress(P_NEXT_NODE);

    this.Next = new PageAddress[this.Level];
    this.Prev = new PageAddress[this.Level];

    // read a (prev, next) address pair for each skip-list level
    for (var i = 0; i < this.Level; i++)
    {
        this.Prev[i] = segment.ReadPageAddress(P_PREV_NEXT + (i * PageAddress.SIZE * 2));
        this.Next[i] = segment.ReadPageAddress(P_PREV_NEXT + (i * PageAddress.SIZE * 2) + PageAddress.SIZE);
    }

    this.Key = segment.ReadIndexKey(P_KEY);
}
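The read order above likewise implies the index-node layout. A sketch assuming the same 5-byte PageAddress; the key offset cannot be a plain constant because the skip-list link table grows with Level, so P_KEY is presumably computed per node:

// inferred index-node segment layout (offsets deduced from the reads above)
private const int P_SLOT = 0;        // byte 00: index slot
private const int P_LEVEL = 1;       // byte 01: skip-list level
private const int P_DATA_BLOCK = 2;  // bytes 02-06: DataBlock address
private const int P_NEXT_NODE = 7;   // bytes 07-11: next node in same index
private const int P_PREV_NEXT = 12;  // bytes 12+: Level pairs of (prev, next) links

// key starts right after the last link pair:
// P_KEY = P_PREV_NEXT + (Level * PageAddress.SIZE * 2)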
public BufferWriter(BufferSlice buffer)
{
    _source = null;
    _current = buffer;
}
public void Insert(IEnumerable<KeyValuePair<BsonValue, PageAddress>> items, int order, BufferSlice buffer)
{
    // sort items before writing them into the buffer
    var query = order == Query.Ascending ?
        items.OrderBy(x => x.Key, _collation) :
        items.OrderByDescending(x => x.Key, _collation);

    var offset = 0;

    foreach (var item in query)
    {
        // validate key length before writing, so an oversized key never reaches the buffer
        var keyLength = IndexNode.GetKeyLength(item.Key, false);

        if (keyLength > MAX_INDEX_KEY_LENGTH) throw LiteException.InvalidIndexKey($"Sort key must be less than {MAX_INDEX_KEY_LENGTH} bytes.");

        buffer.WriteIndexKey(item.Key, offset);

        offset += keyLength;

        buffer.Write(item.Value, offset);

        offset += PageAddress.SIZE;

        _remaining++;
    }

    _count = _remaining;
}
public void Insert(IEnumerable<KeyValuePair<BsonValue, PageAddress>> items, int order, BufferSlice buffer)
{
    // sort items before writing them into the buffer (variant without collation or key-length check)
    var query = order == Query.Ascending ?
        items.OrderBy(x => x.Key) :
        items.OrderByDescending(x => x.Key);

    var offset = 0;

    foreach (var item in query)
    {
        buffer.WriteIndexKey(item.Key, offset);

        offset += GetKeyLength(item.Key);

        buffer.Write(item.Value, offset);

        offset += PageAddress.SIZE;

        _remaining++;
    }

    _count = _remaining;
}
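Both Insert overloads pack the sorted entries back-to-back: each entry is the serialized key immediately followed by a PageAddress (5 bytes, per the layout sketches above). A hypothetical offset trace for a 10-byte key followed by a 3-byte key:

// offset  0: key #1 (10 bytes)     offset += 10
// offset 10: address #1 (5 bytes)  offset += PageAddress.SIZE
// offset 15: key #2 (3 bytes)      offset += 3
// offset 18: address #2 (5 bytes)  offset += PageAddress.SIZE
// offset 23: next entry ...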