/// <summary>
/// Create a new table builder.
/// - storageState defines the options for the table buildup.
/// - dataStream is where the data for the table will be written to.
///   REQUIRES: Being able to read dataStream.Position
/// - temporaryFiles supplies streams for data that is staged outside memory
///   (the index block, and the filter block when a filter policy is set).
///   REQUIRES: Being able to read and change the Position of those streams.
/// </summary>
public TableBuilder(StorageState storageState, Stream dataStream, TemporaryFiles temporaryFiles)
{
	// Assigned before the try so Dispose() can release it if construction fails midway.
	_temporaryFiles = temporaryFiles;
	try
	{
		_storageState = storageState;
		_dataStream = dataStream;

		// Index entries are staged in a temporary stream; the starting position is
		// remembered, presumably so Finish() can rewind and copy it — confirm there.
		_indexStream = temporaryFiles.Create();
		_originalIndexStreamPosition = _indexStream.Position;

		// Pooled scratch buffers sized to the largest expected key; assumed to be
		// returned to the pool in Dispose() — TODO confirm.
		_lastKeyBuffer = _storageState.Options.BufferPool.Take(storageState.Options.MaximumExpectedKeySize);
		_scratchBuffer = _storageState.Options.BufferPool.Take(storageState.Options.MaximumExpectedKeySize);

		if (storageState.Options.FilterPolicy != null)
		{
			// Optional filter (e.g. bloom) block, built alongside the data blocks.
			var filterBuilder = storageState.Options.FilterPolicy.CreateBuilder();
			_filterBlockStream = temporaryFiles.Create();
			_filterBuilder = new FilterBlockBuilder(_filterBlockStream, filterBuilder);
			_filterBuilder.StartBlock(0);
		}

		// Index block uses a restart interval of 1: every entry is a restart point,
		// so index lookups never walk a delta-encoded run.
		_indexBlock = new BlockBuilder(_indexStream, storageState, _storageState.InternalKeyComparator, blockRestartInterval: 1);
		_dataBlock = new BlockBuilder(_dataStream, storageState, _storageState.InternalKeyComparator, _storageState.Options.BlockRestartInterval);
	}
	catch (Exception)
	{
		// Construction is transactional: release whatever was acquired so far.
		Dispose();
		throw;
	}
}
/// <summary>
/// Fills 100 memtables with 1,000 random keys each, then verifies that a
/// MergingIterator over all of them yields every entry exactly once, in
/// strictly ascending (internal-key) order.
/// </summary>
public void T1()
{
	var storageState = new StorageState("test", new StorageOptions());
	var rng = new Random();
	var memTables = new List<MemTable>();
	var written = 0;
	try
	{
		ulong sequence = 0;
		for (var tableIndex = 0; tableIndex < 100; tableIndex++)
		{
			var memTable = new MemTable(storageState);
			for (var entryIndex = 0; entryIndex < 1000; entryIndex++)
			{
				// Zero-padded so lexicographic order matches numeric order.
				var key = string.Format("{0:0000000000000000}", rng.Next());
				memTable.Add(sequence++, ItemType.Value, key, null);
				written++;
			}
			memTables.Add(memTable);
		}

		var comparator = new InternalKeyComparator(new CaseInsensitiveComparator());
		var sources = memTables.Select(t => t.NewIterator()).ToList();
		using (var merged = new MergingIterator(comparator, sources))
		{
			merged.SeekToFirst();
			Assert.True(merged.IsValid);

			var seen = 0;
			Slice previous = string.Empty;
			while (merged.IsValid)
			{
				// Each key must be strictly greater than its predecessor.
				if (!previous.IsEmpty())
					Assert.True(comparator.Compare(merged.Key, previous) > 0);
				previous = merged.Key.Clone();
				merged.Next();
				seen++;
			}
			Assert.Equal(written, seen);
		}
	}
	finally
	{
		foreach (var memTable in memTables)
			memTable.Dispose();
	}
}
/// <summary>
/// Wraps an inner iterator for database reads. The iterator starts out
/// invalid and moving forward; `sequence` is presumably the snapshot bound
/// used to filter visible entries — confirm against the Seek/Next methods.
/// </summary>
public DbIterator(StorageState storageContext, IIterator iterator, ulong sequence)
{
	this.storageContext = storageContext;
	this.iterator = iterator;
	this.sequence = sequence;

	// Fresh iterators are not positioned anywhere yet.
	IsValid = false;
	direction = Direction.Forward;
}
/// <summary>
/// A lookup against a freshly created (empty) memtable must miss and must
/// leave the out stream null.
/// </summary>
public void Empty()
{
	var storageState = new StorageState("test", new StorageOptions());
	using (var memTable = new MemTable(storageState))
	{
		Stream value;
		var found = memTable.TryGet("test", 1, out value);

		Assert.False(found);
		Assert.Null(value);
	}
}
/// <summary>
/// An entry written at sequence 2 must not be visible to a reader using
/// snapshot sequence 1 (a snapshot taken before the write).
/// </summary>
public void WillNotShowValueFromLaterSnapshot()
{
	var storageState = new StorageState("test", new StorageOptions());
	using (var memTable = new MemTable(storageState))
	{
		var handle = memTable.Write(new MemoryStream(new byte[] { 1, 2, 3 }));
		memTable.Add(2, ItemType.Value, "test", handle);

		Stream ignored;
		Assert.False(memTable.TryGet("test", 1, out ignored));
	}
}
/// <summary>
/// Creates an aggregation engine rooted at <paramref name="path"/>.
/// When no path is given, the system temp path is used as the storage name
/// and all storage is redirected to an in-memory file system.
/// </summary>
public AggregationEngine(string path = null)
{
	_path = path ?? Path.GetTempPath();

	// BUG FIX: previously the raw (possibly null) `path` argument was passed
	// to StorageState even though _path had just been resolved; use _path so
	// the storage name is never null.
	var storageState = new StorageState(_path, new StorageOptions());
	if (path == null)
		storageState.FileSystem = new InMemoryFileSystem("memory");

	_storage = new Storage.Storage(storageState);
	_sequentialUuidGenerator = new SequentialUuidGenerator { EtagBase = 1 };
}
/// <summary>
/// Builds a single block, writing through a CRC-tracking stream wrapper.
/// - stream: destination; REQUIRES a readable Position.
/// - comparator: orders keys within the block.
/// - blockRestartInterval: entries between full-key restart points; >= 1.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when blockRestartInterval is less than 1.
/// </exception>
public BlockBuilder(Stream stream, StorageState storageState, IComparator comparator, int blockRestartInterval)
{
	// Validate before assigning any state so a failed construction leaves
	// nothing half-initialized. (Message typo fixed: was "BlockRestartInternal".)
	if (blockRestartInterval < 1)
		throw new InvalidOperationException("BlockRestartInterval must be >= 1");

	_storageState = storageState;
	_stream = new CrcStream(stream);
	IsEmpty = true;
	// Remember where this block starts in the destination stream.
	OriginalPosition = stream.Position;
	_comparator = comparator;
	_blockRestartInterval = blockRestartInterval;
	// Pooled buffer for the previously added key (used for delta encoding).
	_lastKeyBuffer = storageState.Options.BufferPool.Take(storageState.Options.MaximumExpectedKeySize);
}
/// <summary>
/// Tracks the set of live versions. File numbering starts at 2 (1 is
/// presumably reserved for the initial log/manifest — confirm against
/// recovery code), and an initial empty version is appended as current.
/// </summary>
public VersionSet(StorageState storageContext)
{
	this.storageContext = storageContext;

	CompactionPointers = new Slice[Config.NumberOfLevels];
	ManifestFileNumber = 0;
	LogNumber = 0;
	NextFileNumber = 2;

	AppendVersion(new Version(storageContext, this));
}
/// <summary>
/// A deletion written after a value must hide it: the lookup still succeeds
/// (the key is known) but the returned stream is null.
/// </summary>
public void DeletesWillHideValues()
{
	var storageState = new StorageState("test", new StorageOptions());
	using (var memTable = new MemTable(storageState))
	{
		var handle = memTable.Write(new MemoryStream(new byte[] { 1, 2, 3 }));
		memTable.Add(2, ItemType.Value, "test", handle);
		memTable.Add(3, ItemType.Deletion, "test", null);

		Stream value;
		var found = memTable.TryGet("test", 5, out value);

		Assert.True(found);
		Assert.Null(value);
	}
}
/// <summary>
/// Creates an empty version: one (empty) file list per level and no
/// compaction candidate selected yet (-1 sentinels).
/// </summary>
private Version(StorageState storageContext)
{
	this.storageContext = storageContext;

	FileToCompact = null;
	FileToCompactLevel = -1;
	CompactionScore = -1;
	CompactionLevel = -1;

	Files = new List<FileMetadata>[Config.NumberOfLevels];
	for (var i = 0; i < Files.Length; i++)
	{
		Files[i] = new List<FileMetadata>();
	}
}
/// <summary>
/// A value written at sequence 1 must be readable through the later snapshot
/// sequence 2, and the returned stream must contain the written bytes.
/// </summary>
public void CanAddAndGetUsingLaterSnapshot()
{
	var storageState = new StorageState("test", new StorageOptions());
	using (var memTable = new MemTable(storageState))
	{
		var handle = memTable.Write(new MemoryStream(new byte[] { 1, 2, 3 }));
		memTable.Add(1, ItemType.Value, "test", handle);

		Stream value;
		Assert.True(memTable.TryGet("test", 2, out value));
		using (value)
		{
			Assert.Equal(1, value.ReadByte());
			Assert.Equal(2, value.ReadByte());
			Assert.Equal(3, value.ReadByte());
		}
	}
}
/// <summary>
/// Round-trip test: builds an sstable with 5 key/value pairs (with a bloom
/// filter and paranoid checks enabled), flushes it to disk, then re-opens it
/// via a memory-mapped file and verifies every value can be read back by
/// seeking with a later sequence number (1000 > the written sequence 1).
/// </summary>
public void CanReadValuesBack()
{
	var state = new StorageState("none", new StorageOptions { ParanoidChecks = true, FilterPolicy = new BloomFilterPolicy() });
	const int count = 5;
	string name;
	// The file must be fully written, flushed and closed before it can be
	// memory-mapped below.
	using (var file = CreateFile())
	{
		name = file.Name;
		using (var tblBuilder = new TableBuilder(state, file, new TemporaryFiles(state.FileSystem, 1)))
		{
			for (int i = 0; i < count; i++)
			{
				// Zero-padded so keys sort in insertion order.
				string k = "tests/" + i.ToString("0000");
				tblBuilder.Add(new InternalKey(k, 1, ItemType.Value).TheInternalKey, new MemoryStream(Encoding.UTF8.GetBytes("values/" + i)));
			}
			tblBuilder.Finish();
			// flushToDisk: true — make sure bytes hit the OS before reopening.
			file.Flush(true);
		}
	}
	using (var mmf = MemoryMappedFile.CreateFromFile(name, FileMode.Open))
	{
		var length = new FileInfo(name).Length;
		using (var table = new Table(state, new FileData(new MemoryMappedFileAccessor(name, mmf), length)))
		using (var iterator = table.CreateIterator(new ReadOptions()))
		{
			for (int i = 0; i < count; i++)
			{
				string k = "tests/" + i.ToString("0000");
				// Seek with a higher sequence number (1000) than was written (1);
				// the entry must still be visible to this later snapshot.
				iterator.Seek(new InternalKey(k, 1000, ItemType.Value).TheInternalKey);
				Assert.True(iterator.IsValid);
				using (var stream = iterator.CreateValueStream())
				using (var reader = new StreamReader(stream))
				{
					Assert.Equal("values/" + i, reader.ReadToEnd());
				}
			}
		}
	}
}
/// <summary>
/// Tracks the outstanding snapshots for a storage instance.
/// </summary>
public Snapshooter(StorageState storageContext)
{
	this.storageContext = storageContext;
	// ConcurrentDictionary used as a concurrent set; the object values are ignored.
	snapshots = new ConcurrentDictionary<Snapshot, object>();
}
/// <summary>
/// Read-side entry point operating over the shared storage state.
/// </summary>
public StorageReader(StorageState state)
{
	this.state = state;
}
/// <summary>
/// Wraps an externally created storage state (used e.g. when the caller
/// needs to customize the state before handing it over).
/// </summary>
public Storage(StorageState storageState)
{
	_storageState = storageState;
}
/// <summary>
/// Creates storage with a fresh state for the given name and options.
/// </summary>
public Storage(string name, StorageOptions options)
{
	_storageState = new StorageState(name, options);
}
/// <summary>
/// Convenience constructor: pulls the write-batch size, internal key
/// comparator and buffer pool from the storage context.
/// </summary>
public MemTable(StorageState storageContext)
	: this(storageContext.Options.WriteBatchSize, storageContext.InternalKeyComparator, storageContext.Options.BufferPool)
{
}
/// <summary>
/// Command entry point operating over the shared storage state.
/// </summary>
public StorageCommands(StorageState state)
{
	this.state = state;
}
/// <summary>
/// Write-side entry point operating over the shared storage state.
/// </summary>
public StorageWriter(StorageState state)
{
	_state = state;
}
/// <summary>
/// Serializes a group of write batches to the write-ahead log on a background task.
/// Record layout: a 12-byte header (8-byte sequence number at offset 0, 4-byte
/// total operation count at offset 8), then per operation: 1 op byte, the
/// 7-bit-encoded key length, the key bytes, and — for Put operations only —
/// a 4-byte value size followed by the value bytes copied from the memtable.
/// On failure the log is rewound to the last completed record and the task
/// faults with a LogWriterException wrapping the original error.
/// </summary>
internal static Task WriteToLogAsync(WriteBatch[] writes, ulong seq, StorageState state, WriteOptions options)
{
	return Task.Factory.StartNew(
		() =>
		{
			try
			{
				// Total operations across all batches — written once in the header.
				var opCount = writes.Sum(x => x._operations.Count);
				if (log.IsDebugEnabled)
					log.Debug("Writing {0} operations in seq {1}", opCount, seq);
				state.LogWriter.RecordStarted();

				// Reused 12-byte scratch buffer: header first, then 1- and 4-byte
				// fragments per operation below.
				var buffer = new byte[12];
				Bit.Set(buffer, 0, seq);
				Bit.Set(buffer, 8, opCount);
				state.LogWriter.Write(buffer, 0, 12);

				foreach (var operation in writes.SelectMany(writeBatch => writeBatch._operations))
				{
					buffer[0] = (byte)operation.Op;
					state.LogWriter.Write(buffer, 0, 1);
					state.LogWriter.Write7BitEncodedInt(operation.Key.Count);
					state.LogWriter.Write(operation.Key.Array, operation.Key.Offset, operation.Key.Count);
					// Only puts carry a value payload; deletes end after the key.
					if (operation.Op != Operations.Put)
						continue;
					Bit.Set(buffer, 0, operation.Handle.Size);
					state.LogWriter.Write(buffer, 0, 4);
					// The value bytes live in the memtable; stream them into the log.
					using (var stream = state.MemTable.Read(operation.Handle))
					{
						state.LogWriter.CopyFrom(stream);
					}
				}
				// Seals the record; flushes to disk only when the caller asked for it.
				state.LogWriter.RecordCompleted(options.FlushToDisk);
				if (log.IsDebugEnabled)
					log.Debug("Wrote {0} operations in seq {1} to log.", opCount, seq);
			}
			catch (Exception e)
			{
				// Discard the partial record so the log stays well-formed, then
				// surface the failure through the task.
				state.LogWriter.ResetToLastCompletedRecord();
				throw new LogWriterException(e);
			}
		});
}
/// <summary>
/// Creates an empty version that belongs to the given version set.
/// </summary>
public Version(StorageState storageContext, VersionSet versionSet)
	: this(storageContext)
{
	VersionSet = versionSet;
}
/// <summary>
/// Opens an sstable over the given (memory-mapped) file data: reads the
/// footer from the tail of the file, loads the index block, and — when a
/// filter policy is configured — locates and loads the filter block via the
/// meta-index. Any failure during construction disposes what was acquired
/// and rethrows.
/// </summary>
public Table(StorageState storageState, FileData fileData)
{
	_storageState = storageState;
	try
	{
		_fileData = fileData;
		if (_storageState.Options.MaxBlockCacheSizePerTableFile > 0)
		{
			// Optional per-file LRU cache of decoded blocks.
			_blockCache = new LruCache<BlockHandle, Block>(_storageState.Options.MaxBlockCacheSizePerTableFile);
		}
		// The footer lives in the last Footer.EncodedLength bytes of the file.
		if (fileData.Size < Footer.EncodedLength)
			throw new CorruptedDataException("File is too short to be an sstable");
		var footer = new Footer();
		using (var accessor = fileData.File.CreateAccessor(fileData.Size - Footer.EncodedLength, Footer.EncodedLength))
		{
			footer.DecodeFrom(accessor);
		}
		var readOptions = new ReadOptions
		{
			VerifyChecksums = _storageState.Options.ParanoidChecks
		};
		// The index block is pinned for the table's lifetime.
		_indexBlock = new Block(_storageState.Options, readOptions, footer.IndexHandle, fileData);
		_indexBlock.IncrementUsage();
		if (_storageState.Options.FilterPolicy == null)
			return; // we don't need any metadata
		// The meta-index maps "filter.<policy name>" to the filter block's handle.
		using (var metaBlock = new Block(_storageState.Options, readOptions, footer.MetaIndexHandle, fileData))
		using (var iterator = metaBlock.CreateIterator(CaseInsensitiveComparator.Default))
		{
			var filterName = ("filter." + _storageState.Options.FilterPolicy.Name);
			iterator.Seek(filterName);
			// Seek positions at-or-after; require an exact match before using the entry.
			if (iterator.IsValid && CaseInsensitiveComparator.Default.Compare(filterName, iterator.Key) == 0)
			{
				var handle = new BlockHandle();
				using (var stream = iterator.CreateValueStream())
				{
					handle.DecodeFrom(stream);
				}
				var filterAccessor = _fileData.File.CreateAccessor(handle.Position, handle.Count);
				try
				{
					// On success the filter takes ownership of the accessor
					// (assumed — confirm FilterPolicy.CreateFilter's contract).
					_filter = _storageState.Options.FilterPolicy.CreateFilter(filterAccessor);
				}
				catch (Exception)
				{
					// If CreateFilter threw before producing a filter, the accessor
					// is still ours to release; otherwise dispose the filter.
					if (_filter == null)
						filterAccessor.Dispose();
					else
						_filter.Dispose();
					throw;
				}
			}
		}
	}
	catch (Exception)
	{
		// Construction is transactional: release everything acquired so far.
		Dispose();
		throw;
	}
}