// Verifies that one WriteBatch can write the same key into two different trees,
// and that each tree ends up holding the value addressed to it.
public async Task MultipleTreesInSingleBatch()
{
    var batch = new WriteBatch();
    batch.Add("key/1", new MemoryStream(Encoding.UTF8.GetBytes("tree1")), "tree1");
    batch.Add("key/1", new MemoryStream(Encoding.UTF8.GetBytes("tree2")), "tree2");

    Tree t1;
    Tree t2;
    using (var tx = Env.NewTransaction(TransactionFlags.ReadWrite))
    {
        t1 = Env.CreateTree(tx, "tree1");
        t2 = Env.CreateTree(tx, "tree2");
        tx.Commit(); // BUG FIX: without the commit the tree creation is rolled back (see the sync variant of this test)
    }

    await Env.Writer.WriteAsync(batch);

    using (var tx = Env.NewTransaction(TransactionFlags.Read))
    {
        using (var stream = t1.Read(tx, "key/1"))
        using (var reader = new StreamReader(stream))
        {
            var result = reader.ReadToEnd();
            Assert.Equal("tree1", result);
        }
        using (var stream = t2.Read(tx, "key/1"))
        using (var reader = new StreamReader(stream))
        {
            var result = reader.ReadToEnd();
            Assert.Equal("tree2", result);
        }
    }
}
// Writes the same key into two separate trees via a single WriteBatch and checks
// that each tree holds its own value after the batch is applied.
public void MultipleTreesInSingleBatch()
{
    // Trees must exist (and be committed) before the batch references them.
    using (var tx = Env.NewTransaction(TransactionFlags.ReadWrite))
    {
        Env.CreateTree(tx, "tree1");
        Env.CreateTree(tx, "tree2");
        tx.Commit();
    }

    var batch = new WriteBatch();
    batch.Add("key/1", new MemoryStream(Encoding.UTF8.GetBytes("tree1")), "tree1");
    batch.Add("key/1", new MemoryStream(Encoding.UTF8.GetBytes("tree2")), "tree2");
    Env.Writer.Write(batch);

    using (var tx = Env.NewTransaction(TransactionFlags.Read))
    {
        var fromTree1 = tx.Environment.State.GetTree(tx, "tree1").Read("key/1").Reader.ToStringValue();
        Assert.Equal("tree1", fromTree1);

        var fromTree2 = tx.Environment.State.GetTree(tx, "tree2").Read("key/1").Reader.ToStringValue();
        Assert.Equal("tree2", fromTree2);
    }
}
// Begins an index entry for a new document: flushes the pending batch if it grew
// past the threshold, allocates the next document id, and registers its
// big-endian key slice under the "docs" tree.
public void NewIndexEntry()
{
    // Keep the in-flight batch bounded when auto-flushing is enabled.
    if (AutoFlush && _writeBatch.Size() > FlushThresholdBytes)
        Flush();

    CurrentDocumentId = _parent.NextDocumentId();

    var keyWriter = new SliceWriter(8);
    keyWriter.WriteBigEndian(CurrentDocumentId);
    _currentDocumentIdSlice = keyWriter.CreateSlice();

    _writeBatch.Add(_currentDocumentIdSlice, Stream.Null, "docs");
}
// Writes two batches to the root tree concurrently via WriteAsync and then
// verifies every entry from both batches is readable.
public async Task MultipleBatchesTest()
{
    const int numberOfItems = 10000;
    var keyBatch = new WriteBatch();
    var yekBatch = new WriteBatch();

    for (var i = 0; i < numberOfItems; i++)
    {
        keyBatch.Add("key/" + i, new MemoryStream(Encoding.UTF8.GetBytes(i.ToString(CultureInfo.InvariantCulture))), Env.Root.Name);
        yekBatch.Add("yek/" + i, new MemoryStream(Encoding.UTF8.GetBytes(i.ToString(CultureInfo.InvariantCulture))), Env.Root.Name);
    }

    await Task.WhenAll(Env.Writer.WriteAsync(keyBatch), Env.Writer.WriteAsync(yekBatch));

    using (var tx = Env.NewTransaction(TransactionFlags.Read))
    {
        for (var i = 0; i < numberOfItems; i++)
        {
            var expected = i.ToString(CultureInfo.InvariantCulture);

            using (var stream = Env.Root.Read(tx, "key/" + i))
            using (var reader = new StreamReader(stream))
                Assert.Equal(expected, reader.ReadToEnd());

            using (var stream = Env.Root.Read(tx, "yek/" + i))
            using (var reader = new StreamReader(stream))
                Assert.Equal(expected, reader.ReadToEnd());
        }
    }
}
// A snapshot read that is handed a WriteBatch must see both committed data and
// the batch's uncommitted entries; without the batch, only committed data.
public void Read_Items_From_Both_WriteBatch_And_Snapshot()
{
    using (var tx = Env.NewTransaction(TransactionFlags.ReadWrite))
    {
        Env.CreateTree(tx, "tree");
        tx.Environment.State.GetTree(tx, "tree").Add("foo1", StreamFor("foo1"));
        tx.Commit();
    }

    using (var writeBatch = new WriteBatch())
    using (var snapshot = Env.CreateSnapshot())
    {
        writeBatch.Add("foo2", StreamFor("foo2"), "tree");

        var committedRead = snapshot.Read("tree", "foo1", writeBatch);
        var pendingRead = snapshot.Read("tree", "foo2", writeBatch);
        var pendingReadWithoutBatch = snapshot.Read("tree", "foo2");

        Assert.NotNull(committedRead);
        Assert.NotNull(pendingRead);
        Assert.Null(pendingReadWithoutBatch); // "foo2" is not committed yet
        Assert.Equal(committedRead.Reader.ToStringValue(), "foo1");
        Assert.Equal(pendingRead.Reader.ToStringValue(), "foo2");
    }
}
// Bulk-writes generated test data through one WriteBatch, then iterates the tree
// and checks every stored key/value pair is one of the pairs that was written.
public void Should_be_able_to_read_and_write_lots_of_data()
{
    CreatTestSchema();

    var testData = GenerateTestData().ToList();
    var writeBatch = new WriteBatch();
    foreach (var pair in testData)
        writeBatch.Add(pair.Key, StreamFor(pair.Value), TestTreeName);
    Env.Writer.Write(writeBatch);

    using (var snapshot = Env.CreateSnapshot())
    using (var iterator = snapshot.Iterate(TestTreeName))
    {
        Assert.True(iterator.Seek(Slice.BeforeAllKeys));
        do
        {
            var storedValue = iterator.CreateReaderForCurrent().ToStringValue();
            var storedPair = new KeyValuePair<string, string>(iterator.CurrentKey.ToString(), storedValue);
            Assert.Contains(storedPair, testData);
        } while (iterator.MoveNext());
    }
}
// Variant of the snapshot-plus-batch read test that seeds the tree through
// tx.Environment.CreateTree: reads with the batch see uncommitted entries,
// reads without it do not.
public void Read_Items_From_Both_WriteBatch_And_Snapshot()
{
    using (var tx = Env.NewTransaction(TransactionFlags.ReadWrite))
    {
        Env.CreateTree(tx, "tree");
        tx.Environment.CreateTree(tx, "tree").Add("foo1", StreamFor("foo1"));
        tx.Commit();
    }

    using (var writeBatch = new WriteBatch())
    using (var snapshot = Env.CreateSnapshot())
    {
        writeBatch.Add("foo2", StreamFor("foo2"), "tree");

        var committedRead = snapshot.Read("tree", "foo1", writeBatch);
        var pendingRead = snapshot.Read("tree", "foo2", writeBatch);
        var pendingReadWithoutBatch = snapshot.Read("tree", "foo2");

        Assert.NotNull(committedRead);
        Assert.NotNull(pendingRead);
        Assert.Null(pendingReadWithoutBatch); // batch entry is invisible without the batch
        Assert.Equal(committedRead.Reader.ToStringValue(), "foo1");
        Assert.Equal(pendingRead.Reader.ToStringValue(), "foo2");
    }
}
// Writes two batches against two different trees concurrently and verifies all
// entries afterwards.
public async Task MultipleTreesTest()
{
    const int numberOfItems = 10000;
    var tree1Batch = new WriteBatch();
    var tree2Batch = new WriteBatch();

    for (var i = 0; i < numberOfItems; i++)
    {
        tree1Batch.Add("key/" + i, new MemoryStream(Encoding.UTF8.GetBytes(i.ToString(CultureInfo.InvariantCulture))), "tree1");
        tree2Batch.Add("yek/" + i, new MemoryStream(Encoding.UTF8.GetBytes(i.ToString(CultureInfo.InvariantCulture))), "tree2");
    }

    // Both trees must be committed before the batches reference them.
    using (var tx = Env.NewTransaction(TransactionFlags.ReadWrite))
    {
        Env.CreateTree(tx, "tree1");
        Env.CreateTree(tx, "tree2");
        tx.Commit();
    }

    await Task.WhenAll(
        Task.Run(() => Env.Writer.Write(tree1Batch)),
        Task.Run(() => Env.Writer.Write(tree2Batch)));

    using (var tx = Env.NewTransaction(TransactionFlags.Read))
    {
        for (var i = 0; i < numberOfItems; i++)
        {
            var expected = i.ToString(CultureInfo.InvariantCulture);

            var fromTree1 = tx.Environment.State.GetTree(tx, "tree1").Read("key/" + i).Reader.ToStringValue();
            Assert.Equal(expected, fromTree1);

            var fromTree2 = tx.Environment.State.GetTree(tx, "tree2").Read("yek/" + i).Reader.ToStringValue();
            Assert.Equal(expected, fromTree2);
        }
    }
}
// Writes two root-tree batches from parallel tasks and verifies every key from
// both batches via tx.Root.
public async Task MultipleBatchesTest()
{
    const int numberOfItems = 10000;
    var keyBatch = new WriteBatch();
    var yekBatch = new WriteBatch();

    for (var i = 0; i < numberOfItems; i++)
    {
        keyBatch.Add("key/" + i, new MemoryStream(Encoding.UTF8.GetBytes(i.ToString(CultureInfo.InvariantCulture))), Constants.RootTreeName);
        yekBatch.Add("yek/" + i, new MemoryStream(Encoding.UTF8.GetBytes(i.ToString(CultureInfo.InvariantCulture))), Constants.RootTreeName);
    }

    await Task.WhenAll(
        Task.Run(() => Env.Writer.Write(keyBatch)),
        Task.Run(() => Env.Writer.Write(yekBatch)));

    using (var tx = Env.NewTransaction(TransactionFlags.Read))
    {
        for (var i = 0; i < numberOfItems; i++)
        {
            var expected = i.ToString(CultureInfo.InvariantCulture);
            Assert.Equal(expected, tx.Root.Read("key/" + i).Reader.ToStringValue());
            Assert.Equal(expected, tx.Root.Read("yek/" + i).Reader.ToStringValue());
        }
    }
}
// ReadVersion through a WriteBatch: a batch entry added WITHOUT a version number
// reports version 0, while a committed entry reports its stored version.
public void ReadVersion_Items_From_Both_WriteBatch_And_Snapshot_WithoutVersionNumber()
{
    using (var tx = Env.NewTransaction(TransactionFlags.ReadWrite))
    {
        Env.CreateTree(tx, "tree");
        tx.Environment.CreateTree(tx, "tree").Add("foo1", StreamFor("foo1"));
        tx.Commit();
    }

    using (var writeBatch = new WriteBatch())
    using (var snapshot = Env.CreateSnapshot())
    {
        writeBatch.Add("foo2", StreamFor("foo2"), "tree");

        var foo1Version = snapshot.ReadVersion("tree", "foo1", writeBatch);
        var foo2Version = snapshot.ReadVersion("tree", "foo2", writeBatch);
        var foo2VersionWithoutBatch = snapshot.ReadVersion("tree", "foo2");

        Assert.Equal(1, foo1Version);
        Assert.Equal(0, foo2Version); //added to write batch without version number, so 0 is version number that is fetched
        Assert.Equal(0, foo2VersionWithoutBatch);
    }
}
// When the last batch operation for a key carries no explicit version,
// ReadVersion falls back to the version stored on disk — for both a pending
// delete and a pending re-add.
public void WhenLastBatchOperationVersionIsNullThenVersionComesFromStorage()
{
    using (var tx = Env.NewTransaction(TransactionFlags.ReadWrite))
    {
        Env.CreateTree(tx, "tree");
        tx.Environment.State.GetTree(tx, "tree").Add("foo1", StreamFor("foo1"));
        tx.Commit();
    }

    using (var writeBatch = new WriteBatch())
    using (var snapshot = Env.CreateSnapshot())
    {
        // Pending delete without a version: storage version still wins.
        writeBatch.Delete("foo1", "tree");
        var withBatch = snapshot.ReadVersion("tree", "foo1", writeBatch);
        var withoutBatch = snapshot.ReadVersion("tree", "foo1");
        Assert.Equal(1, withBatch);
        Assert.Equal(1, withoutBatch);

        // Pending re-add without a version: same fallback applies.
        writeBatch.Add("foo1", StreamFor("123"), "tree");
        withBatch = snapshot.ReadVersion("tree", "foo1", writeBatch);
        withoutBatch = snapshot.ReadVersion("tree", "foo1");
        Assert.Equal(1, withBatch);
        Assert.Equal(1, withoutBatch);
    }
}
// Round-trips a generated data set through a single WriteBatch and verifies each
// iterated key/value pair against the in-memory source list.
public void Should_be_able_to_read_and_write_lots_of_data()
{
    CreatTestSchema();

    var testData = GenerateTestData().ToList();
    var batch = new WriteBatch();
    foreach (var dataPair in testData)
    {
        batch.Add(dataPair.Key, StreamFor(dataPair.Value), TestTreeName);
    }
    Env.Writer.Write(batch);

    using (var snapshot = Env.CreateSnapshot())
    {
        using (var iterator = snapshot.Iterate(TestTreeName))
        {
            Assert.True(iterator.Seek(Slice.BeforeAllKeys));
            do
            {
                var value = iterator.CreateReaderForCurrent().ToStringValue();
                Assert.Contains(new KeyValuePair<string, string>(iterator.CurrentKey.ToString(), value), testData);
            } while (iterator.MoveNext());
        }
    }
}
// ReadVersion through a WriteBatch: an entry added WITH version 1 reports the
// next version (2) while it is still uncommitted; without the batch it is 0.
public void ReadVersion_Items_From_Both_WriteBatch_And_Snapshot()
{
    using (var tx = Env.NewTransaction(TransactionFlags.ReadWrite))
    {
        Env.CreateTree(tx, "tree");
        tx.Environment.CreateTree(tx, "tree").Add("foo1", StreamFor("foo1"));
        tx.Commit();
    }

    using (var writeBatch = new WriteBatch())
    using (var snapshot = Env.CreateSnapshot())
    {
        writeBatch.Add("foo2", StreamFor("foo2"), "tree", 1);

        var foo1Version = snapshot.ReadVersion("tree", "foo1", writeBatch);
        var foo2Version = snapshot.ReadVersion("tree", "foo2", writeBatch);
        var foo2VersionWithoutBatch = snapshot.ReadVersion("tree", "foo2");

        Assert.Equal(1, foo1Version);
        Assert.Equal(2, foo2Version); //is not committed yet
        Assert.Equal(0, foo2VersionWithoutBatch);
    }
}
// When the same key exists committed (version 2 after two writes) and pending in
// a batch (added with version 2), ReadVersion with the batch reports the
// batch-implied version 3; without it, the committed version 2.
public void ReadVersion_The_Same_Item_Both_WriteBatch_And_Snapshot_WriteBatch_Takes_Precedence()
{
    using (var tx = Env.NewTransaction(TransactionFlags.ReadWrite))
    {
        Env.CreateTree(tx, "tree");
        tx.Environment.State.GetTree(tx, "tree").Add("foo1", StreamFor("foo1"));
        tx.Commit();
    }

    // Second committed write bumps the stored version to 2.
    using (var tx = Env.NewTransaction(TransactionFlags.ReadWrite))
    {
        tx.Environment.State.GetTree(tx, "tree").Add("foo1", StreamFor("updated foo1"));
        tx.Commit();
    }

    using (var writeBatch = new WriteBatch())
    using (var snapshot = Env.CreateSnapshot())
    {
        writeBatch.Add("foo1", StreamFor("updated foo1 2"), "tree", 2);

        var versionWithBatch = snapshot.ReadVersion("tree", "foo1", writeBatch);
        var versionWithoutBatch = snapshot.ReadVersion("tree", "foo1");

        Assert.Equal(3, versionWithBatch);
        Assert.Equal(2, versionWithoutBatch);
    }
}
// When a key exists both committed and pending in a batch, a snapshot read with
// the batch returns the pending value, and without the batch the committed one.
public void Read_The_Same_Item_Both_WriteBatch_And_Snapshot_WriteBatch_Takes_Precedence()
{
    using (var tx = Env.NewTransaction(TransactionFlags.ReadWrite))
    {
        Env.CreateTree(tx, "tree");
        tx.Environment.State.GetTree(tx, "tree").Add("foo1", StreamFor("foo1"));
        tx.Commit();
    }

    using (var writeBatch = new WriteBatch())
    using (var snapshot = Env.CreateSnapshot())
    {
        writeBatch.Add("foo1", StreamFor("updated foo1"), "tree");

        var readWithBatch = snapshot.Read("tree", "foo1", writeBatch);
        var readWithoutBatch = snapshot.Read("tree", "foo1");

        Assert.NotNull(readWithBatch);
        Assert.NotNull(readWithoutBatch);
        Assert.Equal(readWithBatch.Reader.ToStringValue(), "updated foo1");
        Assert.Equal(readWithoutBatch.Reader.ToStringValue(), "foo1");
    }
}
// Serializes the disk record as JSON under a big-endian counter key in the "albums"
// tree, and indexes it by disk id, artist (lower-cased) and title (lower-cased) via
// MultiAdd. Every 1000 records the accumulated batch is written and a new one started.
// NOTE(review): the JsonTextWriter/StreamWriter wrapping `ms` are never flushed before
// `ms.Position = 0` — presumably Serialize flushes internally; confirm, otherwise the
// tail of the JSON may be missing from the stored stream.
public override void Accept(Disk d) { var ms = new MemoryStream(); _serializer.Serialize(new JsonTextWriter(new StreamWriter(ms)), d); ms.Position = 0; var key = new Slice(EndianBitConverter.Big.GetBytes(counter++)); _currentBatch.Add(key, ms, "albums"); foreach (var diskId in d.DiskIds) { _currentBatch.MultiAdd(diskId, key, "ix_diskids"); } if (d.Artist != null) { _currentBatch.MultiAdd(d.Artist.ToLower(), key, "ix_artists"); } if (d.Title != null) { _currentBatch.MultiAdd(d.Title.ToLower(), key, "ix_titles"); } if (counter % 1000 == 0) { _storageEnvironment.Writer.Write(_currentBatch); _currentBatch = new WriteBatch(); } }
// Copies the raw bytes into a pooled stream and queues them on the batch under
// this table's tree, optionally carrying an expected version for concurrency checks.
public virtual void Add(WriteBatch writeBatch, string key, byte[] value, ushort? expectedVersion = null)
{
    var buffer = new BufferPoolMemoryStream(BufferPool);
    buffer.Write(value, 0, value.Length);
    buffer.Position = 0; // rewind so the batch reads from the start

    writeBatch.Add(key, buffer, TableName, expectedVersion);
}
// Serializes the JSON token into a pooled stream and queues it on the batch
// under this table's tree, optionally carrying an expected version.
public virtual void Add(WriteBatch writeBatch, Slice key, RavenJToken value, ushort? expectedVersion = null)
{
    var buffer = new BufferPoolMemoryStream(BufferPool);
    value.WriteTo(buffer);
    buffer.Position = 0; // rewind so the batch reads from the start

    writeBatch.Add(key, buffer, TableName, expectedVersion);
}
// Serializes the JSON token into a pooled stream (default pool) and queues it on
// the batch under this table's tree, optionally carrying an expected version.
public virtual void Add(WriteBatch writeBatch, Slice key, RavenJToken value, ushort? expectedVersion = null)
{
    var buffer = new BufferPoolMemoryStream();
    value.WriteTo(buffer);
    buffer.Position = 0; // rewind so the batch reads from the start

    writeBatch.Add(key, buffer, TableName, expectedVersion);
}
// Copies the raw bytes into a pooled stream (default pool) and queues them on
// the batch under this table's tree, optionally carrying an expected version.
public virtual void Add(WriteBatch writeBatch, Slice key, byte[] value, ushort? expectedVersion = null)
{
    var buffer = new BufferPoolMemoryStream();
    buffer.Write(value, 0, value.Length);
    buffer.Position = 0; // rewind so the batch reads from the start

    writeBatch.Add(key, buffer, TableName, expectedVersion);
}
// Reading the current version through the snapshot (batch-aware) and feeding it
// back into subsequent Adds must keep versions consistent, so the final write
// does not raise a concurrency exception.
public void BatchConcurrencyExceptionShouldNotBeThrown()
{
    var batch = new WriteBatch();
    batch.Add("key/1", StreamFor("123"), Constants.RootTreeName, 0);
    Env.Writer.Write(batch);

    using (var snapshot = Env.CreateSnapshot())
    {
        var version = snapshot.ReadVersion(Constants.RootTreeName, "key/1", batch);
        Assert.Equal(1, version);

        // New batch: add with the observed version, re-read, and add again with
        // the batch-aware version before writing.
        batch = new WriteBatch();
        batch.Add("key/1", StreamFor("123"), Constants.RootTreeName, version);
        version = snapshot.ReadVersion(Constants.RootTreeName, "key/1", batch);
        batch.Add("key/1", StreamFor("123"), Constants.RootTreeName, version);
        Env.Writer.Write(batch);
    }
}
// Forces the writer to merge three batches into one batch group (writes are stopped
// while the three Write tasks queue up, then released) and verifies that a version
// mismatch in one batch (batch2 targets the never-created "tree2" with version: 1)
// fails only that batch: batch1 and batch3 still commit their values.
// NOTE(review): the merge depends on all three tasks enqueueing before
// disposable.Dispose() releases the writer — presumably StopWrites guarantees that;
// confirm there is no race that lets a batch slip through unmerged.
public async Task MergedBatchErrorHandling() { var batch1 = new WriteBatch(); batch1.Add("key/1", new MemoryStream(Encoding.UTF8.GetBytes("tree1")), "tree1"); var batch2 = new WriteBatch(); batch2.Add("key/1", new MemoryStream(Encoding.UTF8.GetBytes("tree2")), "tree2", version: 1); var batch3 = new WriteBatch(); batch3.Add("key/1", new MemoryStream(Encoding.UTF8.GetBytes("tree3")), "tree3"); using (var tx = Env.NewTransaction(TransactionFlags.ReadWrite)) { Env.CreateTree(tx, "tree1"); Env.CreateTree(tx, "tree3"); tx.Commit(); } var disposable = Env.Writer.StopWrites(); // forcing to build one batch group from all batches that will be added between this line and _semaphore.Release var tasks = new[] { Task.Run(() => Env.Writer.Write(batch1)), Task.Run(() => Env.Writer.Write(batch2)), Task.Run(() => Env.Writer.Write(batch3)) }; disposable.Dispose(); try { await Task.WhenAll(tasks); Assert.True(false); } catch (AggregateException e) { Assert.Equal("Cannot add 'key/1' to 'tree2' tree. Version mismatch. Expected: 1. Actual: 0.", e.InnerException.Message); } using (var tx = Env.NewTransaction(TransactionFlags.Read)) { var result = tx.Environment.State.GetTree(tx, "tree1").Read("key/1").Reader.ToStringValue(); Assert.Equal("tree1", result); result = tx.Environment.State.GetTree(tx, "tree3").Read("key/1").Reader.ToStringValue(); Assert.Equal("tree3", result); } }
// Replays the recorded debug journal: drains WriteQueue in order, translating each
// ActivityEntry into the matching WriteBatch operation (Add/Delete/MultiAdd/
// MultiDelete/Increment) or an immediate CreateTree transaction, then writes the
// accumulated batch once at the end. Debug recording is suspended for the duration
// so the replay itself is not re-recorded, and restored afterwards.
// NOTE(review): the Increment branch reads a single byte from ValueStream as the
// delta (existing TODO below) — if the recorded delta is wider than one byte this
// replays the wrong value; confirm against how Increment entries are recorded.
public void Replay() { var wasDebugRecording = _env.IsDebugRecording; _env.IsDebugRecording = false; using (var writeBatch = new WriteBatch()) { ActivityEntry entry; while (WriteQueue.TryDequeue(out entry)) { switch (entry.ActionType) { case DebugActionType.Add: writeBatch.Add(entry.Key, entry.ValueStream, entry.TreeName); break; case DebugActionType.Delete: writeBatch.Delete(entry.Key, entry.TreeName); break; case DebugActionType.MultiAdd: writeBatch.MultiAdd(entry.Key, new Slice(Encoding.UTF8.GetBytes(entry.Value.ToString())), entry.TreeName); break; case DebugActionType.MultiDelete: writeBatch.MultiDelete(entry.Key, new Slice(Encoding.UTF8.GetBytes(entry.Value.ToString())), entry.TreeName); break; case DebugActionType.CreateTree: using (var tx = _env.NewTransaction(TransactionFlags.ReadWrite)) { _env.CreateTree(tx, entry.TreeName); tx.Commit(); } break; case DebugActionType.Increment: //TODO : make sure this is correct here writeBatch.Increment(entry.Key, entry.ValueStream.ReadByte(), entry.TreeName); break; default: //precaution against newly added action types throw new InvalidOperationException("unsupported tree action type"); } } _env.Writer.Write(writeBatch); } _env.IsDebugRecording = wasDebugRecording; //restore the state as it was }
// Smallest possible batch: one key in the root tree, read back through a
// snapshot (null tree name addresses the root tree here).
public void SingleItemBatchTest()
{
    var batch = new WriteBatch();
    batch.Add("key/1", new MemoryStream(Encoding.UTF8.GetBytes("123")), Constants.RootTreeName);
    Env.Writer.Write(batch);

    using (var snapshot = Env.CreateSnapshot())
    {
        var reader = snapshot.Read(null, "key/1").Reader;
        Assert.Equal("123", reader.ToStringValue());
    }
}
// Round-trips a single small value through a WriteBatch and reads it back as raw
// bytes from a snapshot.
public void Should_be_able_to_read_and_write_small_values()
{
    CreatTestSchema();

    var batch = new WriteBatch();
    batch.Add("key", StreamFor("value"), TestTreeName);
    Env.Writer.Write(batch);

    using (var snapshot = Env.CreateSnapshot())
    {
        var rawBytes = snapshot.Read(TestTreeName, "key").Reader.AsStream().ReadData();
        Assert.Equal("value", Encoding.UTF8.GetString(rawBytes));
    }
}
// Writes one small value via a batch and verifies the stored bytes decode back
// to the original string.
public void Should_be_able_to_read_and_write_small_values()
{
    CreatTestSchema();

    var writeBatch = new WriteBatch();
    writeBatch.Add("key", StreamFor("value"), TestTreeName);
    Env.Writer.Write(writeBatch);

    using (var snapshot = Env.CreateSnapshot())
    {
        var stored = snapshot.Read(TestTreeName, "key").Reader.AsStream().ReadData();
        var storedValue = Encoding.UTF8.GetString(stored);
        Assert.Equal("value", storedValue);
    }
}
// One-item batch into the root tree, verified through tx.State.Root in a read
// transaction.
public void SingleItemBatchTest()
{
    var batch = new WriteBatch();
    batch.Add("key/1", new MemoryStream(Encoding.UTF8.GetBytes("123")), Constants.RootTreeName);
    Env.Writer.Write(batch);

    using (var tx = Env.NewTransaction(TransactionFlags.Read))
    {
        var readResult = tx.State.Root.Read("key/1");
        Assert.Equal("123", readResult.Reader.ToStringValue());
    }
}
// Seeds the given tree with 50 batches of 500 entries each; every key/value is
// derived from the batch and item indices.
private void WriteLotsOfTestDataForTree(string treeName)
{
    for (var batchIndex = 0; batchIndex < 50; batchIndex++)
    {
        using (var writeBatch = new WriteBatch())
        {
            for (var itemIndex = 0; itemIndex < 500; itemIndex++)
            {
                var index = (batchIndex + "/ " + itemIndex);
                writeBatch.Add("key/" + index, StreamFor("value/" + index), treeName);
            }
            Env.Writer.Write(writeBatch);
        }
    }
}
// Fills every tree with documentCount copies of testBuffer in one batch. Keys are
// either deterministic (tree name + index + a per-call GUID suffix) or fully
// random GUIDs, depending on `sequential`.
private void AddRecords(StorageEnvironment env, IList<Tree> trees, int documentCount, byte[] testBuffer, bool sequential)
{
    var key = Guid.NewGuid().ToString();
    var batch = new WriteBatch();

    for (var docIndex = 0; docIndex < documentCount; docIndex++)
    {
        foreach (var tree in trees)
        {
            var id = sequential
                ? string.Format("tree_{0}_record_{1}_key_{2}", tree.Name, docIndex, key)
                : Guid.NewGuid().ToString();
            batch.Add(id, new MemoryStream(testBuffer), tree.Name);
        }
    }

    env.Writer.Write(batch);
}
// Benchmark helper: writes `numberOfBatches` batches of `itemsPerBatch` items into
// the "Root" tree, timing each batch with a Stopwatch and recording bytes written,
// duration and item count in a PerformanceRecord per batch. Keys come from the
// enumerator's sequential ids formatted as 16-digit zero-padded strings; values are
// reused/resized via GetValueToWrite.
// NOTE(review): enumerator.MoveNext()'s return value is ignored — assumes the
// enumerator always yields itemsPerBatch * numberOfBatches items; confirm callers
// guarantee that, otherwise Current would repeat or throw.
private List <PerformanceRecord> WriteInternalBatch( string operation, IEnumerator <TestData> enumerator, long itemsPerBatch, long numberOfBatches, PerfTracker perfTracker, StorageEnvironment env) { var sw = new Stopwatch(); byte[] valueToWrite = null; var records = new List <PerformanceRecord>(); for (var b = 0; b < numberOfBatches; b++) { sw.Restart(); long v = 0; using (var batch = new WriteBatch()) { for (var i = 0; i < itemsPerBatch; i++) { enumerator.MoveNext(); valueToWrite = GetValueToWrite(valueToWrite, enumerator.Current.ValueSize); v += valueToWrite.Length; batch.Add(enumerator.Current.Id.ToString("0000000000000000"), new MemoryStream(valueToWrite), "Root"); } env.Writer.Write(batch); } sw.Stop(); perfTracker.Record(sw.ElapsedMilliseconds); records.Add(new PerformanceRecord { Bytes = v, Operation = operation, Time = DateTime.Now, Duration = sw.ElapsedMilliseconds, ProcessedItems = itemsPerBatch }); } return(records); }
// One-item batch written asynchronously to the root tree and read back through a
// StreamReader.
public async Task SingleItemBatchTest()
{
    var batch = new WriteBatch();
    batch.Add("key/1", new MemoryStream(Encoding.UTF8.GetBytes("123")), Env.Root.Name);
    await Env.Writer.WriteAsync(batch);

    using (var tx = Env.NewTransaction(TransactionFlags.Read))
    using (var stream = Env.Root.Read(tx, "key/1"))
    using (var reader = new StreamReader(stream))
    {
        Assert.Equal("123", reader.ReadToEnd());
    }
}
// Regression scenario for the tree rebalancer: fill a tree with 750 documents,
// delete the first 180 in a batch, then delete "Foo180" in a transaction — the
// rebalancer used to fail moving the first node between branches at that point.
public void RebalancerIssue()
{
    const int DocumentCount = 750;

    var rand = new Random();
    var testBuffer = new byte[757];
    rand.NextBytes(testBuffer);

    using (var tx = Env.NewTransaction(TransactionFlags.ReadWrite))
    {
        Env.CreateTree(tx, "tree1");
        tx.Commit();
    }

    // Populate.
    var batch = new WriteBatch();
    for (var docIndex = 0; docIndex < DocumentCount; docIndex++)
        batch.Add("Foo" + docIndex, new MemoryStream(testBuffer), "tree1");
    Env.Writer.Write(batch);

    // Delete the first 180 documents only.
    batch = new WriteBatch();
    for (var docIndex = 0; docIndex < 180; docIndex++)
        batch.Delete("Foo" + docIndex, "tree1");
    Env.Writer.Write(batch);

    using (var tx = Env.NewTransaction(TransactionFlags.ReadWrite))
    {
        var t1 = tx.Environment.State.GetTree(tx, "tree1");
        t1.Delete("Foo180"); // rebalancer fails to move 1st node from one branch to another
    }
}
// Writes with a stale expected version must fail: both a versioned Add and a
// versioned Delete against version 1 data, using expected version 2, raise the
// version-mismatch error inside an AggregateException.
public void BatchConcurrencyExceptionShouldBeThrownWhenVersionMismatch()
{
    var seedBatch = new WriteBatch();
    seedBatch.Add("key/1", StreamFor("123"), Constants.RootTreeName, 0);
    Env.Writer.Write(seedBatch);

    // Add with a wrong expected version.
    var staleAdd = new WriteBatch();
    staleAdd.Add("key/1", StreamFor("123"), Constants.RootTreeName, 2);
    var e = Assert.Throws<AggregateException>(() => Env.Writer.Write(staleAdd)).InnerException;
    Assert.Equal("Cannot add 'key/1' to 'Root' tree. Version mismatch. Expected: 2. Actual: 1.", e.Message);

    // Delete with a wrong expected version.
    var staleDelete = new WriteBatch();
    staleDelete.Delete("key/1", Constants.RootTreeName, 2);
    e = Assert.Throws<AggregateException>(() => Env.Writer.Write(staleDelete)).InnerException;
    Assert.Equal("Cannot delete 'key/1' to 'Root' tree. Version mismatch. Expected: 2. Actual: 1.", e.Message);
}
// Parses the raw JSON document, stores its original text under a big-endian counter
// key in the "albums" tree, and indexes it by each DiskIds entry, by lower-cased
// Artist and by lower-cased Title via MultiAdd. Returns the number of batch
// operations queued for this document (1 for the store + 1 per index entry).
// Flushes and replaces the accumulated batch every 500 documents.
// NOTE(review): ms.Position is reset after writer.Flush(), so the full text is
// stored; the StreamWriter itself is never disposed — the MemoryStream keeps the
// data alive, but confirm the batch consumes the stream before GC.
public override int Accept(string d) { var disk = JObject.Parse(d); var ms = new MemoryStream(); var writer = new StreamWriter(ms); writer.Write(d); writer.Flush(); ms.Position = 0; var key = new Slice(EndianBitConverter.Big.GetBytes(counter++)); _currentBatch.Add(key, ms, "albums"); int count = 1; foreach (var diskId in disk.Value <JArray>("DiskIds")) { count++; _currentBatch.MultiAdd(diskId.Value <string>(), key, "ix_diskids"); } var artist = disk.Value <string>("Artist"); if (artist != null) { count++; _currentBatch.MultiAdd(artist.ToLower(), key, "ix_artists"); } var title = disk.Value <string>("Title"); if (title != null) { count++; _currentBatch.MultiAdd(title.ToLower(), key, "ix_titles"); } if (counter % 500 == 0) { _storageEnvironment.Writer.Write(_currentBatch); _currentBatch = new WriteBatch(); } return(count); }
// Seeds "TestTree" with two entries written as two separate batches
// ("foo" -> "bar", then "bar" -> "foo").
private void WriteTestDataToEnv()
{
    using (var tx = Env.NewTransaction(TransactionFlags.ReadWrite))
    {
        Env.CreateTree(tx, "TestTree");
        tx.Commit();
    }

    var firstBatch = new WriteBatch();
    firstBatch.Add("foo", StreamFor("bar"), "TestTree");
    Env.Writer.Write(firstBatch);

    var secondBatch = new WriteBatch();
    secondBatch.Add("bar", StreamFor("foo"), "TestTree");
    Env.Writer.Write(secondBatch);
}
// Records a debug journal while writing one document and manually flushing the log
// to the data file (once outside a transaction, once inside a ReadWrite transaction),
// then creates a fresh in-memory environment, replays the journal from file, and
// verifies the document is reproduced. Exercises that manual-flush operations are
// captured and replayed correctly by DebugJournal.
// NOTE(review): both environments are memory-only, so the journal file (debugJouralName)
// is the only state shared between them — presumably DebugJournal persists to disk
// even for in-memory environments; confirm.
public void Record_debug_journal_and_replay_it_with_manual_flushing() { using (var env = new StorageEnvironment(StorageEnvironmentOptions.CreateMemoryOnly())) { env.DebugJournal = new DebugJournal(debugJouralName, env, true); using (var tx = env.NewTransaction(TransactionFlags.ReadWrite)) { env.CreateTree(tx, "test-tree"); tx.Commit(); } using (var writeBatch = new WriteBatch()) { var valueBuffer = new MemoryStream(Encoding.UTF8.GetBytes("{ \"title\": \"foo\",\"name\":\"bar\"}")); writeBatch.Add("foo", valueBuffer, "test-tree"); env.Writer.Write(writeBatch); } using (env.Options.AllowManualFlushing()) { env.FlushLogToDataFile(); } using (var tx = env.NewTransaction(TransactionFlags.ReadWrite)) using (env.Options.AllowManualFlushing()) { env.FlushLogToDataFile(tx); tx.Commit(); } } using (var env = new StorageEnvironment(StorageEnvironmentOptions.CreateMemoryOnly())) { env.DebugJournal = DebugJournal.FromFile(debugJouralName, env); env.DebugJournal.Replay(); using (var snapshot = env.CreateSnapshot()) { Assert.Equal("{ \"title\": \"foo\",\"name\":\"bar\"}", snapshot.Read("test-tree", "foo").Reader.ToStringValue()); } } }
// Three batches written from parallel tasks; batch2 targets the never-created
// "tree2" with version: 1, so Task.WhenAll surfaces an AggregateException with the
// version-mismatch message, while batch1 and batch3 must still have committed
// their values to tree1/tree3. Assert.True(false) guards against the writes
// unexpectedly succeeding.
// NOTE(review): only e.InnerException (the first failure) is asserted — assumes
// batch2's failure is the sole exception in the aggregate; confirm ordering is
// deterministic if this ever flakes.
public async Task BatchErrorHandling() { var batch1 = new WriteBatch(); batch1.Add("key/1", new MemoryStream(Encoding.UTF8.GetBytes("tree1")), "tree1"); var batch2 = new WriteBatch(); batch2.Add("key/1", new MemoryStream(Encoding.UTF8.GetBytes("tree2")), "tree2", version: 1); var batch3 = new WriteBatch(); batch3.Add("key/1", new MemoryStream(Encoding.UTF8.GetBytes("tree3")), "tree3"); using (var tx = Env.NewTransaction(TransactionFlags.ReadWrite)) { Env.CreateTree(tx, "tree1"); Env.CreateTree(tx, "tree3"); tx.Commit(); } try { await Task.WhenAll(Task.Run(() => Env.Writer.Write(batch1)), Task.Run(() => Env.Writer.Write(batch2)), Task.Run(() => Env.Writer.Write(batch3))); Assert.True(false); } catch (AggregateException e) { Assert.Equal("Cannot add 'key/1' to 'tree2' tree. Version mismatch. Expected: 1. Actual: 0.", e.InnerException.Message); using (var tx = Env.NewTransaction(TransactionFlags.Read)) { var result = tx.Environment.State.GetTree(tx, "tree1").Read("key/1").Reader.ToStringValue(); Assert.Equal("tree1", result); result = tx.Environment.State.GetTree(tx, "tree3").Read("key/1").Reader.ToStringValue(); Assert.Equal("tree3", result); } } }
// End-to-end smoke test: creates an in-memory environment with a "foos" tree,
// serializes a Foo object to JSON into a MemoryStream, writes it via a WriteBatch,
// then reads it back in a read transaction and deserializes it to verify the value
// survived the round trip.
// NOTE(review): the batch is written inside the `using (var writer ...)` block — the
// stream is still open at that point, so Write sees the full data; the commented-out
// counter-key line appears to be leftover from an earlier variant of this sample.
public void WriteSomethingToVoron() { var serializer = new JsonSerializer(); using (var storage = new StorageEnvironment(StorageEnvironmentOptions.GetInMemory())) { using (var tx = storage.NewTransaction(TransactionFlags.ReadWrite)) { storage.CreateTree(tx, "foos"); tx.Commit(); } { var ms = new MemoryStream(); var batch = new WriteBatch(); var foo = new Foo { Id = "hello", Value = 99 }; using (var writer = new StreamWriter(ms)) { serializer.Serialize(new JsonTextWriter(writer), foo); writer.Flush(); ms.Position = 0; //var key = new Slice(EndianBitConverter.Big.GetBytes(counter++)); batch.Add(foo.Id, ms, "foos"); storage.Writer.Write(batch); } } using (var tx = storage.NewTransaction(TransactionFlags.Read)) { var foos = tx.GetTree("foos"); var readResult = foos.Read(tx, "hello"); using (var stream = readResult.Reader.AsStream()) { var foo = serializer.Deserialize<Foo>(new JsonTextReader(new StreamReader(stream))); Assert.Equal(99, foo.Value); } } } }
// A second Add with expected version 0 against an existing key (version 1) must
// fail with a version-mismatch error.
public void BatchMissing()
{
    var seedBatch = new WriteBatch();
    seedBatch.Add("key/1", StreamFor("123"), Constants.RootTreeName, 0);
    Env.Writer.Write(seedBatch);

    using (var tx = Env.NewTransaction(TransactionFlags.Read))
    {
        Assert.Equal(1, tx.Root.ReadVersion("key/1"));
    }

    var staleBatch = new WriteBatch();
    staleBatch.Add("key/1", StreamFor("123"), Constants.RootTreeName, 0);
    var e = Assert.Throws<AggregateException>(() => Env.Writer.Write(staleBatch)).InnerException;
    Assert.Equal("Cannot add 'key/1' to 'Root' tree. Version mismatch. Expected: 0. Actual: 1.", e.Message);
}
// Rebalancer regression: after inserting 750 documents and deleting the first
// 180 via a batch, deleting "Foo180" inside a transaction used to trip the
// rebalancer when moving the first node between branches.
public void RebalancerIssue()
{
    const int DocumentCount = 750;

    var rand = new Random();
    var payload = new byte[757];
    rand.NextBytes(payload);

    using (var tx = Env.NewTransaction(TransactionFlags.ReadWrite))
    {
        Env.CreateTree(tx, "tree1");
        tx.Commit();
    }

    // Fill the tree.
    var batch = new WriteBatch();
    for (var i = 0; i < DocumentCount; i++)
    {
        batch.Add("Foo" + i, new MemoryStream(payload), "tree1");
    }
    Env.Writer.Write(batch);

    // Remove only documents 0..179.
    batch = new WriteBatch();
    for (var i = 0; i < 180; i++)
    {
        batch.Delete("Foo" + i, "tree1");
    }
    Env.Writer.Write(batch);

    using (var tx = Env.NewTransaction(TransactionFlags.ReadWrite))
    {
        var tree = tx.Environment.State.GetTree(tx, "tree1");
        tree.Delete("Foo180"); // rebalancer fails to move 1st node from one branch to another
    }
}
// A batched delete removes the key: its version goes from 1 after the add back
// to 0 after the delete.
public void BatchDelete()
{
    var addBatch = new WriteBatch();
    addBatch.Add("key/1", StreamFor("123"), Constants.RootTreeName);
    Env.Writer.Write(addBatch);

    using (var tx = Env.NewTransaction(TransactionFlags.Read))
    {
        Assert.Equal(1, tx.State.Root.ReadVersion("key/1"));
    }

    var deleteBatch = new WriteBatch();
    deleteBatch.Delete("key/1", Constants.RootTreeName);
    Env.Writer.Write(deleteBatch);

    using (var tx = Env.NewTransaction(TransactionFlags.Read))
    {
        Assert.Equal(0, tx.State.Root.ReadVersion("key/1"));
    }
}
// Inserts 100k documents, then deletes every one using the version reported by a
// snapshot read. If page splits or rebalancing had bumped node versions, the
// versioned deletes would raise a version-mismatch error.
public void SplittersAndRebalancersShouldNotChangeNodeVersion()
{
    const int DocumentCount = 100000;

    var rand = new Random();
    var testBuffer = new byte[123];
    rand.NextBytes(testBuffer);

    Tree t1 = null;
    using (var tx = Env.NewTransaction(TransactionFlags.ReadWrite))
    {
        t1 = Env.CreateTree(tx, "tree1");
        tx.Commit();
    }

    var batch = new WriteBatch();
    for (var i = 0; i < DocumentCount; i++)
    {
        batch.Add("Foo" + i, new MemoryStream(testBuffer), "tree1");
    }
    Env.Writer.Write(batch);

    batch = new WriteBatch();
    using (var snapshot = Env.CreateSnapshot())
    {
        for (var i = 0; i < DocumentCount; i++)
        {
            // BUG FIX: read the version of the document being deleted ("Foo" + i),
            // not "Foo" + 1 — the original read Foo1's version for every delete,
            // defeating the per-node version check this test exists for.
            var result = snapshot.Read("tree1", "Foo" + i, null);
            batch.Delete("Foo" + i, "tree1", result.Version);
        }
    }
    Env.Writer.Write(batch);
}
// Thin pass-through: queues the stream value on the batch under this table's tree.
// shouldIgnoreConcurrencyExceptions is forwarded to WriteBatch.Add — presumably it
// suppresses version-mismatch failures for this entry; confirm against WriteBatch.Add.
public virtual void Add(WriteBatch writeBatch, Slice key, Stream value, ushort? expectedVersion = null, bool shouldIgnoreConcurrencyExceptions = false) { writeBatch.Add(key, value, TableName, expectedVersion, shouldIgnoreConcurrencyExceptions); }
// Scratch/diagnostic entry point: maps the first page of two Voron data files, then
// (in the code below the early return) recreates a storage environment at "v4",
// writes one Names entry and reads it back across environment restarts.
// NOTE(review): the bare `return;` right after acquiring the page pointers makes
// EVERYTHING after it unreachable — this looks like debugging leftover; remove the
// return (or the dead code) once the intended behavior is decided.
// NOTE(review): winPage/linPage are acquired but never used before the return, and
// the hard-coded basePath ties this to one developer's machine.
public static void Main() { var basePath = @"C:\Work\ravendb-3.0\Raven.Voron\Voron.Tryout\bin\Debug\v4"; var win = new Win32MemoryMapPager(Path.Combine(basePath, "v2", "Raven.voron")); var lin = new Win32MemoryMapPager(Path.Combine(basePath, "v2l", "Raven.voron")); var winPage = (PageHeader*)win.AcquirePagePointer(0); var linPage = (PageHeader*)lin.AcquirePagePointer(0); return; var path = "v4"; if (Directory.Exists(path)) Directory.Delete(path, true); Console.WriteLine(Process.GetCurrentProcess().Id); using (var env = new StorageEnvironment(StorageEnvironmentOptions.ForPath(path))) { var batch = new WriteBatch(); batch.Add("*****@*****.**", "Oren Eini", "Names"); env.Writer.Write(batch); using (var snp = env.CreateSnapshot()) { var reader = snp.Read("Names", "*****@*****.**"); if (reader == null) { Console.WriteLine("Couldn't find it"); } else { Console.WriteLine(reader.Reader.ToStringValue()); } } } using (var env = new StorageEnvironment(StorageEnvironmentOptions.ForPath(path))) { // using (var snp = env.CreateSnapshot()) // { // var reader = snp.Read ("Names", "*****@*****.**"); // if (reader == null) // { // Console.WriteLine ("Couldn't find it"); // } // else // { // Console.WriteLine (reader.Reader.ToStringValue()); // } // } } using (var env = new StorageEnvironment(StorageEnvironmentOptions.ForPath(path))) { using (var snp = env.CreateSnapshot()) { var reader = snp.Read("Names", "*****@*****.**"); if (reader == null) { Console.WriteLine("Couldn't find it"); } else { Console.WriteLine(reader.Reader.ToStringValue()); } } } Console.WriteLine("Done"); }
// Queues documentCount copies of testBuffer per tree into a single batch and
// writes it. Keys are deterministic (tree/index/per-call GUID) when `sequential`,
// otherwise random GUIDs.
private void AddRecords(StorageEnvironment env, IList<Tree> trees, int documentCount, byte[] testBuffer, bool sequential)
{
    var key = Guid.NewGuid().ToString();
    var batch = new WriteBatch();

    for (var i = 0; i < documentCount; i++)
    {
        foreach (var tree in trees)
        {
            string id;
            if (sequential)
                id = string.Format("tree_{0}_record_{1}_key_{2}", tree.Name, i, key);
            else
                id = Guid.NewGuid().ToString();

            batch.Add(id, new MemoryStream(testBuffer), tree.Name);
        }
    }

    env.Writer.Write(batch);
}
// Re-adding an existing key with expected version 0 must raise a
// version-mismatch error (the stored version is already 1).
public void BatchMissing()
{
    var firstWrite = new WriteBatch();
    firstWrite.Add("key/1", StreamFor("123"), Constants.RootTreeName, 0);
    Env.Writer.Write(firstWrite);

    using (var tx = Env.NewTransaction(TransactionFlags.Read))
    {
        Assert.Equal(1, tx.State.Root.ReadVersion("key/1"));
    }

    var secondWrite = new WriteBatch();
    secondWrite.Add("key/1", StreamFor("123"), Constants.RootTreeName, 0);
    var e = Assert.Throws<AggregateException>(() => Env.Writer.Write(secondWrite)).InnerException;
    Assert.Equal("Cannot add 'key/1' to 'Root' tree. Version mismatch. Expected: 0. Actual: 1.", e.Message);
}
// Two root-tree batches written from parallel tasks; every entry is verified via
// tx.State.Root afterwards.
public async Task MultipleBatchesTest()
{
    const int numberOfItems = 10000;
    var keyBatch = new WriteBatch();
    var yekBatch = new WriteBatch();

    for (var i = 0; i < numberOfItems; i++)
    {
        keyBatch.Add("key/" + i, new MemoryStream(Encoding.UTF8.GetBytes(i.ToString(CultureInfo.InvariantCulture))), Constants.RootTreeName);
        yekBatch.Add("yek/" + i, new MemoryStream(Encoding.UTF8.GetBytes(i.ToString(CultureInfo.InvariantCulture))), Constants.RootTreeName);
    }

    await Task.WhenAll(
        Task.Run(() => Env.Writer.Write(keyBatch)),
        Task.Run(() => Env.Writer.Write(yekBatch)));

    using (var tx = Env.NewTransaction(TransactionFlags.Read))
    {
        for (var i = 0; i < numberOfItems; i++)
        {
            var expected = i.ToString(CultureInfo.InvariantCulture);
            Assert.Equal(expected, tx.State.Root.Read("key/" + i).Reader.ToStringValue());
            Assert.Equal(expected, tx.State.Root.Read("yek/" + i).Reader.ToStringValue());
        }
    }
}
public async Task BatchErrorHandling()
{
    // batch2 demands version 1 for a key that does not exist yet, so its write
    // must fail; batch1 and batch3 are independent and must still succeed.
    var batch1 = new WriteBatch();
    batch1.Add("key/1", new MemoryStream(Encoding.UTF8.GetBytes("tree1")), "tree1");

    var batch2 = new WriteBatch();
    batch2.Add("key/1", new MemoryStream(Encoding.UTF8.GetBytes("tree2")), "tree2", version: 1);

    var batch3 = new WriteBatch();
    batch3.Add("key/1", new MemoryStream(Encoding.UTF8.GetBytes("tree3")), "tree3");

    using (var tx = Env.NewTransaction(TransactionFlags.ReadWrite))
    {
        Env.CreateTree(tx, "tree1");
        Env.CreateTree(tx, "tree3");
        tx.Commit();
    }

    try
    {
        await Task.WhenAll(
            Task.Run(() => Env.Writer.Write(batch1)),
            Task.Run(() => Env.Writer.Write(batch2)),
            Task.Run(() => Env.Writer.Write(batch3)));
        Assert.True(false); // batch2 must have faulted the WhenAll
    }
    catch (AggregateException e)
    {
        Assert.Equal("Cannot add 'key/1' to 'tree2' tree. Version mismatch. Expected: 1. Actual: 0.", e.InnerException.Message);

        // The failed batch must not have affected the other two writes.
        using (var tx = Env.NewTransaction(TransactionFlags.Read))
        {
            Assert.Equal("tree1", tx.Environment.CreateTree(tx, "tree1").Read("key/1").Reader.ToStringValue());
            Assert.Equal("tree3", tx.Environment.CreateTree(tx, "tree3").Read("key/1").Reader.ToStringValue());
        }
    }
}
public void SingleItemBatchTest()
{
    // A batch carrying a single entry should be readable from the root tree
    // immediately after the write completes.
    var batch = new WriteBatch();
    batch.Add("key/1", new MemoryStream(Encoding.UTF8.GetBytes("123")), Constants.RootTreeName);

    Env.Writer.Write(batch);

    using (var tx = Env.NewTransaction(TransactionFlags.Read))
    {
        var readResult = tx.Root.Read("key/1");
        Assert.Equal("123", readResult.Reader.ToStringValue());
    }
}
public void MultipleItemBatchTest()
{
    // Write 10k entries in a single batch and verify each one round-trips.
    const int numberOfItems = 10000;

    var batch = new WriteBatch();
    for (var i = 0; i < numberOfItems; i++)
    {
        var value = i.ToString(CultureInfo.InvariantCulture);
        batch.Add("key/" + i, new MemoryStream(Encoding.UTF8.GetBytes(value)), Constants.RootTreeName);
    }

    Env.Writer.Write(batch);

    using (var tx = Env.NewTransaction(TransactionFlags.Read))
    {
        for (var i = 0; i < numberOfItems; i++)
        {
            var expected = i.ToString(CultureInfo.InvariantCulture);
            Assert.Equal(expected, tx.Root.Read("key/" + i).Reader.ToStringValue());
        }
    }
}
public async Task MultipleTreesTest()
{
    // Concurrent batches targeting two different trees must not interfere.
    const int numberOfItems = 10000;

    var batch1 = new WriteBatch();
    var batch2 = new WriteBatch();

    for (var i = 0; i < numberOfItems; i++)
    {
        var payload = i.ToString(CultureInfo.InvariantCulture);
        batch1.Add("key/" + i, new MemoryStream(Encoding.UTF8.GetBytes(payload)), "tree1");
        batch2.Add("yek/" + i, new MemoryStream(Encoding.UTF8.GetBytes(payload)), "tree2");
    }

    using (var tx = Env.NewTransaction(TransactionFlags.ReadWrite))
    {
        Env.CreateTree(tx, "tree1");
        Env.CreateTree(tx, "tree2");
        tx.Commit();
    }

    await Task.WhenAll(
        Task.Run(() => Env.Writer.Write(batch1)),
        Task.Run(() => Env.Writer.Write(batch2)));

    using (var tx = Env.NewTransaction(TransactionFlags.Read))
    {
        for (var i = 0; i < numberOfItems; i++)
        {
            var expected = i.ToString(CultureInfo.InvariantCulture);
            Assert.Equal(expected, tx.Environment.CreateTree(tx, "tree1").Read("key/" + i).Reader.ToStringValue());
            Assert.Equal(expected, tx.Environment.CreateTree(tx, "tree2").Read("yek/" + i).Reader.ToStringValue());
        }
    }
}
public void WhenLastBatchOperationVersionIsNullThenVersionComesFromStorage()
{
    // Seed "tree" with foo1 so its stored version is 1.
    using (var tx = Env.NewTransaction(TransactionFlags.ReadWrite))
    {
        Env.CreateTree(tx, "tree");
        tx.Environment.CreateTree(tx, "tree").Add("foo1", StreamFor("foo1"));
        tx.Commit();
    }

    using (var writeBatch = new WriteBatch())
    using (var snapshot = Env.CreateSnapshot())
    {
        // A batched delete without an explicit version: the batch-aware read
        // should fall through to the stored version, same as the plain read.
        writeBatch.Delete("foo1", "tree");

        Assert.Equal(1, snapshot.ReadVersion("tree", "foo1", writeBatch));
        Assert.Equal(1, snapshot.ReadVersion("tree", "foo1"));

        // Same expectation for a batched add with no version argument.
        writeBatch.Add("foo1", StreamFor("123"), "tree");

        Assert.Equal(1, snapshot.ReadVersion("tree", "foo1", writeBatch));
        Assert.Equal(1, snapshot.ReadVersion("tree", "foo1"));
    }
}
public void Read_The_Same_Item_Both_WriteBatch_And_Snapshot_WriteBatch_Takes_Precedence()
{
    // Commit an initial value for foo1.
    using (var tx = Env.NewTransaction(TransactionFlags.ReadWrite))
    {
        Env.CreateTree(tx, "tree");
        tx.Environment.CreateTree(tx, "tree").Add("foo1", StreamFor("foo1"));
        tx.Commit();
    }

    using (var writeBatch = new WriteBatch())
    using (var snapshot = Env.CreateSnapshot())
    {
        // An uncommitted batch update shadows the stored value when the batch
        // is passed to Read; without it the committed value is returned.
        writeBatch.Add("foo1", StreamFor("updated foo1"), "tree");

        var withBatch = snapshot.Read("tree", "foo1", writeBatch);
        var withoutBatch = snapshot.Read("tree", "foo1");

        Assert.NotNull(withBatch);
        Assert.NotNull(withoutBatch);

        Assert.Equal(withBatch.Reader.ToStringValue(), "updated foo1");
        Assert.Equal(withoutBatch.Reader.ToStringValue(), "foo1");
    }
}
public async Task MergedBatchErrorHandling()
{
    // batch2 demands version 1 for a nonexistent key and must fail even when
    // the writer merges all three batches into one batch group.
    var batch1 = new WriteBatch();
    batch1.Add("key/1", new MemoryStream(Encoding.UTF8.GetBytes("tree1")), "tree1");

    var batch2 = new WriteBatch();
    batch2.Add("key/1", new MemoryStream(Encoding.UTF8.GetBytes("tree2")), "tree2", version: 1);

    var batch3 = new WriteBatch();
    batch3.Add("key/1", new MemoryStream(Encoding.UTF8.GetBytes("tree3")), "tree3");

    using (var tx = Env.NewTransaction(TransactionFlags.ReadWrite))
    {
        Env.CreateTree(tx, "tree1");
        Env.CreateTree(tx, "tree3");
        tx.Commit();
    }

    // Hold the writer so every batch queued before Dispose ends up merged
    // into a single batch group.
    var writesPaused = Env.Writer.StopWrites();

    var pendingWrites = new[]
    {
        Task.Run(() => Env.Writer.Write(batch1)),
        Task.Run(() => Env.Writer.Write(batch2)),
        Task.Run(() => Env.Writer.Write(batch3))
    };

    writesPaused.Dispose(); // release the writer; the merged group executes now

    try
    {
        await Task.WhenAll(pendingWrites);
        Assert.True(false); // batch2's version mismatch must surface
    }
    catch (AggregateException e)
    {
        Assert.Equal("Cannot add 'key/1' to 'tree2' tree. Version mismatch. Expected: 1. Actual: 0.", e.InnerException.Message);
    }

    // The failing batch must not poison its siblings in the merged group.
    using (var tx = Env.NewTransaction(TransactionFlags.Read))
    {
        Assert.Equal("tree1", tx.Environment.CreateTree(tx, "tree1").Read("key/1").Reader.ToStringValue());
        Assert.Equal("tree3", tx.Environment.CreateTree(tx, "tree3").Read("key/1").Reader.ToStringValue());
    }
}
private static void FillBatchReadBatchOneTransaction(Stopwatch sw, int iterations)
{
    // Benchmark: build an (unwritten) batch of empty values, then read every
    // key back through a snapshot that overlays the pending batch, all inside
    // one transaction.
    using (var env = new StorageEnvironment(StorageEnvironmentOptions.ForPath(Path)))
    {
        sw.Start();

        using (var tx = env.NewTransaction(TransactionFlags.Read))
        {
            var buffer = new byte[100];

            var batch = new WriteBatch();
            for (var i = 0; i < iterations; i++)
            {
                // Fixed-width zero-padded keys keep ordering stable.
                batch.Add(i.ToString("0000000000000000"), new MemoryStream(), null);
            }

            using (var snapshot = env.CreateSnapshot())
            {
                for (var i = 0; i < iterations; i++)
                {
                    var reader = snapshot.Read(null, i.ToString("0000000000000000"), batch).Reader;

                    // Drain the value stream; the data itself is discarded.
                    while (reader.Read(buffer, 0, buffer.Length) != 0)
                    {
                    }
                }
            }

            tx.Commit();
        }

        sw.Stop();
    }
}
public void MultipleTreesInSingleBatch()
{
    // One batch may carry entries for several trees under the same key;
    // each entry must land in its own tree.
    var batch = new WriteBatch();
    batch.Add("key/1", new MemoryStream(Encoding.UTF8.GetBytes("tree1")), "tree1");
    batch.Add("key/1", new MemoryStream(Encoding.UTF8.GetBytes("tree2")), "tree2");

    using (var tx = Env.NewTransaction(TransactionFlags.ReadWrite))
    {
        Env.CreateTree(tx, "tree1");
        Env.CreateTree(tx, "tree2");
        tx.Commit();
    }

    Env.Writer.Write(batch);

    using (var tx = Env.NewTransaction(TransactionFlags.Read))
    {
        Assert.Equal("tree1", tx.Environment.CreateTree(tx, "tree1").Read("key/1").Reader.ToStringValue());
        Assert.Equal("tree2", tx.Environment.CreateTree(tx, "tree2").Read("key/1").Reader.ToStringValue());
    }
}
private List<PerformanceRecord> WriteInternalBatch(
    string operation,
    IEnumerator<TestData> enumerator,
    long itemsPerBatch,
    long numberOfBatches,
    PerfTracker perfTracker,
    StorageEnvironment env)
{
    // Writes numberOfBatches batches of itemsPerBatch items each into "Root",
    // timing every batch and returning one PerformanceRecord per batch.
    var stopwatch = new Stopwatch();
    byte[] valueToWrite = null;
    var records = new List<PerformanceRecord>();

    for (var batchIndex = 0; batchIndex < numberOfBatches; batchIndex++)
    {
        stopwatch.Restart();

        long bytesWritten = 0;
        using (var batch = new WriteBatch())
        {
            for (var item = 0; item < itemsPerBatch; item++)
            {
                enumerator.MoveNext();

                // Reuses/resizes the value buffer according to the current item's size.
                valueToWrite = GetValueToWrite(valueToWrite, enumerator.Current.ValueSize);
                bytesWritten += valueToWrite.Length;

                batch.Add(enumerator.Current.Id.ToString("0000000000000000"), new MemoryStream(valueToWrite), "Root");
            }

            env.Writer.Write(batch);
        }

        stopwatch.Stop();

        perfTracker.Record(stopwatch.ElapsedMilliseconds);
        records.Add(new PerformanceRecord
        {
            Bytes = bytesWritten,
            Operation = operation,
            Time = DateTime.Now,
            Duration = stopwatch.ElapsedMilliseconds,
            ProcessedItems = itemsPerBatch
        });
    }

    return records;
}
public void ReadVersion_The_Same_Item_Both_WriteBatch_And_Snapshot_WriteBatch_Takes_Precedence()
{
    // Two committed writes leave foo1 at stored version 2.
    using (var tx = Env.NewTransaction(TransactionFlags.ReadWrite))
    {
        Env.CreateTree(tx, "tree");
        tx.Environment.CreateTree(tx, "tree").Add("foo1", StreamFor("foo1"));
        tx.Commit();
    }

    using (var tx = Env.NewTransaction(TransactionFlags.ReadWrite))
    {
        tx.Environment.CreateTree(tx, "tree").Add("foo1", StreamFor("updated foo1"));
        tx.Commit();
    }

    using (var writeBatch = new WriteBatch())
    using (var snapshot = Env.CreateSnapshot())
    {
        // A pending batched add at version 2 makes the batch-aware read report
        // version 3, while the plain read still sees the stored version 2.
        writeBatch.Add("foo1", StreamFor("updated foo1 2"), "tree", 2);

        var versionWithBatch = snapshot.ReadVersion("tree", "foo1", writeBatch);
        var versionWithoutBatch = snapshot.ReadVersion("tree", "foo1");

        Assert.Equal(3, versionWithBatch);
        Assert.Equal(2, versionWithoutBatch);
    }
}