/// <summary>
/// Compares the number of entries enumerated from the same on-disk checkpoint
/// database with and without RocksDb's SetTotalOrderSeek read option, and
/// prints both counts. (Historically the counts differed — see inline comment.)
/// </summary>
public async Task TestDifferentCounts()
{
    // NOTE(review): hard-coded local path — this test assumes a checkpoint has
    // been manually placed at this location.
    string databasePath = @"C:\Temp\Checkpoint\";

    var countWithSetTotalOrderSeek = await getContentOnMachine(new MachineId(121), setTotalOrderSeek: true);
    var countWithoutSetTotalOrderSeek = await getContentOnMachine(new MachineId(121), setTotalOrderSeek: false);

    // Content count with SetTotalOrderSeek: 4193451, without: 4231080
    Output.WriteLine($"Content count with SetTotalOrderSeek: {countWithSetTotalOrderSeek}, without: {countWithoutSetTotalOrderSeek}");

    // Opens the checkpoint database and counts the hashes registered to 'machineId'.
    async Task<int> getContentOnMachine(MachineId machineId, bool setTotalOrderSeek)
    {
        var db = new RocksDbContentLocationDatabase(
            TestClock,
            new RocksDbContentLocationDatabaseConfiguration(new AbsolutePath(databasePath))
            {
                CleanOnInitialize = false,
                Epoch = "DM_S220201001ReconcileTest.03312020.0",
                UseReadOptionsWithSetTotalOrderSeekInDbEnumeration = setTotalOrderSeek,
            },
            () => CollectionUtilities.EmptyArray<MachineId>());

        var context = new OperationContext(new Context(Logger));
        await db.StartupAsync(context).ThrowIfFailure();

        // Fix: enumerate for the 'machineId' argument. Previously this used a
        // hard-coded 'new MachineId(121)', silently ignoring the parameter.
        var count = db.EnumerateSortedHashesWithContentSizeForMachineId(context, currentMachineId: machineId).Count();

        await db.ShutdownAsync(context).ThrowIfFailure();
        return count;
    }
}
/// <summary>
/// Verifies that metadata garbage collection evicts entries in LRU order:
/// with a keep-limit of 1, the fingerprint whose last access time was
/// refreshed most recently survives and the older one is removed.
/// </summary>
public Task GarbageCollectionDeletesInLruOrder()
{
    var context = new Context(Logger);

    // Two strong fingerprints sharing the same weak fingerprint, each with
    // its own random content hash list.
    var weakFingerprint = Fingerprint.Random();
    var selector1 = Selector.Random();
    var strongFingerprint1 = new StrongFingerprint(weakFingerprint, selector1);
    var contentHashListWithDeterminism1 = new ContentHashListWithDeterminism(ContentHashList.Random(), CacheDeterminism.None);
    var selector2 = Selector.Random();
    var strongFingerprint2 = new StrongFingerprint(weakFingerprint, selector2);
    var contentHashListWithDeterminism2 = new ContentHashListWithDeterminism(ContentHashList.Random(), CacheDeterminism.None);

    return (RunTestAsync(context, funcAsync: async (store, session) =>
    {
        // Add the first fingerprint, then (one tick later) the second, so the
        // second initially has the newer access time.
        await session.AddOrGetContentHashListAsync(context, strongFingerprint1, contentHashListWithDeterminism1, Token).ShouldBeSuccess();
        _clock.Increment();

        await session.AddOrGetContentHashListAsync(context, strongFingerprint2, contentHashListWithDeterminism2, Token).ShouldBeSuccess();
        _clock.Increment();

        // Force update the last access time of the first fingerprint
        await session.GetContentHashListAsync(context, strongFingerprint1, Token).ShouldBeSuccess();
        _clock.Increment();

        RocksDbContentLocationDatabase database = (store as RocksDbMemoizationStore)?.RocksDbDatabase;
        Contract.Assert(database != null);

        var ctx = new OperationContext(context);
        await database.GarbageCollectAsync(ctx).ShouldBeSuccess();

        // Fingerprint 1 was accessed last, so it must survive GC intact.
        var r1 = database.GetContentHashList(ctx, strongFingerprint1).ShouldBeSuccess().ContentHashListWithDeterminism;
        r1.Should().BeEquivalentTo(contentHashListWithDeterminism1);

        // Fingerprint 2 is now the LRU entry and must have been evicted.
        var r2 = database.GetContentHashList(ctx, strongFingerprint2).ShouldBeSuccess().ContentHashListWithDeterminism;
        r2.ContentHashList.Should().BeNull();
        r2.Determinism.Should().Be(CacheDeterminism.None);

        // GC scanned both entries and removed exactly one.
        database.Counters[ContentLocationDatabaseCounters.GarbageCollectMetadataEntriesRemoved].Value.Should().Be(1);
        database.Counters[ContentLocationDatabaseCounters.GarbageCollectMetadataEntriesScanned].Value.Should().Be(2);
    }, createStoreFunc: createStoreInternal));

    // This is needed because type errors arise if you inline
    IMemoizationStore createStoreInternal(DisposableDirectory disposableDirectory)
    {
        return (CreateStore(testDirectory: disposableDirectory, configMutator: (configuration) =>
        {
            configuration.MetadataGarbageCollectionEnabled = true;
            // Keep only the single most-recently-used entry.
            configuration.MetadataGarbageCollectionMaximumNumberOfEntriesToKeep = 1;
            // Disables automatic GC
            configuration.GarbageCollectionInterval = Timeout.InfiniteTimeSpan;
        }));
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="RocksDbMemoizationStore"/> class.
/// </summary>
/// <param name="logger">Logger used by the store's tracer. Must not be null.</param>
/// <param name="clock">Clock forwarded to the backing database. Must not be null.</param>
/// <param name="config">Store configuration; its <c>Database</c> section configures the backing database. Must not be null.</param>
public RocksDbMemoizationStore(ILogger logger, IClock clock, RocksDbMemoizationStoreConfiguration config)
{
    Contract.Requires(logger != null);
    Contract.Requires(config != null);
    Contract.Requires(clock != null);

    _tracer = new MemoizationStoreTracer(logger, Component);
    _clock = clock;
    // Array.Empty avoids allocating a fresh empty array on every call of the
    // machine-id factory (previously 'new MachineId[] { }').
    _database = new RocksDbContentLocationDatabase(clock, config.Database, () => Array.Empty<MachineId>());
}
/// <summary>
/// Demonstrates that some hashes are missing when the database is enumerated
/// without RocksDb's SetTotalOrderSeek read option: each known-missing hash is
/// checked against enumerations taken with and without the option, and the
/// membership results plus totals are printed.
/// </summary>
public async Task TestMissingHashes()
{
    // This test demonstrates that some hashes are missing when we enumerate without setting SetTotalOrderSeek.
    var missingHashes = new string[]
    {
        "VSO0:004206D0C9111C8494CBAC",
        "VSO0:004214AD89057251C75AB4",
        "VSO0:0042213A319603AEADDACA",
        "VSO0:00422E765196DDCEC5DDCB",
        "VSO0:00424E130937109ACC9FE1",
        "VSO0:00425A5011BC79F9A7B051",
        "VSO0:0042674A7B2782F0F4BEE4",
        "VSO0:004268BF4380D7CEA4786E",
        "VSO0:00426D1DAA182A6D265461",
        "VSO0:0042866D19EB33B8E11C8E",
    }.Select(str => ParseShortHash(str)).ToHashSet();

    // Enumeration can stop once we pass the largest hash of interest.
    var maxHash = missingHashes.Max();

    // NOTE(review): hard-coded local path — assumes a manually placed checkpoint.
    string databasePath = @"C:\Temp\Checkpoint\";

    var hashesWithoutSetTotalOrderSeek = await getContentOnMachine(new MachineId(121), setTotalOrderSeek: false);
    var hashesWithSetTotalOrderSeek = await getContentOnMachine(new MachineId(121), setTotalOrderSeek: true);

    foreach (var h in missingHashes)
    {
        // Hash: VSO0: 0042866D19EB33B8E11C, Contains(TotalOrderSeek = True): True, Contains(TotalOrderSeek = false): False
        bool db1Contains = hashesWithSetTotalOrderSeek.Contains(h);
        bool db2Contains = hashesWithoutSetTotalOrderSeek.Contains(h);
        Output.WriteLine($"Hash: {h}, Contains (TotalOrderSeek=True): {db1Contains}, Contains (TotalOrderSeek=false): {db2Contains}");
    }

    Output.WriteLine($"Content count with SetTotalOrderSeek: {hashesWithSetTotalOrderSeek.Count}, without: {hashesWithoutSetTotalOrderSeek.Count}");

    // Opens the checkpoint database and collects the hashes registered to
    // 'machineId', up to and including 'maxHash'.
    async Task<HashSet<ShortHash>> getContentOnMachine(MachineId machineId, bool setTotalOrderSeek)
    {
        var db = new RocksDbContentLocationDatabase(
            TestClock,
            new RocksDbContentLocationDatabaseConfiguration(new AbsolutePath(databasePath))
            {
                CleanOnInitialize = false,
                Epoch = "DM_S220201001ReconcileTest.03312020.0",
                UseReadOptionsWithSetTotalOrderSeekInDbEnumeration = setTotalOrderSeek,
            },
            () => CollectionUtilities.EmptyArray<MachineId>());

        var context = new OperationContext(new Context(Logger));
        await db.StartupAsync(context).ThrowIfFailure();

        // Fix: enumerate for the 'machineId' argument. Previously this used a
        // hard-coded 'new MachineId(121)', silently ignoring the parameter.
        // '< || ==' is kept as-is: ShortHash may not define 'operator <='.
        var hashes =
            db.EnumerateSortedHashesWithContentSizeForMachineId(context, currentMachineId: machineId)
              .TakeWhile(h => h.hash < maxHash || h.hash == maxHash)
              .Select(h => h.hash)
              .ToHashSet();

        await db.ShutdownAsync(context).ThrowIfFailure();
        return hashes;
    }
}
/// <summary>
/// Creates a prediction store rooted at <paramref name="storeLocation"/>, backed by a
/// RocksDb content location database stored in a "db" subdirectory.
/// </summary>
/// <param name="storeLocation">Root directory for the store.</param>
/// <param name="clean">When true, the database directory is wiped on initialization.</param>
public RocksDbContentPlacementPredictionStore(string storeLocation, bool clean)
{
    _storeLocation = new AbsolutePath(storeLocation);

    // Automatic garbage collection is disabled (infinite interval); cluster
    // state is persisted alongside the content entries.
    var config = new RocksDbContentLocationDatabaseConfiguration(_storeLocation / "db")
    {
        StoreClusterState = true,
        CleanOnInitialize = clean,
        GarbageCollectionInterval = Timeout.InfiniteTimeSpan
    };

    _clusterState = new ClusterState();
    _database = new RocksDbContentLocationDatabase(SystemClock.Instance, config, () => new List<MachineId>());
}
/// <summary>
/// Verifies that metadata garbage collection removes entries older than the
/// protection time: the first fingerprint (added one tick earlier) is evicted
/// while the second, added at the current clock value, survives.
/// </summary>
public Task GarbageCollectionRuns()
{
    var context = new Context(Logger);

    // Two strong fingerprints sharing the same weak fingerprint, each with
    // its own random content hash list.
    var weakFingerprint = Fingerprint.Random();
    var selector1 = Selector.Random();
    var selector2 = Selector.Random();
    var strongFingerprint1 = new StrongFingerprint(weakFingerprint, selector1);
    var strongFingerprint2 = new StrongFingerprint(weakFingerprint, selector2);
    var contentHashListWithDeterminism1 = new ContentHashListWithDeterminism(ContentHashList.Random(), CacheDeterminism.None);
    var contentHashListWithDeterminism2 = new ContentHashListWithDeterminism(ContentHashList.Random(), CacheDeterminism.None);

    return (RunTestAsync(context, funcAsync: async (store, session) =>
    {
        // The first entry is one clock tick older than the second, pushing it
        // past the 1ms protection time configured below.
        await session.AddOrGetContentHashListAsync(context, strongFingerprint1, contentHashListWithDeterminism1, Token).ShouldBeSuccess();
        _clock.Increment();

        // Notice we don't increment the clock here
        await session.AddOrGetContentHashListAsync(context, strongFingerprint2, contentHashListWithDeterminism2, Token).ShouldBeSuccess();

        RocksDbContentLocationDatabase database = (store as RocksDbMemoizationStore)?.Database;
        Contract.Assert(database != null);

        var ctx = new OperationContext(context);
        database.GarbageCollect(ctx);

        // The older entry must have been evicted by GC.
        var r1 = database.GetContentHashList(ctx, strongFingerprint1).ShouldBeSuccess().ContentHashListWithDeterminism;
        r1.ContentHashList.Should().BeNull();
        r1.Determinism.Should().Be(CacheDeterminism.None);

        // The fresh entry must survive intact.
        var r2 = database.GetContentHashList(ctx, strongFingerprint2).ShouldBeSuccess().ContentHashListWithDeterminism;
        r2.Should().BeEquivalentTo(contentHashListWithDeterminism2);
    }, createStoreFunc: createStoreInternal));

    // This is needed because type errors arise if you inline
    IMemoizationStore createStoreInternal(DisposableDirectory disposableDirectory)
    {
        return (CreateStore(testDirectory: disposableDirectory, configMutator: (configuration) =>
        {
            configuration.MetadataGarbageCollectionEnabled = true;
            // Entries become eligible for eviction after 1ms.
            configuration.MetadataGarbageCollectionProtectionTime = TimeSpan.FromMilliseconds(1);
            // Disables automatic GC
            configuration.GarbageCollectionInterval = Timeout.InfiniteTimeSpan;
        }));
    }
}
/// <summary>
/// Benchmarks the fast reconciliation path (machine-id filtering, no entry
/// deserialization) against a local checkpoint database and prints the elapsed
/// time plus the added/removed content counts.
/// </summary>
public async Task MimicReconcileWithMachineIdFilteringWithNoDeserialization()
{
    // NOTE(review): hard-coded user-specific path — assumes a manually placed checkpoint.
    string databasePath = @"C:\Users\seteplia\AppData\Local\Temp\CloudStore\DatabaseTest\";

    var db = new RocksDbContentLocationDatabase(
        TestClock,
        new RocksDbContentLocationDatabaseConfiguration(new AbsolutePath(databasePath)) { CleanOnInitialize = false },
        () => CollectionUtilities.EmptyArray<MachineId>());

    var context = new OperationContext(new Context(Logger));
    await db.StartupAsync(context).ThrowIfFailure();

    var sw = Stopwatch.StartNew();
    var reconcile = MimicFastReconcileLogic(context, machineId: 42, db);
    sw.Stop();

    Output.WriteLine($"Reconcile by {sw.ElapsedMilliseconds}ms. Added: {reconcile.addedContent.Count}, removed: {reconcile.removedContent.Count}");

    // Fix: shut the database down so RocksDb releases its file handles/locks.
    // Previously this test started the database but never shut it down,
    // unlike the sibling tests in this file.
    await db.ShutdownAsync(context).ThrowIfFailure();
}
/// <summary>
/// Benchmarks the original (slower) reconciliation path that enumerates every
/// key in the database, and prints the elapsed time plus the added/removed
/// content counts.
/// </summary>
public async Task MimicReconciliationWithFullDatabaseEnumerationByKeys()
{
    // This is an original and slower version of reconciliation.
    // NOTE(review): hard-coded local path — assumes a manually placed checkpoint.
    string databasePath = @"C:\Temp\Checkpoint\";

    var db = new RocksDbContentLocationDatabase(
        TestClock,
        new RocksDbContentLocationDatabaseConfiguration(new AbsolutePath(databasePath)) { CleanOnInitialize = false },
        () => CollectionUtilities.EmptyArray<MachineId>());

    var context = new OperationContext(new Context(Logger));
    await db.StartupAsync(context).ThrowIfFailure();

    var sw = Stopwatch.StartNew();
    var reconcile = MimicOldReconcile(context, machineId: 121, db);
    sw.Stop();

    Output.WriteLine($"Reconcile by {sw.ElapsedMilliseconds}ms. Added: {reconcile.addedContent.Count}, removed: {reconcile.removedContent.Count}");

    // Fix: shut the database down so RocksDb releases its file handles/locks.
    // Previously this test started the database but never shut it down,
    // unlike the sibling tests in this file.
    await db.ShutdownAsync(context).ThrowIfFailure();
}
/// <summary>
/// Wraps an existing <see cref="RocksDbMemoizationDatabase"/> and exposes its
/// underlying <see cref="RocksDbContentLocationDatabase"/> via <c>RocksDbDatabase</c>.
/// </summary>
public RocksDbMemoizationStore(ILogger logger, RocksDbMemoizationDatabase database)
    : base(database) =>
    RocksDbDatabase = (RocksDbContentLocationDatabase)database.Database;
private static (List <ShortHash> removedContent, List <ShortHashWithSize> addedContent) MimicOldReconcile(OperationContext context, int machineId, RocksDbContentLocationDatabase db) { var dbContent = GetSortedDatabaseEntriesWithLocalLocationOld(context, db, machineId); // Diff the two views of the local machines content (left = local store, right = content location db) // Then send changes as events (ShortHash hash, long size)[] allLocalStoreContent = new (ShortHash hash, long size)[0];
/// <summary>
/// Yields the (hash, size) pairs for entries registered to the machine at
/// <paramref name="index"/>. Currently a no-op placeholder: the original source,
/// db.EnumerateSortedKeys(context), became private, so the loop runs over an
/// empty set and is kept only as a template for future work.
/// </summary>
private static IEnumerable<(ShortHash hash, long size)> GetSortedDatabaseEntriesWithLocalLocationOld(OperationContext context, RocksDbContentLocationDatabase db, int index)
{
    // Empty stand-in for the old key enumeration.
    var hashSource = new ShortHash[] { };

    foreach (var hash in hashSource)
    {
        if (!db.TryGetEntry(context, hash, out var entry))
        {
            continue;
        }

        if (entry.Locations[index])
        {
            // Entry is present on the local machine.
            yield return (hash, entry.ContentSize);
        }
    }
}
/// <summary>
/// Walks every key in the database in sorted order and yields the (hash, size)
/// pairs for entries whose location bitmask marks the machine at
/// <paramref name="index"/> as present.
/// </summary>
private static IEnumerable<(ShortHash hash, long size)> GetSortedDatabaseEntriesWithLocalLocationOld(OperationContext context, RocksDbContentLocationDatabase db, int index)
{
    foreach (var hash in db.EnumerateSortedKeys(context.Token))
    {
        if (!db.TryGetEntry(context, hash, out var entry))
        {
            continue;
        }

        if (entry.Locations[index])
        {
            // Entry is present on the local machine.
            yield return (hash, entry.ContentSize);
        }
    }
}
/// <summary>
/// Creates a memoization database backed by the given
/// <see cref="RocksDbContentLocationDatabase"/>.
/// </summary>
public RocksDbMemoizationDatabase(RocksDbContentLocationDatabase database)
{
    Database = database;
    Tracer = new Tracer(nameof(RocksDbMemoizationDatabase));
}