// Runs the randomized load test against an already-started cache: issues waves of requests,
// then reports per-status latency percentiles and a histogram of log messages.
private static async Task TestRandomInner(HybridCache cache, TestParams options, ITestLoggerFactory loggerFactory, CancellationToken cancellationToken)
{
    var data = new byte[options.FileSize];
    var dataSegment = new ArraySegment<byte>(data);
    var contentType = "application/octet-stream";

    // Every cache miss returns the same pre-allocated payload, optionally delaying
    // (async and/or blocking) to simulate the cost of generating the content.
    async Task<Tuple<string, ArraySegment<byte>>> DataProvider(CancellationToken token)
    {
        if (options.CreationTaskDelay.Ticks > 0)
        {
            await Task.Delay(options.CreationTaskDelay, cancellationToken);
        }
        if (options.CreationThreadSleep.Ticks > 0)
        {
            Thread.Sleep(options.CreationThreadSleep);
        }
        return new Tuple<string, ArraySegment<byte>>(contentType, dataSegment);
    }

    var random = new Random(options.Seed);
    var tasks = new List<Task<Tuple<TimeSpan, string>>>();

    var swTotal = Stopwatch.StartNew();
    for (var wave = 0; wave < options.RequestWaves; wave++)
    {
        if (wave != 0)
        {
            Thread.Sleep(options.RequestWavesIntermission);
        }
        Console.Write("Wave {0}, {1} requests...", wave + 1, options.RequestCountPerWave);
        var sw = Stopwatch.StartNew();
        var memoryStreamManager =
            new RecyclableMemoryStreamManager(Math.Max(2, options.FileSize), 2, options.FileSize * 2 + 2);
        for (var ix = 0; ix < options.RequestCountPerWave; ix++)
        {
            // Each request fetches a random key and fully reads the returned stream,
            // timing the end-to-end operation.
            Func<Task<Tuple<TimeSpan, string>>> task = async () =>
            {
                var whichFile = random.Next(options.FileCount);
                var key = BitConverter.GetBytes(whichFile);
                var itemSw = Stopwatch.StartNew();
                var cacheResult = await cache.GetOrCreateBytes(key, DataProvider, cancellationToken,
                    options.RetrieveContentType);
                if (cacheResult.Data != null)
                {
                    await using (cacheResult.Data)
                    {
                        await using var ms = memoryStreamManager.GetStream();
                        await cacheResult.Data.CopyToAsync(ms, cancellationToken);
                    }
                }
                itemSw.Stop();
                return new Tuple<TimeSpan, string>(itemSw.Elapsed, cacheResult.Status);
            };
            if (options.Synchronous)
            {
                var result = await task();
                tasks.Add(Task.FromResult(result));
            }
            else
            {
                tasks.Add(Task.Run(task, cancellationToken));
            }
        }

        await Task.WhenAll(tasks);
        sw.Stop();

        // Drain the queued background writes and time them separately from the request wave.
        var swAsync = Stopwatch.StartNew();
        await cache.AwaitEnqueuedTasks();
        swAsync.Stop();
        Console.WriteLine("completed in {0}, plus {1} for async tasks.", sw.Elapsed, swAsync.Elapsed);
        PrintDiskUtilization(options);
    }
    swTotal.Stop();
    Console.WriteLine("Completed all waves in {0}", swTotal.Elapsed);
    Console.WriteLine();

    // Accumulate results: count occurrences and collect latencies per result status.
    var resultCounts = new Dictionary<string, int>();
    var resultTimes = new Dictionary<string, List<TimeSpan>>();
    foreach (var t in tasks)
    {
        var key = t.Result.Item2;
        if (resultCounts.TryGetValue(key, out var value))
        {
            resultCounts[key] = value + 1;
        }
        else
        {
            resultCounts[key] = 1;
            resultTimes[key] = new List<TimeSpan>();
        }
        resultTimes[key].Add(t.Result.Item1);
    }

    foreach (var pair in resultCounts.OrderByDescending(p => p.Value))
    {
        Console.WriteLine(
            "{0} - {1} occurrences - {2}kb - 1st percentile = {3} 5th percentile = {4} 50th percentile = {5} 95th percentile = {6} 99th percentile = {7}",
            pair.Key, pair.Value, pair.Value * options.FileSize / 1000,
            GetPercentile(resultTimes[pair.Key], 0.01f),
            GetPercentile(resultTimes[pair.Key], 0.05f),
            GetPercentile(resultTimes[pair.Key], 0.5f),
            GetPercentile(resultTimes[pair.Key], 0.95f),
            GetPercentile(resultTimes[pair.Key], 0.99f));
    }

    // Summarize log entries by their message template.
    var logCounts = new Dictionary<string, int>();
    var logEntryCount = 0;
    foreach (var e in loggerFactory.Sink.LogEntries)
    {
        logEntryCount++;
        var key = e.OriginalFormat;
        if (logCounts.TryGetValue(key, out var value))
        {
            logCounts[key] = value + 1;
        }
        else
        {
            logCounts[key] = 1;
        }
    }
    foreach (var pair in logCounts)
    {
        var percent = pair.Value * 100.0 / logEntryCount;
        Console.WriteLine("{0:00.00}% ({1} of {2}) log entries were {3}", percent, pair.Value, logEntryCount, pair.Key);
    }
}
// End-to-end smoke test: the first request for a key should report WriteSucceeded, repeat
// requests should report DiskHit, and a null content type should round-trip as null.
public async void SmokeTest()
{
    var cancellationToken = CancellationToken.None;
    var path = Path.Combine(Path.GetTempPath(), $"{Guid.NewGuid()}");
    Directory.CreateDirectory(path);
    var cacheOptions = new HybridCacheOptions(path)
    {
        AsyncCacheOptions = new AsyncCacheOptions()
        {
            MaxQueuedBytes = 0
        }
    };
    var database = new MetaStore.MetaStore(new MetaStoreOptions(path), cacheOptions, null);
    HybridCache cache = new HybridCache(database, cacheOptions, null);
    try
    {
        await cache.StartAsync(cancellationToken);

        var key = new byte[] { 0, 1, 2, 3 };
        var contentType = "application/octet-stream";

        Task<Tuple<string, ArraySegment<byte>>> DataProvider(CancellationToken token)
        {
            return Task.FromResult(
                new Tuple<string, ArraySegment<byte>>(contentType, new ArraySegment<byte>(new byte[4000])));
        }

        // First request misses and writes the payload to disk.
        var result = await cache.GetOrCreateBytes(key, DataProvider, cancellationToken, true);
        Assert.Equal("WriteSucceeded", result.Status);

        // Subsequent requests for the same key should hit the disk cache.
        var result2 = await cache.GetOrCreateBytes(key, DataProvider, cancellationToken, true);
        Assert.Equal("DiskHit", result2.Status);
        Assert.Equal(contentType, result2.ContentType);
        Assert.NotNull(result2.Data);

        await cache.AsyncCache.AwaitEnqueuedTasks();

        var result3 = await cache.GetOrCreateBytes(key, DataProvider, cancellationToken, true);
        Assert.Equal("DiskHit", result3.Status);
        Assert.Equal(contentType, result3.ContentType);
        Assert.NotNull(result3.Data);

        // A provider that returns a null content type should round-trip the null.
        var key2 = new byte[] { 2, 1, 2, 3 };
        Task<Tuple<string, ArraySegment<byte>>> DataProvider2(CancellationToken token)
        {
            return Task.FromResult(
                new Tuple<string, ArraySegment<byte>>(null, new ArraySegment<byte>(new byte[4000])));
        }

        var result4 = await cache.GetOrCreateBytes(key2, DataProvider2, cancellationToken, true);
        Assert.Equal("WriteSucceeded", result4.Status);

        var result5 = await cache.GetOrCreateBytes(key2, DataProvider, cancellationToken, true);
        Assert.Equal("DiskHit", result5.Status);
        Assert.Null(result5.ContentType);
        Assert.NotNull(result5.Data);
    }
    finally
    {
        try
        {
            await cache.StopAsync(cancellationToken);
        }
        finally
        {
            Directory.Delete(path, true);
        }
    }
}
// Creates a temporary cache directory, then starts and stops the cache options.RebootCount
// times, running the randomized load test after each start and optionally dumping log entries.
private static async Task TestRandom(TestParams options, CancellationToken cancellationToken)
{
    if (cancellationToken.IsCancellationRequested)
    {
        throw new OperationCanceledException(cancellationToken);
    }

    var path = Path.Combine(Path.GetTempPath(), $"{Guid.NewGuid()}");
    Directory.CreateDirectory(path);
    Console.WriteLine($"Created cache directory {path}");
    try
    {
        options.CacheOptions.PhysicalCacheDir = path;
        options.MetaStoreOptions.DatabaseDir = path;
        for (var reboot = 0; reboot < options.RebootCount; reboot++)
        {
            Console.WriteLine($"------------- Cache Reboot {reboot} ---------------");

            var loggerFactory = TestLoggerFactory.Create();
            var logger = loggerFactory.CreateLogger<HybridCache>();
            ICacheDatabase database = new MetaStore.MetaStore(options.MetaStoreOptions, options.CacheOptions, logger);
            HybridCache cache = new HybridCache(database, options.CacheOptions, logger);
            try
            {
                Console.Write("Starting cache...");
                var swStart = Stopwatch.StartNew();
                await cache.StartAsync(cancellationToken);
                swStart.Stop();
                Console.Write($"ready in {swStart.Elapsed}\r\n");

                await TestRandomInner(cache, options, loggerFactory, cancellationToken);

                if (options.DisplayLog)
                {
                    var logs = loggerFactory.Sink.LogEntries.ToArray();
                    // Print up to MaxLogEntries from the head of the log, then the tail.
                    int firstLogIndex = logs.Length - Math.Min(options.MaxLogEntries, logs.Length);
                    int lastLogIndex = Math.Min(firstLogIndex, options.MaxLogEntries);
                    if (lastLogIndex > 0)
                    {
                        Console.WriteLine($"========== LOG ENTRIES 0..{lastLogIndex} ===============");
                    }
                    for (var ix = 0; ix < lastLogIndex; ix++)
                    {
                        Console.WriteLine(logs[ix].Message);
                    }
                    Console.WriteLine($"========== LOG ENTRIES {firstLogIndex}..{logs.Length} ===============");
                    for (var ix = firstLogIndex; ix < logs.Length; ix++)
                    {
                        Console.WriteLine(logs[ix].Message);
                    }
                    Console.WriteLine("============== END LOGS ===============");
                }
            }
            finally
            {
                Console.Write("Stopping cache...");
                var swStop = Stopwatch.StartNew();
                await cache.StopAsync(cancellationToken);
                swStop.Stop();
                Console.Write($"stopped in {swStop.Elapsed}\r\n");
            }
        }
        if (options.WaitForKeypress)
        {
            Console.WriteLine("Press any key to continue");
            Console.ReadKey();
        }
    }
    finally
    {
        Console.WriteLine("Deleting cache from disk...");
        Directory.Delete(path, true);
        Console.WriteLine("Cache deleted");
    }
}
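// A minimal sketch of a console entry point that drives TestRandom, assuming it lives in the
// same class as the methods above. The TestParams property names come from the usages above;
// the values, the settability of the properties, and the assumption that TestParams supplies
// default CacheOptions/MetaStoreOptions instances are all illustrative, not taken from the source.
public static async Task Main(string[] args)
{
    var options = new TestParams
    {
        FileSize = 64 * 1024,
        FileCount = 1000,
        RequestWaves = 5,
        RequestCountPerWave = 500,
        RequestWavesIntermission = TimeSpan.FromMilliseconds(100),
        CreationTaskDelay = TimeSpan.FromMilliseconds(10),
        CreationThreadSleep = TimeSpan.Zero,
        RebootCount = 2,
        Seed = 1,
        Synchronous = false,
        RetrieveContentType = true,
        DisplayLog = false,
        MaxLogEntries = 50,
        WaitForKeypress = false
    };
    // TestRandom creates and deletes its own temporary cache directory.
    await TestRandom(options, CancellationToken.None);
}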