public static Task CleanSubspace(IFdbDatabase db, IKeySubspace subspace, CancellationToken ct)
{
    Assert.That(subspace, Is.Not.Null, "null subspace");
    Assert.That(subspace.GetPrefix(), Is.Not.EqualTo(Slice.Empty), "Cannot clean the root of the database!");
    return db.WriteAsync(tr => tr.ClearRange(subspace.ToRange()), ct);
}
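For context, a minimal sketch of how such a helper might be called from a test fixture so that each test starts from an empty subspace. The fixture fields, the NUnit attributes, and the assumption that CleanSubspace is in scope (e.g. defined in the same test class) are illustrative, not part of the original snippet:

using System.Threading;
using System.Threading.Tasks;
using FoundationDB.Client;
using NUnit.Framework;

[TestFixture]
public class MyLayerFacts
{
    // both fields are assumed to be initialized in a one-time setup (not shown)
    private IFdbDatabase _db;
    private IKeySubspace _testSubspace; // a non-root subspace reserved for tests

    [SetUp]
    public Task ResetState()
    {
        // wipe only the keys under the test subspace; the helper refuses to clear the database root
        return CleanSubspace(_db, _testSubspace, CancellationToken.None);
    }
}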
private static async Task BurnerThread(IFdbDatabase db, CancellationToken ct)
{
    var folder = await db.Directory.CreateOrOpenAsync(new[] { "Benchmarks", "Burner", "Sequential" }, ct);

    await db.WriteAsync((tr) => tr.ClearRange(folder), ct);

    long pos = 0;

    Random rnd;
    lock (Rnd)
    {
        rnd = new Random(Rnd.Next());
    }

    using (var tr = db.BeginTransaction(ct))
    {
        while (!ct.IsCancellationRequested)
        {
            FdbException error = null;
            try
            {
                tr.Reset();

                for (int i = 0; i < N; i++)
                {
                    long x = Randomized ? rnd.Next() : pos + i;
                    tr.Set(folder.Keys.Encode(x, Suffix), Value);
                    Interlocked.Increment(ref Keys);
                }
                pos += N;

                await tr.CommitAsync();
                Interlocked.Increment(ref Transactions);
                Interlocked.Add(ref Bytes, tr.Size);
            }
            catch (FdbException e)
            {
                error = e;
            }

            if (error != null && !ct.IsCancellationRequested)
            {
                await tr.OnErrorAsync(error.Code);
            }
        }
    }
}
/// <summary>
/// Simulate a worker that is really indecisive
/// </summary>
public async Task RunWorker(IFdbDatabase db, int id, CancellationToken ct)
{
    string student = "WORKER" + id.ToString("D04");

    var rnd = new Random(id * 7);
    var values = new string[this.M];
    for (int i = 0; i < values.Length; i++)
    {
        values[i] = "initial_value_" + rnd.Next();
    }

    var location = this.Subspace.Partition.ByKey(student);

    for (int i = 0; i < 1 /*this.N*/ && !ct.IsCancellationRequested; i++)
    {
        // randomly mutate values
        var n = rnd.Next(values.Length / 2);
        for (int j = 0; j < n; j++)
        {
            values[rnd.Next(values.Length)] = "value_" + i.ToString() + "_" + rnd.Next().ToString();
        }

        long now = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond;

        // write everything
        await db.WriteAsync((tr) =>
        {
            if (tr.Context.Retries > 0) Console.Write("!");
            for (int j = 0; j < values.Length; j++)
            {
                tr.Set(location.Keys.Encode(j, now), Slice.FromString(values[j] + new string('A', 100)));
            }
        }, ct);
        Console.Write(".");

        //var r = await db.ReadAsync(async (tr) =>
        //{
        //    if (tr.Context.Retries > 0) Console.Write("!");
        //    return await Task.WhenAll(Enumerable.Range(0, values.Length).Select(x => tr.GetRange(FdbKeyRange.StartsWith(prefix.Pack(x))).LastOrDefaultAsync()));
        //}, ct);

        //if (i % 10 == 0) Console.Write(":");

        //await Task.Delay(this.Delay);
    }

    ct.ThrowIfCancellationRequested();
}
/// <summary>
/// Setup the initial state of the database
/// </summary>
public async Task Init(IFdbDatabase db, CancellationToken ct)
{
    // open the folder where we will store everything
    this.Subspace = await db.Directory.CreateOrOpenAsync(new[] { "Benchmarks", "LeakTest" }, cancellationToken: ct);

    // clear all previous values
    await db.ClearRangeAsync(this.Subspace, ct);

    // insert the BEGIN/END boundary markers
    await db.WriteAsync((tr) =>
    {
        tr.Set(this.Subspace.Concat(FdbKey.MinValue), Slice.FromString("BEGIN"));
        tr.Set(this.Subspace.Concat(FdbKey.MaxValue), Slice.FromString("END"));
    }, ct);
}
/// <summary>
/// Setup the initial state of the database
/// </summary>
public async Task Init(IFdbDatabase db, CancellationToken ct)
{
    // open the folder where we will store everything
    this.Subspace = await db.Directory.CreateOrOpenAsync(new[] { "Benchmarks", "LeakTest" }, cancellationToken: ct);

    // clear all previous values
    await db.ClearRangeAsync(this.Subspace, ct);

    // insert the BEGIN/END boundary markers
    await db.WriteAsync((tr) =>
    {
        tr.Set(this.Subspace.Key + FdbKey.MinValue, Slice.FromString("BEGIN"));
        tr.Set(this.Subspace.Key + FdbKey.MaxValue, Slice.FromString("END"));
    }, ct);
}
/// <summary>
/// Simulate a worker that is really indecisive
/// </summary>
public async Task RunWorker(IFdbDatabase db, int id, CancellationToken ct)
{
    string student = "WORKER" + id.ToString("D04");

    var rnd = new Random(id * 7);
    var values = new string[this.M];
    for (int i = 0; i < values.Length; i++)
    {
        values[i] = "initial_value_" + rnd.Next();
    }

    var prefix = this.Subspace.Partition(student);

    for (int i = 0; i < 1 /*this.N*/ && !ct.IsCancellationRequested; i++)
    {
        // randomly mutate values
        var n = rnd.Next(values.Length / 2);
        for (int j = 0; j < n; j++)
        {
            values[rnd.Next(values.Length)] = "value_" + i.ToString() + "_" + rnd.Next().ToString();
        }

        long now = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond;

        // write everything
        await db.WriteAsync((tr) =>
        {
            if (tr.Context.Retries > 0) Console.Write("!");
            for (int j = 0; j < values.Length; j++)
            {
                tr.Set(prefix.Pack(j, now), Slice.FromString(values[j] + new string('A', 100)));
            }
        }, ct);
        Console.Write(".");

        //var r = await db.ReadAsync(async (tr) =>
        //{
        //    if (tr.Context.Retries > 0) Console.Write("!");
        //    return await Task.WhenAll(Enumerable.Range(0, values.Length).Select(x => tr.GetRange(FdbKeyRange.StartsWith(prefix.Pack(x))).LastOrDefaultAsync()));
        //}, ct);

        //if (i % 10 == 0) Console.Write(":");

        //await Task.Delay(this.Delay);
    }

    ct.ThrowIfCancellationRequested();
}
public static Task ChangeDirectoryLayer(FdbPath path, string layer, IVarTuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct)
{
    return db.WriteAsync(async tr =>
    {
        var dir = await BasicCommands.TryOpenCurrentDirectoryAsync(tr, path);
        if (dir == null)
        {
            Program.Error(log, $"# Directory {path} does not exist anymore");
        }
        else
        {
            dir = await dir.ChangeLayerAsync(tr, layer);
            Program.Success(log, $"# Directory {path} layer changed to {dir.Layer}");
        }
    }, ct);
}
/// <summary>
/// Setup the initial state of the database
/// </summary>
public async Task Init(IFdbDatabase db, CancellationToken ct)
{
    // open the folder where we will store everything
    this.Subspace = await db.Directory.CreateOrOpenAsync(new[] { "Tutorials", "ClassScheduling" }, ct: ct);

    // clear all previous values
    await db.ClearRangeAsync(this.Subspace, ct);

    // insert all the classes
    await db.WriteAsync((tr) =>
    {
        foreach (var c in this.ClassNames)
        {
            tr.Set(ClassKey(c), Slice.FromStringAscii("100"));
        }
    }, ct);
}
/// <summary>
/// Setup the initial state of the database
/// </summary>
public async Task Init(IFdbDatabase db, CancellationToken ct)
{
    // open the folder where we will store everything
    this.Subspace = await db.Directory.CreateOrOpenAsync(new[] { "Tutorials", "ClassScheduling" }, cancellationToken: ct);

    // clear all previous values
    await db.ClearRangeAsync(this.Subspace, ct);

    // insert all the classes
    await db.WriteAsync((tr) =>
    {
        foreach (var c in this.ClassNames)
        {
            tr.Set(ClassKey(c), Slice.FromAscii("100"));
        }
    }, ct);
}
private Task Coalesce(IFdbDatabase db, int N, CancellationToken ct)
{
    return db.WriteAsync(async tr =>
    {
        long total = 0;

        var subspace = await this.Location.Resolve(tr);
        if (subspace == null)
        {
            throw new InvalidOperationException($"Location '{this.Location}' referenced by High Contention Counter Layer was not found.");
        }

        // read N writes from a random place in ID space
        var loc = subspace.Encode(RandomId());

        bool right;
        lock (this.Rng)
        {
            right = this.Rng.NextDouble() < 0.5;
        }
        var query = right
            ? tr.Snapshot.GetRange(loc, subspace.ToRange().End, new FdbRangeOptions { Limit = N })
            : tr.Snapshot.GetRange(subspace.ToRange().Begin, loc, new FdbRangeOptions { Limit = N, Reverse = true });
        var shards = await query.ToListAsync().ConfigureAwait(false);

        if (shards.Count > 0)
        {
            // remove the shards that were read, within this transaction
            foreach (var shard in shards)
            {
                checked { total += this.Encoder.DecodeValue(shard.Value); }
                await tr.GetAsync(shard.Key).ConfigureAwait(false); // real read, for isolation
                tr.Clear(shard.Key);
            }

            // write the sum back as a single new shard
            tr.Set(subspace.Encode(RandomId()), this.Encoder.EncodeValue(total));
        }
    }, ct);
}
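For context, the add path of this high-contention counter pattern only ever appends a new shard under a random id, so concurrent adders never write to the same key and therefore never conflict. A minimal sketch, assuming the same Location, Encoder, and RandomId() members as the Coalesce method above (the method name Add and its exact signature are illustrative, not necessarily the layer's actual API):

// Sketch (assumed shape): record a delta as a brand-new shard under a random id.
public async Task Add(IFdbTransaction tr, long value)
{
    var subspace = await this.Location.Resolve(tr);
    if (subspace == null) throw new InvalidOperationException($"Location '{this.Location}' was not found.");

    // append one more shard; Coalesce() above eventually folds shards back into a single total
    tr.Set(subspace.Encode(RandomId()), this.Encoder.EncodeValue(value));
}

Reading the counter then amounts to summing all shards with a snapshot range read, which is essentially what Coalesce does before rewriting the partial total as a single shard.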
public static async Task CleanLocation(IFdbDatabase db, ISubspaceLocation location, CancellationToken ct)
{
    Assert.That(db, Is.Not.Null, "null db");
    if (location.Path.Count == 0 && location.Prefix.Count == 0)
    {
        Assert.Fail("Cannot clean the root of the database!");
    }

    // if the prefix part is empty, then we simply recursively remove the corresponding sub-directory tree
    // if it is not empty, we only remove the corresponding subspace (without touching the sub-directories!)

    await db.WriteAsync(async tr =>
    {
        tr.StopLogging();

        if (location.Path.Count == 0)
        {
            // subspace under the root of the partition
            // get and clear subspace
            tr.ClearRange(KeyRange.StartsWith(location.Prefix));
        }
        else if (location.Prefix.Count == 0)
        {
            // remove previous
            await db.DirectoryLayer.TryRemoveAsync(tr, location.Path);

            // create new
            _ = await db.DirectoryLayer.CreateAsync(tr, location.Path);
        }
        else
        {
            // subspace under a directory subspace
            // make sure the parent path exists!
            var subspace = await db.DirectoryLayer.CreateOrOpenAsync(tr, location.Path);

            // get and clear subspace
            tr.ClearRange(subspace.Partition[location.Prefix].ToRange());
        }
    }, ct);
}
/// <summary>Move/Rename a directory</summary>
public static async Task MoveDirectory(FdbPath source, FdbPath destination, IVarTuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct)
{
    await db.WriteAsync(async tr =>
    {
        var folder = await db.DirectoryLayer.TryOpenAsync(tr, source);
        if (folder == null)
        {
            Program.Error(log, $"# Source directory {source} does not exist!");
            return;
        }

        folder = await db.DirectoryLayer.TryOpenAsync(tr, destination);
        if (folder != null)
        {
            Program.Error(log, $"# Destination directory {destination} already exists!");
            return;
        }

        await db.DirectoryLayer.MoveAsync(tr, source, destination);
    }, ct);
    Program.Success(log, $"Moved {source} to {destination}");
}
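A hedged sketch of invoking this shell-command handler directly from code: the paths are made-up example values, an empty tuple stands in for "no extra arguments", and an already-opened database handle is assumed:

// Illustrative invocation (assumes an already-opened IFdbDatabase 'db')
await MoveDirectory(
    FdbPath.Parse("/Benchmarks/Old"),  // source path (example value)
    FdbPath.Parse("/Benchmarks/New"),  // destination path (example value)
    STuple.Empty,                      // no extra arguments for this command
    db,
    Console.Out,
    CancellationToken.None
);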
private static async Task RunMultiClientTest(IFdbDatabase db, KeySubspace location, bool highContention, string desc, int K, int NUM, CancellationToken ct)
{
    Log("Starting {0} test with {1} threads and {2} iterations", desc, K, NUM);

    var queue = new FdbQueue<string>(location, highContention);
    await db.WriteAsync((tr) => queue.Clear(tr), ct);

    // use a CTS to ensure that everything will stop in case of problems...
    using (var go = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
    {
        var tok = go.Token;

        var pushLock = new AsyncCancelableMutex(tok);
        var popLock = new AsyncCancelableMutex(tok);

        int pushCount = 0;
        int popCount = 0;
        int stalls = 0;

        var pushTreads = Enumerable.Range(0, K)
            .Select(async id =>
            {
                try
                {
                    // wait for the signal
                    await pushLock.Task.ConfigureAwait(false);

                    var res = new List<string>(NUM);
                    for (int i = 0; i < NUM; i++)
                    {
                        var item = id.ToString() + "." + i.ToString();
                        await db.ReadWriteAsync((tr) => queue.PushAsync(tr, item), tok).ConfigureAwait(false);
                        Interlocked.Increment(ref pushCount);
                        res.Add(item);
                    }
                    return res;
                }
                catch (Exception e)
                {
                    Log("PushThread[" + id + "] failed: " + e);
                    Assert.Fail("PushThread[" + id + "] failed: " + e.Message);
                    throw;
                }
            }).ToArray();

        var popThreads = Enumerable.Range(0, K)
            .Select(async id =>
            {
                try
                {
                    // make everyone wait a bit, to ensure that they all start roughly at the same time
                    await popLock.Task.ConfigureAwait(false);

                    var res = new List<string>(NUM);
                    int i = 0;
                    while (i < NUM)
                    {
                        var item = await queue.PopAsync(db, tok).ConfigureAwait(false);
                        if (item.HasValue)
                        {
                            Interlocked.Increment(ref popCount);
                            res.Add(item.Value);
                            ++i;
                        }
                        else
                        {
                            Interlocked.Increment(ref stalls);
                            await Task.Delay(10).ConfigureAwait(false);
                        }
                    }
                    return res;
                }
                catch (Exception e)
                {
                    Log("PopThread[" + id + "] failed: " + e);
                    Assert.Fail("PopThread[" + id + "] failed: " + e.Message);
                    throw;
                }
            }).ToArray();

        var sw = Stopwatch.StartNew();

        pushLock.Set(async: true);
        await Task.Delay(100);
        popLock.Set(async: true);

        //using (var timer = new Timer((_) =>
        //{
        //    var __ = TestHelpers.DumpSubspace(db, location);
        //}, null, 1000, 4000))
        {
            await Task.WhenAll(pushTreads);
            await Task.WhenAll(popThreads);
        }

        sw.Stop();
        Log("> Finished {0} test in {1} seconds", desc, sw.Elapsed.TotalSeconds);
        Log("> Pushed {0}, Popped {1} and Stalled {2}", pushCount, popCount, stalls);

        var pushedItems = pushTreads.SelectMany(t => t.Result).ToList();
        var poppedItems = popThreads.SelectMany(t => t.Result).ToList();

        Assert.That(pushCount, Is.EqualTo(K * NUM));
        Assert.That(popCount, Is.EqualTo(K * NUM));

        // all pushed items should have been popped (with no duplicates)
        Assert.That(poppedItems, Is.EquivalentTo(pushedItems));

        // the queue should be empty
        bool empty = await db.ReadAsync((tr) => queue.EmptyAsync(tr), ct);
        Assert.That(empty, Is.True);
    }
}
private static async Task BenchMergeSortAsync(IFdbDatabase db, int N, int K, int B, CancellationToken ct)
{
    Console.WriteLine($"=== BenchMergeSort(N={N:N0}, K={K:N0}, B={B:N0}) ===");

    // create multiple lists
    var location = db.Root.ByKey("MergeSort");

    await db.WriteAsync(async tr =>
    {
        var subspace = await location.Resolve(tr);
        tr.ClearRange(subspace);
    }, ct);

    var sources = Enumerable.Range(0, K).Select(i => 'A' + i).ToArray();
    var rnd = new Random();

    // insert a number of random number lists
    Console.Write($"> Inserting {(K * N):N0} items... ");
    foreach (var source in sources)
    {
        using (var tr = await db.BeginTransactionAsync(ct))
        {
            var list = await location.ByKey(source).Resolve(tr);
            for (int i = 0; i < N; i++)
            {
                tr.Set(list.Encode(rnd.Next()), Slice.FromInt32(i));
            }
            await tr.CommitAsync();
        }
    }
    Console.WriteLine("Done");

    // merge/sort them to get only one (hopefully sorted) list
    using (var tr = await db.BeginTransactionAsync(ct))
    {
        var subspace = await location.Resolve(tr);

        var mergesort = tr
            .MergeSort(
                sources.Select(source => KeySelectorPair.StartsWith(subspace.Encode(source))),
                (kvp) => subspace.DecodeLast<int>(kvp.Key)
            )
            .Take(B)
            .Select(kvp => subspace.Unpack(kvp.Key));

        Console.Write($"> MergeSort with limit {B:N0}... ");
        var sw = Stopwatch.StartNew();
        var results = await mergesort.ToListAsync();
        sw.Stop();
        Console.WriteLine("Done");

        Console.WriteLine($"Took {FormatTimeMilli(sw.Elapsed.TotalMilliseconds)} to merge sort {results.Count:N0} results from {K} lists of {N} items each");

        //foreach (var result in results)
        //{
        //    Console.WriteLine(result.Get<int>(-1));
        //}
    }

    Console.WriteLine();
}
public async Task Run(IFdbDatabase db, TextWriter log, CancellationToken ct)
{
    const int WORKERS = 1;
    const int RUN_IN_SECONDS = 100;

    await Init(db, ct);
    log.WriteLine("Initialized for " + this.Mode.ToString());

    var timeline = new RobustTimeLine(
        TimeSpan.FromSeconds(1),
        RobustHistogram.TimeScale.Milliseconds,
        (histo, idx) =>
        {
            if (idx == 0)
            {
                Console.WriteLine("T+s | " + RobustHistogram.GetDistributionScale(RobustHistogram.HorizontalScale, 1, 5000 - 1) + " | ");
            }
            Console.WriteLine(String.Format(CultureInfo.InvariantCulture, "{0,3} | {1} | {2,6:#,##0.0} ms (+/- {3:#0.000})", idx, histo.GetDistribution(1, 5000 - 1), histo.Median, histo.MedianAbsoluteDeviation()));
            if (log != Console.Out)
            {
                log.WriteLine(histo.GetReport(false));
            }
            return false;
        }
    );

    var duration = Stopwatch.StartNew();

    var foo = this.Subspace.Keys.Encode("foo");
    var bar = Slice.FromString("bar");
    var barf = Slice.FromString("barf");

    long total = 0;

    timeline.Start();
    var elapsed = await Program.RunConcurrentWorkersAsync(
        WORKERS,
        async (i, _ct) =>
        {
            var dur = Stopwatch.StartNew();
            int k = 0;
            while (dur.Elapsed.TotalSeconds < RUN_IN_SECONDS)
            {
                var sw = Stopwatch.StartNew();
                switch (this.Mode)
                {
                    case BenchMode.GetReadVersion:
                    {
                        await db.ReadAsync(tr => tr.GetReadVersionAsync(), ct);
                        break;
                    }
                    case BenchMode.Get:
                    {
                        if (this.Value <= 1)
                        {
                            await db.ReadAsync(tr => tr.GetAsync(foo), ct);
                        }
                        else
                        {
                            var foos = STuple.EncodePrefixedKeys(foo, Enumerable.Range(1, this.Value).ToArray());
                            await db.ReadAsync(tr => tr.GetValuesAsync(foos), ct);
                        }
                        break;
                    }
                    case BenchMode.Set:
                    {
                        await db.WriteAsync(tr => tr.Set(foo, bar), ct);
                        break;
                    }
                    case BenchMode.Watch:
                    {
                        var w = await db.GetAndWatch(foo, ct);
                        var v = w.Value;

                        // swap
                        v = v == bar ? barf : bar;

                        await db.WriteAsync((tr) => tr.Set(foo, v), ct);
                        await w;
                        break;
                    }
                }
                sw.Stop();

                timeline.Add(sw.Elapsed.TotalMilliseconds);
                Console.Write(k.ToString() + "\r");

                ++k;
                Interlocked.Increment(ref total);
            }
        },
        ct
    );
    timeline.Stop();
    Console.WriteLine("Done ");
    Console.WriteLine("# Ran {0} transactions in {1:0.0##} sec", total, elapsed.TotalSeconds);

    var global = timeline.MergeResults();

    log.WriteLine("# Merged results:");
    log.WriteLine(global.GetReport(true));

    if (log != Console.Out)
    {
        Console.WriteLine("# Merged results:");
        Console.WriteLine(global.GetReport(true));
    }
}
/// <summary>Run the worker loop</summary>
public async Task RunWorkerAsync(IFdbDatabase db, Func<FdbWorkerMessage, CancellationToken, Task> handler, CancellationToken ct)
{
    int num = Interlocked.Increment(ref s_counter);

    var workerId = Slice.Nil;
    var previousTaskId = Slice.Nil;
    FdbWatch? watch = null;
    FdbWorkerMessage? msg = null;

    Interlocked.Increment(ref m_workers);
    try
    {
        while (true)
        {
            //TODO: how do we clear the previousTaskId from the db in case of cancellation ?
            ct.ThrowIfCancellationRequested();

            var myId = Slice.Nil;
            await db.WriteAsync(
                async (tr) =>
                {
                    tr.Annotate("I'm worker #{0} with id {1:P}", num, workerId);

                    myId = workerId;
                    watch = null;
                    msg = new FdbWorkerMessage();

                    if (previousTaskId != null)
                    {
                        // we need to clean up the previous task
                        ClearTask(tr, previousTaskId);
                    }
                    else if (myId.IsPresent)
                    {
                        // look for an already assigned task
                        tr.Annotate("Look for already assigned task");
                        msg.Id = await tr.GetAsync(this.BusyRing.Encode(myId)).ConfigureAwait(false);
                    }

                    if (!msg.Id.IsPresent)
                    {
                        // We aren't already assigned a task, so get an item from a random queue
                        tr.Annotate("Look for next queued item");

                        // Find the next task on the queue
                        var item = await tr.GetRange(this.UnassignedTaskRing.ToRange()).FirstOrDefaultAsync().ConfigureAwait(false);

                        if (item.Key != null)
                        {
                            // pop the Task from the queue
                            msg.Id = item.Value;
                            tr.Clear(item.Key);
                        }

                        if (msg.Id.IsPresent)
                        {
                            // mark this worker as busy
                            // note: we need a random id so generate one if it is the first time...
                            if (!myId.IsPresent) myId = GetRandomId();
                            tr.Annotate("Found {0:P}, switch to busy with id {1:P}", msg.Id, myId);
                            tr.Set(this.BusyRing.Encode(myId), msg.Id);
                            this.Counters.Increment(tr, COUNTER_BUSY);
                        }
                        else if (myId.IsPresent)
                        {
                            // remove ourselves from the busy ring
                            tr.Annotate("Found nothing, switch to idle with id {0:P}", myId);
                            //tr.Clear(this.BusyRing.Pack(myId));
                        }
                    }

                    if (msg.Id.IsPresent)
                    {
                        // get the task body
                        tr.Annotate("Fetching body for task {0:P}", msg.Id);
                        var prefix = this.TaskStore.Partition.ByKey(msg.Id);
                        //TODO: replace this with a get_range ?
                        var data = await tr.GetValuesAsync(new[] { prefix.GetPrefix(), prefix.Encode(TASK_META_SCHEDULED) }).ConfigureAwait(false);

                        msg.Body = data[0];
                        msg.Scheduled = new DateTime(data[1].ToInt64(), DateTimeKind.Utc);
                        msg.Received = DateTime.UtcNow;
                    }
                    else
                    {
                        // There are no unassigned tasks, so enter the idle_worker_ring and wait for a task to be assigned to us

                        // remove us from the busy ring
                        if (myId.IsPresent)
                        {
                            tr.Clear(this.BusyRing.Encode(myId));
                            this.Counters.Decrement(tr, COUNTER_BUSY);
                        }

                        // choose a new random position on the idle ring
                        myId = GetRandomId();

                        // the idle key will also be used as the watch key to wake us up
                        var watchKey = this.IdleRing.Encode(myId);
                        tr.Annotate("Will start watching on key {0:P} with id {1:P}", watchKey, myId);
                        tr.Set(watchKey, Slice.Empty);
                        this.Counters.Increment(tr, COUNTER_IDLE);

                        watch = tr.Watch(watchKey, ct);
                    }
                },
                success: (tr) =>
                {
                    // we have successfully acquired some work, or got a watch
                    previousTaskId = Slice.Nil;
                    workerId = myId;
                },
                ct: ct
            ).ConfigureAwait(false);

            if (msg!.Id.IsNullOrEmpty)
            {
                // wait for someone to wake us up...
                Interlocked.Increment(ref m_idleWorkers);
                try
                {
                    await watch!.Task;
                    //Console.WriteLine("Worker #" + num + " woken up!");
                }
                finally
                {
                    Interlocked.Decrement(ref m_idleWorkers);
                }
            }
            else
            {
                //Console.WriteLine("Got task " + taskId);
                previousTaskId = msg.Id;

                if (msg.Body.IsNull)
                {
                    // the task has been dropped?
                    // TODO: logging?
#if DEBUG
                    Console.WriteLine($"[####] Task[{msg.Id:P}] has vanished?");
#endif
                }
                else
                {
                    try
                    {
                        await RunTask(db, msg, handler, ct);
                    }
                    catch (Exception e)
                    {
                        //TODO: logging?
#if DEBUG
                        Console.Error.WriteLine($"Task[{msg.Id:P}] failed: {e}");
#endif
                    }
                }
            }
        }
    }
    finally
    {
        // balance the Interlocked.Increment(ref m_workers) performed at the start of the method
        Interlocked.Decrement(ref m_workers);
    }
}
/// <summary>
/// Simulate a student that is really indecisive
/// </summary>
public async Task IndecisiveStudent(IFdbDatabase db, int id, int ops, CancellationToken ct)
{
    string student = "s" + id.ToString("D04");

    var allClasses = new List<string>(this.ClassNames);
    var myClasses = new List<string>();

    var rnd = new Random(id * 7);

    for (int i = 0; i < ops && !ct.IsCancellationRequested; i++)
    {
        int classCount = myClasses.Count;

        var moods = new List<string>();
        if (classCount > 0) moods.AddRange(new[] { "drop", "switch" });
        if (classCount < 5) moods.Add("add");

        string mood = moods[rnd.Next(moods.Count)];

        try
        {
            if (allClasses == null)
            {
                allClasses = await db.ReadAsync((tr) => AvailableClasses(tr), ct);
            }

            switch (mood)
            {
                case "add":
                {
                    string @class = allClasses[rnd.Next(allClasses.Count)];
                    await db.WriteAsync((tr) => Signup(tr, student, @class), ct);
                    myClasses.Add(@class);
                    break;
                }
                case "drop":
                {
                    string @class = allClasses[rnd.Next(allClasses.Count)];
                    await db.WriteAsync((tr) => Drop(tr, student, @class), ct);
                    myClasses.Remove(@class);
                    break;
                }
                case "switch":
                {
                    string oldClass = allClasses[rnd.Next(allClasses.Count)];
                    string newClass = allClasses[rnd.Next(allClasses.Count)];
                    await db.WriteAsync((tr) => Switch(tr, student, oldClass, newClass), ct);
                    myClasses.Remove(oldClass);
                    myClasses.Add(newClass);
                    break;
                }
                default:
                {
                    throw new InvalidOperationException("Ooops");
                }
            }
        }
        catch (Exception e)
        {
            if (e is TaskCanceledException || e is OperationCanceledException) throw;
            allClasses = null;
        }
    }

    ct.ThrowIfCancellationRequested();
}
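For reference, a minimal sketch of what the Signup, Drop, and Switch helpers used above could look like, following the classic FoundationDB class-scheduling tutorial in a simplified form: the seat-count and "5 classes max" checks of the full tutorial are omitted, and an AttendsKey(student, class) helper is assumed to exist alongside the ClassKey helper shown in the Init method earlier:

// Simplified sketch of the tutorial's mutations (not the full sample code):
// each (student, class) pair is marked by a key under an "attends" prefix.
private void Signup(IFdbTransaction tr, string s, string c)
{
    // record the (student, class) pair; writing it twice is harmless
    tr.Set(AttendsKey(s, c), Slice.Empty);
}

private void Drop(IFdbTransaction tr, string s, string c)
{
    // forget the (student, class) pair if it exists
    tr.Clear(AttendsKey(s, c));
}

private void Switch(IFdbTransaction tr, string s, string oldC, string newC)
{
    // both mutations happen in the same transaction, so the switch is atomic
    Drop(tr, s, oldC);
    Signup(tr, s, newC);
}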
private static async Task RunMultiClientTest(IFdbDatabase db, FdbSubspace location, bool highContention, string desc, int K, int NUM, CancellationToken ct)
{
    Console.WriteLine("Starting {0} test with {1} threads and {2} iterations", desc, K, NUM);

    var queue = new FdbQueue<string>(location, highContention);
    await db.WriteAsync((tr) => queue.Clear(tr), ct);

    // use a CTS to ensure that everything will stop in case of problems...
    using (var go = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
    {
        var tok = go.Token;

        var pushLock = new AsyncCancelableMutex(tok);
        var popLock = new AsyncCancelableMutex(tok);

        int pushCount = 0;
        int popCount = 0;
        int stalls = 0;

        var pushTreads = Enumerable.Range(0, K)
            .Select(async id =>
            {
                // wait for the signal
                await pushLock.Task.ConfigureAwait(false);

                var res = new List<string>(NUM);
                for (int i = 0; i < NUM; i++)
                {
                    var item = id.ToString() + "." + i.ToString();
                    await db.ReadWriteAsync((tr) => queue.PushAsync(tr, item), tok).ConfigureAwait(false);
                    Interlocked.Increment(ref pushCount);
                    res.Add(item);
                }
                return res;
            }).ToArray();

        var popThreads = Enumerable.Range(0, K)
            .Select(async id =>
            {
                // make everyone wait a bit, to ensure that they all start roughly at the same time
                await popLock.Task.ConfigureAwait(false);

                var res = new List<string>(NUM);
                int i = 0;
                while (i < NUM)
                {
                    var item = await queue.PopAsync(db, tok).ConfigureAwait(false);
                    if (item.HasValue)
                    {
                        Interlocked.Increment(ref popCount);
                        res.Add(item.Value);
                        ++i;
                    }
                    else
                    {
                        Interlocked.Increment(ref stalls);
                        await Task.Delay(10).ConfigureAwait(false);
                    }
                }
                return res;
            }).ToArray();

        var sw = Stopwatch.StartNew();

        pushLock.Set(async: true);
        await Task.Delay(100);
        popLock.Set(async: true);

        //using (var timer = new Timer((_) =>
        //{
        //    var __ = TestHelpers.DumpSubspace(db, location);
        //}, null, 1000, 4000))
        {
            await Task.WhenAll(pushTreads);
            await Task.WhenAll(popThreads);
        }

        sw.Stop();
        Console.WriteLine("> Finished {0} test in {1} seconds", desc, sw.Elapsed.TotalSeconds);
        Console.WriteLine("> Pushed {0}, Popped {1} and Stalled {2}", pushCount, popCount, stalls);

        var pushedItems = pushTreads.SelectMany(t => t.Result).ToList();
        var poppedItems = popThreads.SelectMany(t => t.Result).ToList();

        Assert.That(pushCount, Is.EqualTo(K * NUM));
        Assert.That(popCount, Is.EqualTo(K * NUM));

        // all pushed items should have been popped (with no duplicates)
        Assert.That(poppedItems, Is.EquivalentTo(pushedItems));

        // the queue should be empty
        bool empty = await db.ReadAsync((tr) => queue.EmptyAsync(tr), ct);
        Assert.That(empty, Is.True);
    }
}
public async Task Run(IFdbDatabase db, TextWriter log, CancellationToken ct)
{
    const int WORKERS = 1;
    const int RUN_IN_SECONDS = 100;

    await Init(db, ct);
    log.WriteLine("Initialized for " + this.Mode.ToString());

    var timeline = new RobustTimeLine(
        TimeSpan.FromSeconds(1),
        RobustHistogram.TimeScale.Milliseconds,
        (histo, idx) =>
        {
            if (idx == 0)
            {
                Console.WriteLine("T+s | " + RobustHistogram.GetDistributionScale(RobustHistogram.HorizontalScale, 1, 5000 - 1) + " | ");
            }
            Console.WriteLine(String.Format(CultureInfo.InvariantCulture, "{0,3} | {1} | {2,6:#,##0.0} ms (+/- {3:#0.000})", idx, histo.GetDistribution(1, 5000 - 1), histo.Median, histo.MedianAbsoluteDeviation()));
            if (log != Console.Out) log.WriteLine(histo.GetReport(false));
            return false;
        }
    );

    var duration = Stopwatch.StartNew();

    var foo = this.Subspace.Pack("foo");
    var bar = Slice.FromString("bar");
    var barf = Slice.FromString("barf");

    long total = 0;

    timeline.Start();
    var elapsed = await Program.RunConcurrentWorkersAsync(
        WORKERS,
        async (i, _ct) =>
        {
            var dur = Stopwatch.StartNew();
            int k = 0;
            while (dur.Elapsed.TotalSeconds < RUN_IN_SECONDS)
            {
                var sw = Stopwatch.StartNew();
                switch (this.Mode)
                {
                    case BenchMode.GetReadVersion:
                    {
                        await db.ReadAsync(tr => tr.GetReadVersionAsync(), ct);
                        break;
                    }
                    case BenchMode.Get:
                    {
                        if (this.Value <= 1)
                        {
                            await db.ReadAsync(tr => tr.GetAsync(foo), ct);
                        }
                        else
                        {
                            var foos = FdbTuple.PackRange(foo, Enumerable.Range(1, this.Value).ToArray());
                            await db.ReadAsync(tr => tr.GetValuesAsync(foos), ct);
                        }
                        break;
                    }
                    case BenchMode.Set:
                    {
                        await db.WriteAsync(tr => tr.Set(foo, bar), ct);
                        break;
                    }
                    case BenchMode.Watch:
                    {
                        var w = await db.GetAndWatch(foo, ct);
                        var v = w.Value;

                        if (v == bar) v = barf; else v = bar;

                        await db.WriteAsync((tr) => tr.Set(foo, v), ct);
                        await w;
                        break;
                    }
                }
                sw.Stop();

                timeline.Add(sw.Elapsed.TotalMilliseconds);
                Console.Write(k.ToString() + "\r");

                ++k;
                Interlocked.Increment(ref total);
            }
        },
        ct
    );
    timeline.Stop();
    Console.WriteLine("Done ");
    Console.WriteLine("# Ran {0} transactions in {1:0.0##} sec", total, elapsed.TotalSeconds);

    var global = timeline.MergeResults();

    log.WriteLine("# Merged results:");
    log.WriteLine(global.GetReport(true));

    if (log != Console.Out)
    {
        Console.WriteLine("# Merged results:");
        Console.WriteLine(global.GetReport(true));
    }
}