private static async Task BenchConcurrentReadAsync(IFdbDatabase db, int N, CancellationToken ct)
{
	// Reads a lot of small keys concurrently: first by firing all GetAsync() calls in
	// parallel and awaiting them with Task.WhenAll, then by using the GetBatchAsync()
	// helper, and prints the timings of both approaches.
	Console.WriteLine($"=== BenchConcurrentRead(N={N:N0}) ===");
	Console.WriteLine($"Reading {N:N0} keys (concurrent)");

	var location = db.Root.ByKey("hello").AsTyped<int>();

	// Method 1: issue all reads at once and wait for the whole batch
	var sw = Stopwatch.StartNew();
	using (var trans = await db.BeginTransactionAsync(ct))
	{
		var subspace = await location.Resolve(trans);
		_ = await Task.WhenAll(Enumerable
			.Range(0, N)
			.Select((i) => trans.GetAsync(subspace[i]))
		);
	}
	sw.Stop();
	// BUGFIX: was "{N}" (missing ":N0"), inconsistent with every other report line
	Console.WriteLine($"Took {sw.Elapsed.TotalSeconds:N3} sec to read {N:N0} items ({FormatTimeMicro(sw.Elapsed.TotalMilliseconds / N)}/read, {N / sw.Elapsed.TotalSeconds:N0} read/sec)");
	Console.WriteLine();

	// Method 2: same reads, but via the batched helper
	sw = Stopwatch.StartNew();
	using (var trans = await db.BeginTransactionAsync(ct))
	{
		var subspace = await location.Resolve(trans);
		_ = await trans.GetBatchAsync(Enumerable.Range(0, N).Select(i => subspace[i]));
	}
	sw.Stop();
	Console.WriteLine($"Took {sw.Elapsed.TotalSeconds:N3} sec to read {N:N0} items ({FormatTimeMicro(sw.Elapsed.TotalMilliseconds / N)}/read, {N / sw.Elapsed.TotalSeconds:N0} read/sec)");
	Console.WriteLine();
}
private static async Task BenchUpdateSameKeyLotsOfTimesAsync(IFdbDatabase db, int N, CancellationToken ct)
{
	// Continuously overwrites the same key, growing the payload by one byte per
	// iteration, with one transaction per update, and reports the throughput.
	Console.WriteLine($"=== BenchUpdateSameKeyLotsOfTimes(N={N:N0}) ===");
	Console.WriteLine($"Updating the same list {N:N0} times...");

	var buffer = new byte[N];
	var clock = Stopwatch.StartNew();
	for (int step = 0; step < N; step++)
	{
		buffer[step] = (byte) step;
		using (var trans = await db.BeginTransactionAsync(ct))
		{
			var subspace = await db.Root.Resolve(trans);
			trans.Set(subspace.Encode("list"), buffer.AsSlice());
			await trans.CommitAsync();
		}
		// print some progress every 100 updates
		if (step % 100 == 0)
		{
			Console.Write($"\r> {step:N0} / {N:N0}");
		}
	}
	clock.Stop();

	Console.WriteLine($"\rTook {clock.Elapsed.TotalSeconds:N3} sec to fill a byte[{N:N0}] one by one ({FormatTimeMicro(clock.Elapsed.TotalMilliseconds / N)}/update, {N / clock.Elapsed.TotalSeconds:N0} update/sec)");
	Console.WriteLine();
}
private static async Task BenchClearAsync(IFdbDatabase db, int N, CancellationToken ct)
{
	// Clears a lot of small keys, all within a single transaction, and reports the timing.
	Console.WriteLine($"=== BenchClear(N={N:N0}) ===");

	var location = db.Root.ByKey(Slice.FromStringAscii("hello"));

	var chrono = Stopwatch.StartNew();
	using (var trans = await db.BeginTransactionAsync(ct))
	{
		var subspace = await location.Resolve(trans);
		for (int idx = 0; idx < N; idx++)
		{
			trans.Clear(subspace.Encode(idx));
		}
		await trans.CommitAsync();
	}
	chrono.Stop();

	Console.WriteLine($"Took {chrono.Elapsed.TotalSeconds:N3} sec to clear {N:N0} items ({FormatTimeMicro(chrono.Elapsed.TotalMilliseconds / N)}/write, {N / chrono.Elapsed.TotalSeconds:N0} clear/sec)");
	Console.WriteLine();
}
private static async Task BenchSerialReadAsync(IFdbDatabase db, int N, CancellationToken ct)
{
	// Reads a lot of small keys one at a time (deliberately slow), grouping the
	// reads into transactions of at most 1,000 keys each.
	Console.WriteLine($"=== BenchSerialRead(N={N:N0}) ===");
	Console.WriteLine($"Reading {N:N0} keys (serial, slow!)");

	var location = db.Root.ByKey("hello");

	var chrono = Stopwatch.StartNew();
	for (int offset = 0; offset < N; offset += 1000)
	{
		using (var trans = await db.BeginTransactionAsync(ct))
		{
			var subspace = await location.Resolve(trans);
			int end = Math.Min(N, offset + 1000);
			for (int i = offset; i < end; i++)
			{
				// one awaited read per key: measures the serial round-trip cost
				_ = await trans.GetAsync(subspace.Encode(i));
			}
		}
		Console.Write(".");
	}
	Console.WriteLine();
	chrono.Stop();

	Console.WriteLine($"Took {chrono.Elapsed.TotalSeconds:N3} sec to read {N:N0} items ({FormatTimeMicro(chrono.Elapsed.TotalMilliseconds / N)}/read, {N / chrono.Elapsed.TotalSeconds:N0} read/sec)");
	Console.WriteLine();
}
protected async Task DeleteSubspace(IFdbDatabase db, IKeySubspace subspace)
{
	// Wipes every key under the given subspace, in a single committed transaction.
	using (var trans = await db.BeginTransactionAsync(this.Cancellation))
	{
		trans.ClearRange(subspace);
		await trans.CommitAsync();
	}
}
public static async Task DumpSubspace(IFdbDatabase db, IKeySubspace subspace, CancellationToken ct)
{
	// Opens a throw-away transaction (with logging muted so the dump itself is not
	// logged) and delegates to the transaction-based DumpSubspace overload.
	Assert.That(db, Is.Not.Null);

	using (var trans = await db.BeginTransactionAsync(ct))
	{
		trans.StopLogging();
		await DumpSubspace(trans, subspace).ConfigureAwait(false);
	}
}
private static async Task BurnerThread(IFdbDatabase db, CancellationToken ct)
{
	// Background load generator: writes batches of N keys in a tight loop until the
	// token is cancelled, updating the shared Keys/Transactions/Bytes counters.

	// resolve (and wipe) the target folder once, before the main loop
	var folder = await db.ReadWriteAsync(async tr =>
	{
		var x = await db.Root["Benchmarks"]["Burner"]["Sequential"].CreateOrOpenAsync(tr);
		tr.ClearRange(x);
		return(x);
	}, ct);

	long pos = 0;

	// derive a per-thread RNG seed from the shared one (Random is not thread-safe, hence the lock)
	Random rnd;
	lock (Rnd)
	{
		rnd = new Random(Rnd.Next());
	}

	using (var tr = await db.BeginTransactionAsync(ct))
	{
		while (!ct.IsCancellationRequested)
		{
			FdbException error = null;
			try
			{
				// the same transaction handle is reused: Reset() starts a fresh attempt
				tr.Reset();

				for (int i = 0; i < N; i++)
				{
					// key is either random or sequential, depending on the Randomized flag
					long x = Randomized ? rnd.Next() : pos + i;
					tr.Set(folder.Encode(x, Suffix), Value);
					Interlocked.Increment(ref Keys);
				}
				pos += N;

				await tr.CommitAsync();
				Interlocked.Increment(ref Transactions);
				Interlocked.Add(ref Bytes, tr.Size);
			}
			catch (FdbException e)
			{
				// capture the error so that the retry logic runs outside the catch block
				error = e;
			}

			if (error != null && !ct.IsCancellationRequested)
			{
				// standard FDB retry pattern: OnErrorAsync backs off (or rethrows if the error is fatal)
				await tr.OnErrorAsync(error.Code);
			}
		}
	}
}
private static async Task TestSimpleTransactionAsync(IFdbDatabase db, CancellationToken ct)
{
	// Smoke test that exercises the basic transaction lifecycle:
	// read version -> get -> set -> commit -> committed version.
	Console.WriteLine("=== TestSimpleTransaction() ===");

	Console.WriteLine("Starting new transaction...");
	using (var trans = await db.BeginTransactionAsync(ct))
	{
		Console.WriteLine("> Transaction ready");

		Console.WriteLine("Getting read version...");
		var readVersion = await trans.GetReadVersionAsync();
		Console.WriteLine("> Read Version = " + readVersion);

		Console.WriteLine("Resolving root location...");
		var location = await db.Root.Resolve(trans);
		Console.WriteLine("> " + location);

		Console.WriteLine("Getting 'hello'...");
		var result = await trans.GetAsync(location.Encode("hello"));
		if (result.IsNull)
		{
			Console.WriteLine("> hello NOT FOUND");
		}
		else
		{
			Console.WriteLine($"> hello = {result:V}");
		}

		Console.WriteLine("Setting 'Foo' = 'Bar'");
		trans.Set(location.Encode("Foo"), Slice.FromString("Bar"));

		Console.WriteLine("Setting 'TopSecret' = rnd(512)");
		var data = new byte[512];
		new Random(1234).NextBytes(data);
		trans.Set(location.Encode("TopSecret"), data.AsSlice());

		Console.WriteLine("Committing transaction...");
		await trans.CommitAsync();
		//trans.Commit();
		Console.WriteLine("> Committed!");

		// BUGFIX: fixed typos "comitted" / "Commited" in the console output
		Console.WriteLine("Getting committed version...");
		var writeVersion = trans.GetCommittedVersion();
		Console.WriteLine("> Committed Version = " + writeVersion);
	}
}
public async Task RunStatus(IFdbDatabase db, CancellationToken ct)
{
	// Prints a snapshot of the worker pool state: the counters ('C'), followed by a
	// dump of the Idle ('I'), Busy ('B'), Unassigned ('U') and Tasks ('T') sections.
	var countersLocation = this.WorkerPool.Subspace.Partition.ByKey(Slice.FromChar('C'));
	var idleLocation = this.WorkerPool.Subspace.Partition.ByKey(Slice.FromChar('I'));
	var busyLocation = this.WorkerPool.Subspace.Partition.ByKey(Slice.FromChar('B'));
	var tasksLocation = this.WorkerPool.Subspace.Partition.ByKey(Slice.FromChar('T'));
	var unassignedLocation = this.WorkerPool.Subspace.Partition.ByKey(Slice.FromChar('U'));

	using (var tr = await db.BeginTransactionAsync(ct))
	{
		// read all the counters in one snapshot range read
		var counters = await tr.Snapshot
			.GetRange(countersLocation.ToRange())
			.Select(kvp => new KeyValuePair<string, long>(countersLocation.DecodeLast<string>(kvp.Key), kvp.Value.ToInt64()))
			.ToListAsync()
			.ConfigureAwait(false);

		Console.WriteLine("Status at " + DateTimeOffset.Now.ToString("O"));
		foreach (var counter in counters)
		{
			Console.WriteLine(" - " + counter.Key + " = " + counter.Value);
		}

		Console.WriteLine("Dump:");

		// local helper that dumps one section, replacing four copy-pasted loops;
		// the console output is identical to the previous version
		async Task DumpSection(string label, IDynamicKeySubspace location)
		{
			Console.WriteLine("> " + label);
			await tr.Snapshot.GetRange(location.ToRange()).ForEachAsync((kvp) =>
			{
				Console.WriteLine($"- {label}.{location.Unpack(kvp.Key)} = {kvp.Value:V}");
			});
		}

		await DumpSection("Idle", idleLocation);
		await DumpSection("Busy", busyLocation);
		await DumpSection("Unassigned", unassignedLocation);
		await DumpSection("Tasks", tasksLocation);

		Console.WriteLine("<");
	}
}
private static async Task BenchSerialWriteAsync(IFdbDatabase db, int N, CancellationToken ct)
{
	// Writes a lot of small keys one by one, committing and starting a new
	// transaction whenever the pending writes grow past ~100 KB.
	Console.WriteLine($"=== BenchSerialWrite(N={N:N0}) ===");

	var location = db.Root.ByKey("hello");

	var sw = Stopwatch.StartNew();
	IFdbTransaction trans = null;
	IDynamicKeySubspace subspace = null;
	try
	{
		for (int i = 0; i < N; i++)
		{
			if (trans == null)
			{ // start a new batch
				trans = await db.BeginTransactionAsync(ct);
				subspace = await location.Resolve(trans);
			}
			trans.Set(subspace.Encode(i), Slice.FromInt32(i));
			if (trans.Size > 100 * 1024)
			{ // flush the current batch; the next iteration opens a fresh transaction
				await trans.CommitAsync();
				trans.Dispose();
				trans = null;
			}
		}
		// BUGFIX: 'trans' is null here when the last iteration just flushed a batch,
		// which used to throw a NullReferenceException on the unconditional commit
		if (trans != null)
		{
			await trans.CommitAsync();
		}
	}
	finally
	{
		trans?.Dispose();
	}
	sw.Stop();

	// BUGFIX: the report used to say "read"/"read/sec" even though this benchmark measures writes
	Console.WriteLine($"Took {sw.Elapsed.TotalSeconds:N3} sec to write {N:N0} items ({FormatTimeMicro(sw.Elapsed.TotalMilliseconds / N)}/write, {N / sw.Elapsed.TotalSeconds:N0} write/sec)");
	Console.WriteLine();
}
private static async Task BenchInsertSmallKeysAsync(IFdbDatabase db, int N, int size, CancellationToken ct)
{
	// Inserts N small keys with 'size'-byte values, one transaction per run,
	// repeated over 5 runs, and reports the best and average time per write.
	Console.WriteLine($"=== BenchInsertSmallKeys(N={N:N0}, size={size:N0}) ===");

	var rnd = new Random();
	var tmp = new byte[size];

	var location = db.Root.ByKey("Batch");

	var times = new List<TimeSpan>();
	for (int k = 0; k <= 4; k++)
	{
		var sw = Stopwatch.StartNew();
		using (var trans = await db.BeginTransactionAsync(ct))
		{
			var subspace = await location.Resolve(trans);
			rnd.NextBytes(tmp);
			for (int i = 0; i < N; i++)
			{
				// vary the first two bytes so the values are not all identical
				tmp[0] = (byte) i;
				tmp[1] = (byte) (i >> 8);

				// (Batch, 1) = [......]
				// (Batch, 2) = [......]
				trans.Set(subspace.Encode(k * N + i), tmp.AsSlice());
			}
			await trans.CommitAsync();
		}
		sw.Stop();
		times.Add(sw.Elapsed);
	}
	var min = times.Min();
	// BUGFIX: the average was reported per transaction, not per write (the "/ N" was
	// missing, even though the minimum IS divided by N on the same report line)
	var avg = times.Sum(x => x.TotalMilliseconds) / times.Count / N;
	Console.WriteLine($"[{Thread.CurrentThread.ManagedThreadId}] Took {min.TotalSeconds.ToString("N3", CultureInfo.InvariantCulture)} sec to insert {N} {size}-bytes items (min={FormatTimeMicro(min.TotalMilliseconds / N)}/write, avg={FormatTimeMicro(avg)}/write)");
	Console.WriteLine();
}
public static async Task DumpLocation(IFdbDatabase db, ISubspaceLocation path, CancellationToken ct)
{
	// Dumps the content of the subspace at 'path'; when the path has no key prefix,
	// also recurses into any child directories listed by the directory layer.
	Assert.That(db, Is.Not.Null);

	using (var tr = await db.BeginTransactionAsync(ct))
	{
		tr.StopLogging();

		var subspace = await path.Resolve(tr);
		if (subspace == null)
		{ // nothing to show for this location
			FdbTest.Log($"Dumping content of subspace {path}:");
			FdbTest.Log("> EMPTY!");
			return;
		}

		await DumpSubspace(tr, subspace).ConfigureAwait(false);

		// only recurse when dumping from the root prefix
		if (path.Prefix.Count != 0) return;

		var names = await db.DirectoryLayer.TryListAsync(tr, path.Path);
		if (names == null) return;

		foreach (var name in names)
		{
			var child = await db.DirectoryLayer.TryOpenAsync(tr, path.Path[name]);
			if (child != null)
			{
				await DumpSubspace(tr, child);
			}
		}
	}
}
public async Task Run(IFdbDatabase db, TextWriter log, CancellationToken ct)
{
	// Samples a fraction (this.Ratio) of the cluster's shards, reads the sampled
	// ranges in parallel, and extrapolates an estimate of the total keyspace size.

	// estimate the number of machines...
	Console.WriteLine("# Detecting cluster topology...");
	// each server-list value packs the node, machine and datacenter ids at fixed offsets
	var servers = await db.QueryAsync(tr => tr
		.WithReadAccessToSystemKeys()
		.GetRange(KeyRange.StartsWith(Fdb.System.ServerList))
		.Select(kvp => new
		{
			Node = kvp.Value.Substring(8, 16).ToHexaString(),
			Machine = kvp.Value.Substring(24, 16).ToHexaString(),
			DataCenter = kvp.Value.Substring(40, 16).ToHexaString()
		}),
		ct
	);

	var numNodes = servers.Select(s => s.Node).Distinct().Count();
	var numMachines = servers.Select(s => s.Machine).Distinct().Count();
	var numDCs = servers.Select(s => s.DataCenter).Distinct().Count();

	Console.WriteLine("# > Found " + numNodes + " process(es) on " + numMachines + " machine(s) in " + numDCs + " datacenter(s)");

	Console.WriteLine("# Reading list of shards...");
	// dump keyServers
	var ranges = await Fdb.System.GetChunksAsync(db, FdbKey.MinValue, FdbKey.MaxValue, ct);
	Console.WriteLine("# > Found " + ranges.Count + " shards:");

	// take a sample
	var rnd = new Random(1234);
	int sz = Math.Max((int) Math.Ceiling(this.Ratio * ranges.Count), 1);
	if (sz > 500) { sz = 500; } // SAFETY: never sample more than 500 shards
	if (sz < 50) { sz = Math.Max(sz, Math.Min(50, ranges.Count)); } // floor: at least 50 shards (or all of them)

	var samples = new List<KeyRange>();
	for (int i = 0; i < sz; i++)
	{
		// draw without replacement
		int p = rnd.Next(ranges.Count);
		samples.Add(ranges[p]);
		ranges.RemoveAt(p);
	}

	// NOTE(review): 'ranges.Count' below is the count AFTER the samples were removed —
	// confirm whether the denominator was meant to be the original shard count
	Console.WriteLine("# Sampling " + sz + " out of " + ranges.Count + " shards (" + (100.0 * sz / ranges.Count).ToString("N1") + "%) ...");
	Console.WriteLine("{0,9}{1,10}{2,10}{3,10} : K+V size distribution", "Count", "Keys", "Values", "Total");

	var rangeOptions = new FdbRangeOptions { Mode = FdbStreamingMode.WantAll };

	samples = samples.OrderBy(x => x.Begin).ToList();

	long total = 0;
	int workers = Math.Min(numMachines, 8);

	var sw = Stopwatch.StartNew();
	var tasks = new List<Task>();
	// keep at most 'workers' range reads in flight at any time
	while (samples.Count > 0)
	{
		while (tasks.Count < workers && samples.Count > 0)
		{
			var range = samples[0];
			samples.RemoveAt(0);
			tasks.Add(Task.Run(async () =>
			{
				var hh = new RobustHistogram(RobustHistogram.TimeScale.Ticks);

				#region Method 1: get_range everything...

				using (var tr = await db.BeginTransactionAsync(ct))
				{
					long keySize = 0;
					long valueSize = 0;
					long count = 0;

					int iter = 0;
					var beginSelector = KeySelector.FirstGreaterOrEqual(range.Begin);
					var endSelector = KeySelector.FirstGreaterOrEqual(range.End);
					while (true)
					{
						FdbRangeChunk data = default(FdbRangeChunk);
						FdbException error = null;
						try
						{
							data = await tr.Snapshot.GetRangeAsync(
								beginSelector,
								endSelector,
								rangeOptions,
								iter
							).ConfigureAwait(false);
						}
						catch (FdbException e)
						{
							error = e;
						}

						if (error != null)
						{
							// retryable error: let the transaction back off, then try the same chunk again
							await tr.OnErrorAsync(error.Code).ConfigureAwait(false);
							continue;
						}

						if (data.Count == 0) { break; }

						count += data.Count;
						foreach (var kvp in data)
						{
							keySize += kvp.Key.Count;
							valueSize += kvp.Value.Count;

							// the histogram tracks the K+V size distribution (in bytes), reusing the TimeSpan-based API
							hh.Add(TimeSpan.FromTicks(kvp.Key.Count + kvp.Value.Count));
						}

						if (!data.HasMore) { break; }

						// resume the paging just after the last key received
						beginSelector = KeySelector.FirstGreaterThan(data.Last);
						++iter;
					}

					long totalSize = keySize + valueSize;
					Interlocked.Add(ref total, totalSize);

					Console.WriteLine("{0,9}{1,10}{2,10}{3,10} : {4}", count.ToString("N0"), FormatSize(keySize), FormatSize(valueSize), FormatSize(totalSize), hh.GetDistribution(begin: 1, end: 10000, fold: 2));
				}

				#endregion

				#region Method 2: estimate the count using key selectors...

				//long counter = await Fdb.System.EstimateCountAsync(db, range, ct);
				//Console.WriteLine("COUNT = " + counter.ToString("N0"));

				#endregion
			}, ct));
		}

		// wait for one in-flight read to finish before queuing the next sample
		var done = await Task.WhenAny(tasks);
		tasks.Remove(done);
	}

	await Task.WhenAll(tasks);
	sw.Stop();

	Console.WriteLine("> Sampled " + FormatSize(total) + " (" + total.ToString("N0") + " bytes) in " + sw.Elapsed.TotalSeconds.ToString("N1") + " sec");
	// extrapolate: (sampled bytes) * (remaining shards) / (sampled shards)
	Console.WriteLine("> Estimated total size is " + FormatSize(total * ranges.Count / sz));
}
public static async ValueTask<IFdbReadOnlyTransaction> BeginReadOnlyTransactionAsync(this IFdbDatabase db, CancellationToken ct)
{
	// Convenience overload: opens a transaction in read-only mode, with no preexisting operation context.
	Contract.NotNull(db, nameof(db));
	var trans = await db.BeginTransactionAsync(FdbTransactionMode.ReadOnly, ct, default(FdbOperationContext));
	return trans;
}
private static async Task BenchMergeSortAsync(IFdbDatabase db, int N, int K, int B, CancellationToken ct)
{
	// Fills K independent lists with N random keys each, then merge-sorts them back
	// into a single result set (capped at B items) and measures the merge duration.
	Console.WriteLine($"=== BenchMergeSort(N={N:N0}, K={K:N0}, B={B:N0}) ===");

	// create multiple lists
	var location = db.Root.ByKey("MergeSort");
	await db.WriteAsync(async tr =>
	{
		// start from a clean slate
		var subspace = await location.Resolve(tr);
		tr.ClearRange(subspace);
	}, ct);

	// NOTE(review): 'A' + i is an int (65, 66, ...), not a char, so the source ids
	// are ints — consistent between writer and merger, but confirm whether the
	// characters 'A', 'B', ... were intended
	var sources = Enumerable.Range(0, K).Select(i => 'A' + i).ToArray();

	var rnd = new Random();

	// insert a number of random number lists
	Console.Write($"> Inserting {(K * N):N0} items... ");
	foreach (var source in sources)
	{
		using (var tr = await db.BeginTransactionAsync(ct))
		{
			var list = await location.ByKey(source).Resolve(tr);
			for (int i = 0; i < N; i++)
			{
				// key = random int, value = insertion order
				tr.Set(list.Encode(rnd.Next()), Slice.FromInt32(i));
			}
			await tr.CommitAsync();
		}
	}
	Console.WriteLine("Done");

	// merge/sort them to get only one (hopefully sorted) list
	using (var tr = await db.BeginTransactionAsync(ct))
	{
		var subspace = await location.Resolve(tr);
		var mergesort = tr
			.MergeSort(
				sources.Select(source => KeySelectorPair.StartsWith(subspace.Encode(source))),
				(kvp) => subspace.DecodeLast<int>(kvp.Key)
			)
			.Take(B)
			.Select(kvp => subspace.Unpack(kvp.Key));

		Console.Write($"> MergeSort with limit {B:N0}... ");
		var sw = Stopwatch.StartNew();
		var results = await mergesort.ToListAsync();
		sw.Stop();
		Console.WriteLine("Done");

		Console.WriteLine($"Took {FormatTimeMilli(sw.Elapsed.TotalMilliseconds)} to merge sort {results.Count:N0} results from {K} lists of {N} items each");
		//foreach (var result in results)
		//{
		//	Console.WriteLine(result.Get<int>(-1));
		//}
	}
	Console.WriteLine();
}
private static async Task BenchConcurrentInsert(IFdbDatabase db, int k, int N, int size, CancellationToken ct)
{
	// Inserts a lot of small keys using k concurrent writers:
	//   k = number of concurrent batches
	//   N = total number of keys (rounded down to a multiple of k)
	//   size = value size in bytes
	// Each writer commits one transaction of n = N/k keys; all writers are released
	// at the same instant by a shared gate so their commits overlap.
	int n = N / k;
	// make sure that N is a multiple of k
	N = n * k;

	Console.WriteLine($"=== BenchConcurrentInsert(k={k:N0}, N={N:N0}, size={size:N0}) ===");
	Console.WriteLine($"Inserting {N:N0} keys in {k:N0} batches of {n:N0} with {size:N0}-bytes values...");

	// total estimated size of all transactions
	long totalPayloadSize = 0;

	var location = db.Root.AsBinary();

	var tasks = new List<Task>();
	var sem = new ManualResetEventSlim();
	for (int j = 0; j < k; j++)
	{
		int offset = j; // per-iteration copy of the loop counter, safe to capture
		// spin a task for the batch using TaskCreationOptions.LongRunning to make sure it runs in its own thread
		tasks.Add(Task.Factory.StartNew(async () =>
		{
			// BUGFIX: seed from 'offset' (the captured copy), not 'j': the 'for' loop
			// variable is shared by all closures, and since the tasks only start after
			// sem.Set(), every writer used to see j == k and get the SAME seed
			var rnd = new Random(1234567 * offset);
			var tmp = new byte[size];
			rnd.NextBytes(tmp);

			// block until all threads are ready
			sem.Wait();

			var x = Stopwatch.StartNew();
			using (var trans = await db.BeginTransactionAsync(ct))
			{
				x.Stop();
				//Console.WriteLine($"> [{offset}] got transaction in {FormatTimeMilli(x.Elapsed.TotalMilliseconds)}");

				var subspace = await location.Resolve(trans);

				// package the keys...
				x.Restart();
				for (int i = 0; i < n; i++)
				{
					// change the value a little bit
					tmp[0] = (byte) i;
					tmp[1] = (byte) (i >> 8);

					// NOTE(review): every batch writes the same keys 0..n-1 (the batch
					// index is not part of the key, despite the original "(Batch,
					// batch_index, i)" comment) — confirm whether the overlap is intended
					trans.Set(subspace[Slice.FromFixed64BE(i)], tmp.AsSlice());
				}
				x.Stop();
				//Console.WriteLine($"> [{offset}] packaged {n:N0} keys ({trans.Size:N0} bytes) in {FormatTimeMilli(x.Elapsed.TotalMilliseconds)}");

				// commit the transaction
				x.Restart();
				await trans.CommitAsync();
				x.Stop();
				//Console.WriteLine($"> [{offset}] committed {n} keys ({trans.Size:N0} bytes) in {FormatTimeMilli(x.Elapsed.TotalMilliseconds)}");

				Interlocked.Add(ref totalPayloadSize, trans.Size);
			}
		}, TaskCreationOptions.LongRunning).Unwrap());
	}

	// give time for threads to be ready
	await Task.Delay(100);

	// release the gate: all writers start at the same time
	var sw = Stopwatch.StartNew();
	sem.Set();

	// wait for total completion
	await Task.WhenAll(tasks);
	sw.Stop();
	Console.WriteLine($"* Total: {FormatTimeMilli(sw.Elapsed.TotalMilliseconds)}, {FormatTimeMicro(sw.Elapsed.TotalMilliseconds / N)} / write, {FormatThroughput(totalPayloadSize, sw.Elapsed.TotalSeconds)}, {N / sw.Elapsed.TotalSeconds:N0} write/sec");
	Console.WriteLine();
}
private static async Task BenchUpdateLotsOfKeysAsync(IFdbDatabase db, int N, CancellationToken ct)
{
	// Creates N keys holding a 60-byte segment (first half random, second half zero),
	// then patches a single byte in each of them within one big transaction.
	Console.WriteLine($"=== BenchUpdateLotsOfKeys(N={N:N0}) ===");

	var location = db.Root.ByKey("lists").AsTyped<int>();

	var rnd = new Random();

	Console.WriteLine($"> creating {N:N0} half filled keys");
	var segment = new byte[60];
	for (int i = 0; i < (segment.Length >> 1); i++)
	{
		segment[i] = (byte) rnd.Next(256);
	}

	// BUGFIX: the original reused the same transaction handle and called CommitAsync()
	// repeatedly inside the loop; a committed FDB transaction cannot be committed
	// again without a reset, so each batch of 1,000 keys now gets its own transaction.
	for (int i = 0; i < N; i += 1000)
	{
		using (var trans = await db.BeginTransactionAsync(ct))
		{
			var subspace = await location.Resolve(trans);
			for (int k = i; k < i + 1000 && k < N; k++)
			{
				trans.Set(subspace[k], segment.AsSlice());
			}
			await trans.CommitAsync();
		}
		Console.Write("\r" + i + " / " + N);
	}

	Console.WriteLine($"\rChanging one byte in each of the {N:N0} keys...");
	var sw = Stopwatch.StartNew();
	using (var trans = await db.BeginTransactionAsync(ct))
	{
		var subspace = await location.Resolve(trans);

		Console.WriteLine("READ");
		// get all the lists
		var data = await trans.GetBatchAsync(Enumerable.Range(0, N).Select(i => subspace[i]));

		// change one byte just past the filled half of each value
		Console.WriteLine("CHANGE");
		for (int i = 0; i < data.Length; i++)
		{
			var list = data[i].Value.GetBytes();
			list[(list.Length >> 1) + 1] = (byte) rnd.Next(256);
			trans.Set(data[i].Key, list.AsSlice());
		}

		Console.WriteLine("COMMIT");
		await trans.CommitAsync();
	}
	sw.Stop();

	Console.WriteLine($"Took {sw.Elapsed.TotalSeconds:N3} sec to patch one byte in {N:N0} lists ({FormatTimeMicro(sw.Elapsed.TotalMilliseconds / N)} /update, {N / sw.Elapsed.TotalSeconds:N0} update/sec)");
	Console.WriteLine();
}
private static async Task BenchBulkInsertThenBulkReadAsync(IFdbDatabase db, int N, int K, int B, CancellationToken ct, bool instrumented = false)
{
	// test that we can bulk write / bulk read
	Console.WriteLine($"=== BenchBulkInsertThenBulkRead(N={N:N0}, K={K:N0}, B={B:N0}) ===");

	// when instrumented, records (start, duration) of every commit to build a CSV timeline
	var timings = instrumented ? new List<KeyValuePair<double, double>>() : null;

	// put test values inside a namespace
	var location = db.Root.ByKey("BulkInsert");

	// cleanup everything
	using (var tr = await db.BeginTransactionAsync(ct))
	{
		tr.ClearRange(await location.Resolve(tr));
		await tr.CommitAsync();
	}

	// insert all values (batched)
	Console.WriteLine($"Inserting {N:N0} keys: ");
	var insert = Stopwatch.StartNew();
	int batches = 0;
	long bytes = 0;

	var start = Stopwatch.StartNew();

	var tasks = new List<Task>();
	// K workers, each responsible for a series of chunks of B keys
	foreach (var worker in FdbKey.Batched(0, N, K, B))
	{
		//hack
		tasks.Add(Task.Run(async () =>
		{
			foreach (var chunk in worker)
			{
				// one transaction per chunk: chunk.Key is the first index, chunk.Value the count
				using (var tr = await db.BeginTransactionAsync(ct))
				{
					var subspace = await location.Resolve(tr);
					int z = 0;
					foreach (int i in Enumerable.Range(chunk.Key, chunk.Value))
					{
						tr.Set(subspace.Encode(i), Slice.Zero(256));
						z++;
					}

					//Console.Write("#");
					//Console.WriteLine(" Commiting batch (" + tr.Size.ToString("N0", CultureInfo.InvariantCulture) + " bytes) " + z + " keys");

					var localStart = start.Elapsed.TotalSeconds;
					await tr.CommitAsync();
					var localDuration = start.Elapsed.TotalSeconds - localStart;
					if (instrumented)
					{
						// 'timings' is shared by all workers, hence the lock
						lock (timings) { timings.Add(new KeyValuePair<double, double>(localStart, localDuration)); }
					}
					Interlocked.Increment(ref batches);
					Interlocked.Add(ref bytes, tr.Size);
				}
			}
		}, ct));
	}
	await Task.WhenAll(tasks);

	insert.Stop();
	Console.WriteLine($"Committed {batches:N0} batches in {FormatTimeMilli(insert.Elapsed.TotalMilliseconds)} ({FormatTimeMilli(insert.Elapsed.TotalMilliseconds / batches)} / batch, {FormatTimeMicro(insert.Elapsed.TotalMilliseconds / N)} / item)");
	Console.WriteLine($"Throughput {FormatThroughput(bytes, insert.Elapsed.TotalSeconds)}, {N / insert.Elapsed.TotalSeconds:N0} write/sec");

	if (instrumented)
	{
		// dump the commit timeline as "start;end;duration" CSV lines
		var sb = new StringBuilder();
		foreach (var kvp in timings)
		{
			sb.Append(kvp.Key.ToString()).Append(';').Append((kvp.Key + kvp.Value).ToString()).Append(';').Append(kvp.Value.ToString()).AppendLine();
		}
#if DEBUG
		System.IO.File.WriteAllText(@"c:\temp\fdb\timings_" + N + "_" + K + "_" + B + ".csv", sb.ToString());
#else
		Console.WriteLine(sb.ToString());
#endif
	}

	// Read values

	using (var tr = await db.BeginTransactionAsync(ct))
	{
		Console.WriteLine("Reading all keys...");
		var subspace = await location.Resolve(tr);
		var sw = Stopwatch.StartNew();
		var items = await tr.GetRange(subspace.ToRange()).ToListAsync();
		sw.Stop();
		Console.WriteLine($"Took {FormatTimeMilli(sw.Elapsed.TotalMilliseconds)} to get {items.Count.ToString("N0", CultureInfo.InvariantCulture)} results ({items.Count / sw.Elapsed.TotalSeconds:N0} keys/sec)");
	}
	Console.WriteLine();
}
public static ValueTask<IFdbTransaction> BeginTransactionAsync(this IFdbDatabase db, CancellationToken ct)
{
	// Convenience overload: opens a read/write transaction with the default mode and no preexisting operation context.
	Contract.NotNull(db);
	return db.BeginTransactionAsync(FdbTransactionMode.Default, ct, default(FdbOperationContext));
}
private async Task RunAsync(IFdbDatabase db, IDynamicKeySubspace location, CancellationToken ct, Action done, int N, int K, int W)
{
	// Stress test for the worker pool: spins up W workers and K clients, where each
	// client schedules N messages; a 1-second watchdog timer reports progress and
	// invokes 'done' when everything has been received (or when the pool stalls).
	if (db == null)
	{
		throw new ArgumentNullException(nameof(db));
	}

	StringBuilder sb = new StringBuilder();
	// capture the transaction timing reports; they are all printed in the finally block
	db.SetDefaultLogHandler((log) =>
	{
		sb.AppendLine(log.GetTimingsReport(true));
		//Console.WriteLine(log.GetTimingsReport(true));
	});
	try
	{
		var workerPool = new FdbWorkerPool(location);
		Console.WriteLine($"workerPool at {location.GetPrefix():P}");

		// gates used to release all workers (resp. clients) at the same time
		var workerSignal = new AsyncCancelableMutex(ct);
		var clientSignal = new AsyncCancelableMutex(ct);

		int taskCounter = 0;

		int msgSent = 0;
		int msgReceived = 0;

		// message handler: count the message and simulate a small amount of work
		Func<FdbWorkerMessage, CancellationToken, Task> handler = async (msg, _ct) =>
		{
			Interlocked.Increment(ref msgReceived);

			//await Task.Delay(10 + Math.Abs(msg.Id.GetHashCode()) % 50);
			await Task.Delay(10).ConfigureAwait(false);
		};

		// worker body: waits for the start signal then runs the pool's worker loop
		Func<int, Task> worker = async (id) =>
		{
			await workerSignal.Task.ConfigureAwait(false);
			Console.WriteLine("Worker #" + id + " is starting");
			try
			{
				await workerPool.RunWorkerAsync(db, handler, ct).ConfigureAwait(false);
			}
			finally
			{
				Console.WriteLine("Worker #" + id + " has stopped");
			}
		};

		// client body: schedules N messages, with random pauses between sends
		Func<int, Task> client = async (id) =>
		{
			await clientSignal.Task.ConfigureAwait(false);
			await Task.Delay(10).ConfigureAwait(false);

			var rnd = new Random(id * 111);
			for (int i = 0; i < N; i++)
			{
				var taskId = Slice.FromString("T" + Interlocked.Increment(ref taskCounter));
				var taskBody = Slice.FromString("Message " + (i + 1) + " of " + N + " from client #" + id);

				await workerPool.ScheduleTaskAsync(db, taskId, taskBody, ct).ConfigureAwait(false);
				Interlocked.Increment(ref msgSent);

				//if (i > 0 && i % 10 == 0) Console.WriteLine("@@@ Client#" + id + " pushed " + (i + 1) + " / " + N + " messages");

				// random pacing: 2-in-5 chance of no delay, otherwise 10/100/500 ms
				switch (rnd.Next(5))
				{
					case 0: await Task.Delay(10).ConfigureAwait(false); break;
					case 1: await Task.Delay(100).ConfigureAwait(false); break;
					case 2: await Task.Delay(500).ConfigureAwait(false); break;
				}
			}
			Console.WriteLine("@@@ Client#" + id + " has finished!");
		};

		// debugging helper: dumps the raw content of the pool's subspace
		Func<string, Task> dump = async (label) =>
		{
			Console.WriteLine($"<dump label=\'{label}\' key=\'{location.GetPrefix():P}\'>");
			using (var tr = await db.BeginTransactionAsync(ct))
			{
				await tr.Snapshot
					.GetRange(KeyRange.StartsWith(location.GetPrefix()))
					.ForEachAsync((kvp) =>
					{
						Console.WriteLine($" - {location.PrettyPrint(kvp.Key)} = {kvp.Value:V}");
					}).ConfigureAwait(false);
			}
			Console.WriteLine("</dump>");
		};

		var workers = Enumerable.Range(0, W).Select((i) => worker(i)).ToArray();
		var clients = Enumerable.Range(0, K).Select((i) => client(i)).ToArray();

		DateTime start = DateTime.Now;
		DateTime last = start;
		int lastHandled = -1;
		// watchdog: fires every second, prints progress, detects stalls and completion
		using (var timer = new Timer((_) =>
		{
			var now = DateTime.Now;
			Console.WriteLine("@@@ T=" + now.Subtract(start) + ", sent: " + msgSent.ToString("N0") + ", recv: " + msgReceived.ToString("N0"));
			Console.WriteLine("### Workers: " + workerPool.IdleWorkers + " / " + workerPool.ActiveWorkers + " (" + new string('#', workerPool.IdleWorkers) + new string('.', workerPool.ActiveWorkers - workerPool.IdleWorkers) + "), sent: " + workerPool.MessageScheduled.ToString("N0") + ", recv: " + workerPool.MessageReceived.ToString("N0") + ", delta: " + (workerPool.MessageScheduled - workerPool.MessageReceived).ToString("N0") + ", busy: " + workerPool.WorkerBusyTime + " (avg " + workerPool.WorkerAverageBusyDuration.TotalMilliseconds.ToString("N3") + " ms)");

			if (now.Subtract(last).TotalSeconds >= 10)
			{
				//dump("timer").GetAwaiter().GetResult();
				last = now;
				if (lastHandled == msgReceived)
				{ // STALL ? no message received in the last 10 seconds
					Console.WriteLine("STALL! ");
					done();
				}
				lastHandled = msgReceived;
			}
			if (msgReceived >= K * N)
			{
				// all expected messages have been handled
				dump("complete").GetAwaiter().GetResult();
				done();
			}
		}, null, 1000, 1000))
		{
			var sw = Stopwatch.StartNew();

			// start the workers
			workerSignal.Set(async: true);
			await Task.Delay(500);
			await dump("workers started");

			// start the clients
			clientSignal.Set(async: true);
			await Task.WhenAll(clients);
			Console.WriteLine("Clients completed after " + sw.Elapsed);

			await Task.WhenAll(workers);
			Console.WriteLine("Workers completed after " + sw.Elapsed);
		}
	}
	finally
	{
		Console.WriteLine("---------------------------------------------------------------------------");
		Console.WriteLine("Transaction logs:");
		Console.WriteLine();
		Console.WriteLine(sb.ToString());
	}
}