private static async Task BenchConcurrentReadAsync(IFdbDatabase db, int N, CancellationToken ct)
        {
            // read a lot of small keys, concurrently

            Console.WriteLine($"=== BenchConcurrentRead(N={N:N0}) ===");
            Console.WriteLine($"Reading {N:N0} keys (concurrent)");

            var location = db.Partition.ByKey("hello");

            var keys = Enumerable.Range(0, N).Select(i => location.Keys.Encode(i)).ToArray();

            var sw = Stopwatch.StartNew();

            using (var trans = db.BeginTransaction(ct))
            {
                _ = await Task.WhenAll(keys.Select(key => trans.GetAsync(key)));
            }
            sw.Stop();
            Console.WriteLine($"Took {sw.Elapsed.TotalSeconds:N3} sec to read {N} items ({FormatTimeMicro(sw.Elapsed.TotalMilliseconds / keys.Length)}/read, {N / sw.Elapsed.TotalSeconds:N0} read/sec)");
            Console.WriteLine();

            sw = Stopwatch.StartNew();
            using (var trans = db.BeginTransaction(ct))
            {
                _ = await trans.GetBatchAsync(keys);
            }
            sw.Stop();
            Console.WriteLine($"Took {sw.Elapsed.TotalSeconds:N3} sec to read {keys.Length:N0} items ({FormatTimeMicro(sw.Elapsed.TotalMilliseconds / keys.Length)}/read, {N / sw.Elapsed.TotalSeconds:N0} read/sec)");
            Console.WriteLine();
        }
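
The two passes above contrast manual fan-out (Task.WhenAll over individual GetAsync calls) with the library's batched helper (GetBatchAsync). The speed-up comes from pipelining: GetAsync only queues the read, so all N requests are in flight before anything is awaited. A minimal sketch of that pattern, using only the calls shown above (BeginTransaction, GetAsync); the helper name is illustrative:

        // Sketch: issue every read before awaiting, so the requests share one
        // network pipeline instead of paying a round-trip per key.
        private static async Task<Slice[]> ReadManyPipelinedAsync(IFdbDatabase db, Slice[] keys, CancellationToken ct)
        {
            using (var trans = db.BeginTransaction(ct))
            {
                var pending = new Task<Slice>[keys.Length];
                for (int i = 0; i < keys.Length; i++)
                {
                    pending[i] = trans.GetAsync(keys[i]); // queued, not awaited
                }
                return await Task.WhenAll(pending); // single wait for all N reads
            }
        }
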
        private static async Task BenchConcurrentReadAsync(IFdbDatabase db, int N, CancellationToken ct)
        {
            // read a lot of small keys, concurrently

            Console.WriteLine("Reading " + N + " keys (concurrent)");

            var location = db.Partition("hello");

            var keys = Enumerable.Range(0, N).Select(i => location.Pack(i)).ToArray();

            var sw = Stopwatch.StartNew();

            using (var trans = db.BeginTransaction(ct))
            {
                var results = await Task.WhenAll(keys.Select(key => trans.GetAsync(key)));
            }
            sw.Stop();
            Console.WriteLine("Took " + sw.Elapsed + " to read " + N + " items (" + FormatTimeMicro(sw.Elapsed.TotalMilliseconds / keys.Length) + "/read)");

            sw = Stopwatch.StartNew();
            using (var trans = db.BeginTransaction(ct))
            {
                var results = await trans.GetBatchAsync(keys);
            }
            sw.Stop();
            Console.WriteLine("Took " + sw.Elapsed + " to read " + keys.Length + " items (" + FormatTimeMicro(sw.Elapsed.TotalMilliseconds / keys.Length) + "/read)");
        }
        private static async Task BenchUpdateLotsOfKeysAsync(IFdbDatabase db, int N, CancellationToken ct)
        {
            // change one byte in a large number of keys

            Console.WriteLine($"=== BenchUpdateLotsOfKeys(N={N:N0}) ===");

            var location = db.Partition.ByKey("lists");

            var rnd  = new Random();
            var keys = Enumerable.Range(0, N).Select(x => location.Keys.Encode(x)).ToArray();

            Console.WriteLine($"> creating {N:N0} half filled keys");
            var segment = new byte[60];

            for (int i = 0; i < (segment.Length >> 1); i++)
            {
                segment[i] = (byte)rnd.Next(256);
            }
            using (var trans = db.BeginTransaction(ct))
            {
                for (int i = 0; i < N; i += 1000)
                {
                    for (int k = i; k < i + 1000 && k < N; k++)
                    {
                        trans.Set(keys[k], segment.AsSlice());
                    }
                    await trans.CommitAsync();

                    Console.Write("\r" + i + " / " + N);
                }
            }

            Console.WriteLine($"\rChanging one byte in each of the {N:N0} keys...");
            var sw = Stopwatch.StartNew();

            using (var trans = db.BeginTransaction(ct))
            {
                Console.WriteLine("READ");
                // get all the lists
                var data = await trans.GetBatchAsync(keys);

                // change them
                Console.WriteLine("CHANGE");
                for (int i = 0; i < data.Length; i++)
                {
                    var list = data[i].Value.GetBytes();
                    list[(list.Length >> 1) + 1] = (byte)rnd.Next(256);
                    trans.Set(data[i].Key, list.AsSlice());
                }

                Console.WriteLine("COMMIT");
                await trans.CommitAsync();
            }
            sw.Stop();

            Console.WriteLine($"Took {sw.Elapsed.TotalSeconds:N3} sec to patch one byte in {N:N0} lists ({FormatTimeMicro(sw.Elapsed.TotalMilliseconds / N)} /update, {N / sw.Elapsed.TotalSeconds:N0} update/sec)");
            Console.WriteLine();
        }
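
The whole patch above runs in a single transaction, which only works while the combined payload stays under FoundationDB's transaction size limit (on the order of 10 MB). A hedged sketch of the same read-modify-write split into bounded batches, reusing only the calls from this snippet (GetBatchAsync, Set, CommitAsync); the batch size of 1,000 is an arbitrary assumption:

        // Sketch: patch the keys 1,000 at a time so each transaction stays small.
        private static async Task PatchInBatchesAsync(IFdbDatabase db, Slice[] keys, CancellationToken ct)
        {
            var rnd = new Random();
            for (int offset = 0; offset < keys.Length; offset += 1000)
            {
                var chunk = keys.Skip(offset).Take(1000).ToArray();
                using (var trans = db.BeginTransaction(ct))
                {
                    var data = await trans.GetBatchAsync(chunk);
                    for (int i = 0; i < data.Length; i++)
                    {
                        var list = data[i].Value.GetBytes();
                        list[list.Length >> 1] = (byte)rnd.Next(256); // flip one byte, as above
                        trans.Set(data[i].Key, list.AsSlice());
                    }
                    await trans.CommitAsync();
                }
            }
        }
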
        private static async Task BenchMergeSortAsync(IFdbDatabase db, int N, int K, int B, CancellationToken ct)
        {
            Console.WriteLine($"=== BenchMergeSort(N={N:N0}, K={K:N0}, B={B:N0}) ===");

            // create multiple lists
            var location = db.GlobalSpace.Partition.ByKey("MergeSort");
            await db.ClearRangeAsync(location, ct);

            var sources = Enumerable.Range(0, K).Select(i => 'A' + i).ToArray();
            var rnd     = new Random();

            // insert a number of random number lists
            Console.Write($"> Inserting {(K * N):N0} items... ");
            foreach (var source in sources)
            {
                using (var tr = db.BeginTransaction(ct))
                {
                    var list = location.Partition.ByKey(source);
                    for (int i = 0; i < N; i++)
                    {
                        tr.Set(list.Keys.Encode(rnd.Next()), Slice.FromInt32(i));
                    }
                    await tr.CommitAsync();
                }
            }
            Console.WriteLine("Done");

            // merge/sort them to get only one (hopefully sorted) list

            using (var tr = db.BeginTransaction(ct))
            {
                var mergesort = tr
                    .MergeSort(
                        sources.Select(source => KeySelectorPair.StartsWith(location.Keys.Encode(source))),
                        (kvp) => location.Keys.DecodeLast<int>(kvp.Key)
                    )
                    .Take(B)
                    .Select(kvp => location.Keys.Unpack(kvp.Key));

                Console.Write($"> MergeSort with limit {B:N0}... ");
                var sw      = Stopwatch.StartNew();
                var results = await mergesort.ToListAsync();

                sw.Stop();
                Console.WriteLine("Done");

                Console.WriteLine($"Took {FormatTimeMilli(sw.Elapsed.TotalMilliseconds)} to merge sort {results.Count:N0} results from {K} lists of {N} items each");

                //foreach (var result in results)
                //{
                //	Console.WriteLine(result.Get<int>(-1));
                //}
            }
            Console.WriteLine();
        }
        private static async Task BenchMergeSortAsync(IFdbDatabase db, int N, int K, int B, CancellationToken ct)
        {
            // create multiple lists
            var location = db.Partition("MergeSort");
            await db.ClearRangeAsync(location, ct);

            var sources = Enumerable.Range(0, K).Select(i => 'A' + i).ToArray();
            var rnd     = new Random();

            // insert a number of random number lists
            Console.Write("> Inserting " + (K * N).ToString("N0", CultureInfo.InvariantCulture) + " items... ");
            foreach (var source in sources)
            {
                using (var tr = db.BeginTransaction(ct))
                {
                    var list = location.Partition(source);
                    for (int i = 0; i < N; i++)
                    {
                        tr.Set(list.Pack(rnd.Next()), Slice.FromInt32(i));
                    }
                    await tr.CommitAsync();
                }
            }
            Console.WriteLine("Done");

            // merge/sort them to get only one (hopefully sorted) list

            using (var tr = db.BeginTransaction(ct))
            {
                var mergesort = tr
                    .MergeSort(
                        sources.Select(source => FdbKeySelectorPair.StartsWith(location.Pack(source))),
                        (kvp) => location.UnpackLast<int>(kvp.Key)
                    )
                    .Take(B)
                    .Select(kvp => location.Unpack(kvp.Key));

                Console.Write("> MergeSort with limit " + B + "... ");
                var sw      = Stopwatch.StartNew();
                var results = await mergesort.ToListAsync();

                sw.Stop();
                Console.WriteLine("Done");

                Console.WriteLine("Took " + FormatTimeMilli(sw.Elapsed.TotalMilliseconds) + " to merge sort " + results.Count + " results from " + K + " lists of " + N + " items each");

                //foreach (var result in results)
                //{
                //	Console.WriteLine(result.Get<int>(-1));
                //}
            }
        }
        private static async Task BenchSerialReadAsync(IFdbDatabase db, int N, CancellationToken ct)
        {
            Console.WriteLine("Reading " + N + " keys (serial, slow!)");

            // read a lot of small keys, one by one

            var location = db.Partition("hello");

            var sw = Stopwatch.StartNew();

            for (int k = 0; k < N; k += 1000)
            {
                using (var trans = db.BeginTransaction(ct))
                {
                    for (int i = k; i < N && i < k + 1000; i++)
                    {
                        var result = await trans.GetAsync(location.Pack(i));
                    }
                }
                Console.Write(".");
            }
            Console.WriteLine();
            sw.Stop();
            Console.WriteLine("Took " + sw.Elapsed + " to read " + N + " items (" + FormatTimeMicro(sw.Elapsed.TotalMilliseconds / N) + "/read)");
        }
        private static async Task BenchInsertSmallKeysAsync(IFdbDatabase db, int N, int size, CancellationToken ct)
        {
            // insert a lot of small keys, in a single transaction
            var rnd = new Random();
            var tmp = new byte[size];

            var subspace = db.Partition("Batch");

            var times = new List<TimeSpan>();

            for (int k = 0; k <= 4; k++)
            {
                var sw = Stopwatch.StartNew();
                using (var trans = db.BeginTransaction(ct))
                {
                    rnd.NextBytes(tmp);
                    for (int i = 0; i < N; i++)
                    {
                        tmp[0] = (byte)i;
                        tmp[1] = (byte)(i >> 8);
                        // (Batch, 1) = [......]
                        // (Batch, 2) = [......]
                        trans.Set(subspace.Pack(k * N + i), Slice.Create(tmp));
                    }
                    await trans.CommitAsync();
                }
                sw.Stop();
                times.Add(sw.Elapsed);
            }
            var min = times.Min();

            Console.WriteLine("[" + Thread.CurrentThread.ManagedThreadId + "] Took " + min.TotalSeconds.ToString("N3", CultureInfo.InvariantCulture) + " sec to insert " + N + " " + size + "-bytes items (" + FormatTimeMicro(min.TotalMilliseconds / N) + "/write)");
        }
        private static async Task BenchUpdateSameKeyLotsOfTimesAsync(IFdbDatabase db, int N, CancellationToken ct)
        {
            // continuously update same key by adding a little bit more

            Console.WriteLine($"=== BenchUpdateSameKeyLotsOfTimes(N={N:N0}) ===");
            Console.WriteLine($"Updating the same list {N:N0} times...");

            var list   = new byte[N];
            var update = Stopwatch.StartNew();
            var key    = db.GlobalSpace.Keys.Encode("list");

            for (int i = 0; i < N; i++)
            {
                list[i] = (byte)i;
                using (var trans = db.BeginTransaction(ct))
                {
                    trans.Set(key, list.AsSlice());
                    await trans.CommitAsync();
                }
                if (i % 100 == 0)
                {
                    Console.Write($"\r> {i:N0} / {N:N0}");
                }
            }
            update.Stop();

            Console.WriteLine($"\rTook {update.Elapsed.TotalSeconds:N3} sec to fill a byte[{N:N0}] one by one ({FormatTimeMicro(update.Elapsed.TotalMilliseconds / N)}/update, {N / update.Elapsed.TotalSeconds:N0} update/sec)");
            Console.WriteLine();
        }
        private static async Task BenchSerialReadAsync(IFdbDatabase db, int N, CancellationToken ct)
        {
            // read a lot of small keys, one by one

            Console.WriteLine($"=== BenchSerialRead(N={N:N0}) ===");
            Console.WriteLine($"Reading {N:N0} keys (serial, slow!)");

            var location = db.Partition.ByKey("hello");

            var sw = Stopwatch.StartNew();

            for (int k = 0; k < N; k += 1000)
            {
                using (var trans = db.BeginTransaction(ct))
                {
                    for (int i = k; i < N && i < k + 1000; i++)
                    {
                        _ = await trans.GetAsync(location.Keys.Encode(i));
                    }
                }
                Console.Write(".");
            }
            Console.WriteLine();
            sw.Stop();
            Console.WriteLine($"Took {sw.Elapsed.TotalSeconds:N3} sec to read {N:N0} items ({FormatTimeMicro(sw.Elapsed.TotalMilliseconds / N)}/read, {N / sw.Elapsed.TotalSeconds:N0} read/sec)");
            Console.WriteLine();
        }
        private static async Task BenchUpdateSameKeyLotsOfTimesAsync(IFdbDatabase db, int N, CancellationToken ct)
        {
            // continuously update same key by adding a little bit more

            Console.WriteLine("Updating the same list " + N + " times...");

            var list   = new byte[N];
            var update = Stopwatch.StartNew();
            var key    = db.GlobalSpace.Pack("list");

            for (int i = 0; i < N; i++)
            {
                list[i] = (byte)i;
                using (var trans = db.BeginTransaction(ct))
                {
                    trans.Set(key, Slice.Create(list));
                    await trans.CommitAsync();
                }
                if (i % 100 == 0)
                {
                    Console.Write("\r> " + i + " / " + N);
                }
            }
            update.Stop();

            Console.WriteLine("\rTook " + update.Elapsed + " to fill a byte[" + N + "] one by one (" + FormatTimeMicro(update.Elapsed.TotalMilliseconds / N) + "/update)");
        }
 protected async Task DeleteSubspace(IFdbDatabase db, IKeySubspace subspace)
 {
     using (var tr = db.BeginTransaction(this.Cancellation))
     {
         tr.ClearRange(subspace);
         await tr.CommitAsync();
     }
 }
 public static IFdbTransaction BeginTransaction(this IFdbDatabase db, CancellationToken cancellationToken)
 {
     if (db == null)
     {
         throw new ArgumentNullException("db");
     }
     return db.BeginTransaction(FdbTransactionMode.Default, cancellationToken, default(FdbOperationContext));
 }
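
The extension above is what lets every snippet on this page write db.BeginTransaction(ct): it simply forwards to the three-argument overload with the default mode and no pre-existing operation context. The two calls below are therefore equivalent:

        // Sketch: identical transactions, created through the shorthand and the full overload.
        using (var tr1 = db.BeginTransaction(ct)) { /* ... */ }
        using (var tr2 = db.BeginTransaction(FdbTransactionMode.Default, ct, default(FdbOperationContext))) { /* ... */ }
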
        private static async Task BurnerThread(IFdbDatabase db, CancellationToken ct)
        {
            var folder = await db.ReadWriteAsync(async tr =>
            {
                var x = await db.Directory.CreateOrOpenAsync(tr, new[] { "Benchmarks", "Burner", "Sequential" });
                tr.ClearRange(x);
                return x;
            }, ct);

            long pos = 0;

            Random rnd;

            lock (Rnd)
            {
                rnd = new Random(Rnd.Next());
            }

            using (var tr = db.BeginTransaction(ct))
            {
                while (!ct.IsCancellationRequested)
                {
                    FdbException error = null;
                    try
                    {
                        tr.Reset();

                        for (int i = 0; i < N; i++)
                        {
                            long x = Randomized ? rnd.Next() : pos + i;

                            tr.Set(folder.Keys.Encode(x, Suffix), Value);
                            Interlocked.Increment(ref Keys);
                        }
                        pos += N;

                        await tr.CommitAsync();

                        Interlocked.Increment(ref Transactions);
                        Interlocked.Add(ref Bytes, tr.Size);
                    }
                    catch (FdbException e)
                    {
                        error = e;
                    }

                    if (error != null && !ct.IsCancellationRequested)
                    {
                        await tr.OnErrorAsync(error.Code);
                    }
                }
            }
        }
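
BurnerThread hand-rolls the retry loop that ExecuteInternal (further down this page) implements generically: Reset the transaction, attempt the work, catch FdbException, and let OnErrorAsync decide whether the code is retryable. A reduced sketch of that skeleton for a one-shot operation; the "queue reads/writes" placeholder is where application work would go:

        // Sketch: manual retry loop around a single logical operation.
        using (var tr = db.BeginTransaction(ct))
        {
            while (!ct.IsCancellationRequested)
            {
                FdbException error = null;
                try
                {
                    tr.Reset(); // clear any state left over from a failed attempt
                    // ... queue reads/writes on tr ...
                    await tr.CommitAsync();
                    break; // success (the burner above loops forever by design)
                }
                catch (FdbException e)
                {
                    error = e;
                }
                if (error != null && !ct.IsCancellationRequested)
                {
                    await tr.OnErrorAsync(error.Code); // backs off if retryable, rethrows otherwise
                }
            }
        }
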
        public static async Task DumpSubspace([NotNull] IFdbDatabase db, [NotNull] IFdbSubspace subspace, CancellationToken ct)
        {
            Assert.That(db, Is.Not.Null);
            Assert.That(db.GlobalSpace.Contains(subspace.Key), Is.True, "Using a location outside of the test database partition!!! This is probably a bug in the test...");

            // do not log
            db = db.WithoutLogging();

            using (var tr = db.BeginTransaction(ct))
            {
                await DumpSubspace(tr, subspace).ConfigureAwait(false);
            }
        }
        public async Task CanReadKeyThatIsPresentInDatabase()
        {
            var key   = KeyHelper.GetRandomKey();
            var value = new byte[] { 0x01, 0x02, 0x03 };

            using (var tx = _db.BeginTransaction())
            {
                tx.Set(key.Span, value);

                await tx.CommitAsync();
            }

            FdbValue fromDb;

            using (var tx = _db.BeginSnapshotTransaction())
            {
                fromDb = await tx.GetAsync(key.Span);
            }

            Check.That(fromDb.IsPresent).IsTrue();
            Check.That(fromDb.Value.ToArray()).IsEqualTo(value);
        }
        private static async Task TestSimpleTransactionAsync(IFdbDatabase db, CancellationToken ct)
        {
            Console.WriteLine("=== TestSimpleTransaction() ===");

            var location = db.GlobalSpace;

            Console.WriteLine("Starting new transaction...");
            using (var trans = db.BeginTransaction(ct))
            {
                Console.WriteLine("> Transaction ready");

                Console.WriteLine("Getting read version...");
                var readVersion = await trans.GetReadVersionAsync();

                Console.WriteLine("> Read Version = " + readVersion);

                Console.WriteLine("Getting 'hello'...");
                trans.Set(location.Keys.Encode("hello"), Slice.FromString("hello"));
                var result = await trans.GetAsync(location.Keys.Encode("hello"));

                if (result.IsNull)
                {
                    Console.WriteLine("> hello NOT FOUND");
                }
                else
                {
                    Console.WriteLine($"> hello = {result:V}");
                }

                Console.WriteLine("Setting 'Foo' = 'Bar'");
                trans.Set(location.Keys.Encode("Foo"), Slice.FromString("Bar"));

                Console.WriteLine("Setting 'TopSecret' = rnd(512)");
                var data = new byte[512];
                new Random(1234).NextBytes(data);
                trans.Set(location.Keys.Encode("TopSecret"), data.AsSlice());

                Console.WriteLine("Committing transaction...");
                await trans.CommitAsync();

                //trans.Commit();
                Console.WriteLine("> Committed!");

                Console.WriteLine("Getting comitted version...");
                var writeVersion = trans.GetCommittedVersion();
                Console.WriteLine("> Commited Version = " + writeVersion);
            }
        }
        public async Task RunStatus(IFdbDatabase db, CancellationToken ct)
        {
            var countersLocation   = this.WorkerPool.Subspace.Partition.ByKey(Slice.FromChar('C'));
            var idleLocation       = this.WorkerPool.Subspace.Partition.ByKey(Slice.FromChar('I'));
            var busyLocation       = this.WorkerPool.Subspace.Partition.ByKey(Slice.FromChar('B'));
            var tasksLocation      = this.WorkerPool.Subspace.Partition.ByKey(Slice.FromChar('T'));
            var unassignedLocation = this.WorkerPool.Subspace.Partition.ByKey(Slice.FromChar('U'));

            using (var tr = db.BeginTransaction(ct))
            {
                var counters = await tr.Snapshot
                    .GetRange(countersLocation.Keys.ToRange())
                    .Select(kvp => new KeyValuePair<string, long>(countersLocation.Keys.DecodeLast<string>(kvp.Key), kvp.Value.ToInt64()))
                    .ToListAsync()
                    .ConfigureAwait(false);

                Console.WriteLine("Status at " + DateTimeOffset.Now.ToString("O"));
                foreach (var counter in counters)
                {
                    Console.WriteLine(" - " + counter.Key + " = " + counter.Value);
                }

                Console.WriteLine("Dump:");
                Console.WriteLine("> Idle");
                await tr.Snapshot.GetRange(idleLocation.Keys.ToRange()).ForEachAsync((kvp) =>
                {
                    Console.WriteLine($"- Idle.{idleLocation.Keys.Unpack(kvp.Key)} = {kvp.Value:V}");
                });

                Console.WriteLine("> Busy");
                await tr.Snapshot.GetRange(busyLocation.Keys.ToRange()).ForEachAsync((kvp) =>
                {
                    Console.WriteLine($"- Busy.{busyLocation.Keys.Unpack(kvp.Key)} = {kvp.Value:V}");
                });

                Console.WriteLine("> Unassigned");
                await tr.Snapshot.GetRange(unassignedLocation.Keys.ToRange()).ForEachAsync((kvp) =>
                {
                    Console.WriteLine($"- Unassigned.{unassignedLocation.Keys.Unpack(kvp.Key)} = {kvp.Value:V}");
                });

                Console.WriteLine("> Tasks");
                await tr.Snapshot.GetRange(tasksLocation.Keys.ToRange()).ForEachAsync((kvp) =>
                {
                    Console.WriteLine($"- Tasks.{tasksLocation.Keys.Unpack(kvp.Key)} = {kvp.Value:V}");
                });

                Console.WriteLine("<");
            }
        }
        public virtual IFdbTransaction BeginTransaction(FdbTransactionMode mode, CancellationToken ct = default(CancellationToken), FdbOperationContext context = null)
        {
            ThrowIfDisposed();

            // enforce read-only mode!
            if (m_readOnly)
            {
                mode |= FdbTransactionMode.ReadOnly;
            }

            if (context == null)
            {
                context = new FdbOperationContext(this, mode, ct);
            }

            return m_database.BeginTransaction(mode, ct, context);
        }
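
Because the requested mode is OR-ed with FdbTransactionMode.ReadOnly whenever the wrapper was opened read-only, a caller cannot accidentally escalate to a writable transaction. A hypothetical usage sketch (readOnlyDb and someKey are illustrative names, not from the snippet):

        // Sketch: on a read-only wrapper, reads succeed and writes are rejected locally.
        using (var tr = readOnlyDb.BeginTransaction(FdbTransactionMode.Default, ct))
        {
            var value = await tr.GetAsync(someKey); // fine
            // tr.Set(someKey, value);              // expected to throw: the transaction was forced read-only
        }
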
        private static async Task BenchClearAsync(IFdbDatabase db, int N, CancellationToken ct)
        {
            // clear a lot of small keys, in a single transaction

            var location = db.Partition(Slice.FromAscii("hello"));

            var sw = Stopwatch.StartNew();

            using (var trans = db.BeginTransaction(ct))
            {
                for (int i = 0; i < N; i++)
                {
                    trans.Clear(location.Pack(i));
                }

                await trans.CommitAsync();
            }
            sw.Stop();
            Console.WriteLine("Took " + sw.Elapsed + " to clear " + N + " items (" + FormatTimeMicro(sw.Elapsed.TotalMilliseconds / N) + "/write)");
        }
        private static async Task BenchSerialWriteAsync(IFdbDatabase db, int N, CancellationToken ct)
        {
            // write a lot of small keys, one by one

            var location = db.Partition("hello");

            var             sw    = Stopwatch.StartNew();
            IFdbTransaction trans = null;

            try
            {
                for (int i = 0; i < N; i++)
                {
                    if (trans == null)
                    {
                        trans = db.BeginTransaction(ct);
                    }
                    trans.Set(location.Pack(i), Slice.FromInt32(i));
                    if (trans.Size > 100 * 1024)
                    {
                        await trans.CommitAsync();

                        trans.Dispose();
                        trans = null;
                    }
                }
                if (trans != null)
                {
                    await trans.CommitAsync(); // the last chunk may already have been committed
                }
            }
            finally
            {
                if (trans != null)
                {
                    trans.Dispose();
                }
            }
            sw.Stop();
            Console.WriteLine("Took " + sw.Elapsed + " to read " + N + " items (" + FormatTimeMicro(sw.Elapsed.TotalMilliseconds / N) + "/read)");
        }
        private static async Task BenchInsertSmallKeysAsync(IFdbDatabase db, int N, int size, CancellationToken ct)
        {
            // insert a lot of small keys, in a single transaction

            Console.WriteLine($"=== BenchInsertSmallKeys(N={N:N0}, size={size:N0}) ===");

            var rnd = new Random();
            var tmp = new byte[size];

            var subspace = db.Partition.ByKey("Batch");

            var times = new List<TimeSpan>();

            for (int k = 0; k <= 4; k++)
            {
                var sw = Stopwatch.StartNew();
                using (var trans = db.BeginTransaction(ct))
                {
                    rnd.NextBytes(tmp);
                    for (int i = 0; i < N; i++)
                    {
                        tmp[0] = (byte)i;
                        tmp[1] = (byte)(i >> 8);
                        // (Batch, 1) = [......]
                        // (Batch, 2) = [......]
                        trans.Set(subspace.Keys.Encode(k * N + i), tmp.AsSlice());
                    }
                    await trans.CommitAsync();
                }
                sw.Stop();
                times.Add(sw.Elapsed);
            }
            var min = times.Min();
            var avg = times.Sum(x => x.TotalMilliseconds) / times.Count;

            Console.WriteLine($"[{Thread.CurrentThread.ManagedThreadId}] Took {min.TotalSeconds.ToString("N3", CultureInfo.InvariantCulture)} sec to insert {N} {size}-bytes items (min={FormatTimeMicro(min.TotalMilliseconds / N)}/write, avg={FormatTimeMicro(avg)}/write)");
            Console.WriteLine();
        }
        private static async Task BenchClearAsync(IFdbDatabase db, int N, CancellationToken ct)
        {
            // clear a lot of small keys, in a single transaction

            Console.WriteLine($"=== BenchClear(N={N:N0}) ===");

            var location = db.Partition.ByKey(Slice.FromStringAscii("hello"));

            var sw = Stopwatch.StartNew();

            using (var trans = db.BeginTransaction(ct))
            {
                for (int i = 0; i < N; i++)
                {
                    trans.Clear(location.Keys.Encode(i));
                }

                await trans.CommitAsync();
            }
            sw.Stop();
            Console.WriteLine($"Took {sw.Elapsed.TotalSeconds:N3} sec to clear {N:N0} items ({FormatTimeMicro(sw.Elapsed.TotalMilliseconds / N)}/write, {N / sw.Elapsed.TotalSeconds:N0} clear/sec)");
            Console.WriteLine();
        }
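
Clearing N keys one by one queues N individual mutations. When the keys form a contiguous subspace, as here, a single range clear does the same job in one mutation; tr.ClearRange(subspace) is used exactly this way in the DeleteSubspace helper earlier on this page. A minimal sketch:

        // Sketch: clear the whole 'hello' subspace with one range mutation.
        private static async Task BenchClearRangeAsync(IFdbDatabase db, CancellationToken ct)
        {
            var location = db.Partition.ByKey(Slice.FromStringAscii("hello"));
            using (var trans = db.BeginTransaction(ct))
            {
                trans.ClearRange(location);
                await trans.CommitAsync();
            }
        }
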
        private static async Task BenchSerialWriteAsync(IFdbDatabase db, int N, CancellationToken ct)
        {
            // write a lot of small keys, one by one

            Console.WriteLine($"=== BenchSerialWrite(N={N:N0}) ===");

            var             location = db.Partition.ByKey("hello");
            var             sw       = Stopwatch.StartNew();
            IFdbTransaction trans    = null;

            try
            {
                for (int i = 0; i < N; i++)
                {
                    if (trans == null)
                    {
                        trans = db.BeginTransaction(ct);
                    }
                    trans.Set(location.Keys.Encode(i), Slice.FromInt32(i));
                    if (trans.Size > 100 * 1024)
                    {
                        await trans.CommitAsync();

                        trans.Dispose();
                        trans = null;
                    }
                }
                if (trans != null)
                {
                    await trans.CommitAsync(); // the last chunk may already have been committed
                }
            }
            finally
            {
                trans?.Dispose();
            }
            sw.Stop();
            Console.WriteLine($"Took {sw.Elapsed.TotalSeconds:N3} sec to read {N:N0} items ({FormatTimeMicro(sw.Elapsed.TotalMilliseconds / N)}/read, {N/sw.Elapsed.TotalSeconds:N0} read/sec)");
            Console.WriteLine();
        }
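
The pattern above (commit whenever trans.Size crosses ~100 KB, then start a fresh transaction) bounds each transaction well under the size limit while keeping commits infrequent. A generalized sketch of the same idea, using only the members exercised above; note the final commit is guarded so an exactly-full last chunk does not dereference a null transaction:

        // Sketch: size-bounded chunked writer.
        private static async Task WriteChunkedBySizeAsync(IFdbDatabase db, IEnumerable<KeyValuePair<Slice, Slice>> items, CancellationToken ct)
        {
            IFdbTransaction trans = null;
            try
            {
                foreach (var item in items)
                {
                    trans = trans ?? db.BeginTransaction(ct);
                    trans.Set(item.Key, item.Value);
                    if (trans.Size > 100 * 1024) // flush every ~100 KB of mutations
                    {
                        await trans.CommitAsync();
                        trans.Dispose();
                        trans = null;
                    }
                }
                if (trans != null) await trans.CommitAsync(); // flush the partial tail, if any
            }
            finally
            {
                trans?.Dispose();
            }
        }
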
		private static async Task BenchConcurrentInsert(IFdbDatabase db, int k, int N, int size, CancellationToken ct)
		{
			// insert a lot of small keys, in multiple batches running in parallel
			// k = number of threads
			// N = total number of keys
			// size = value size (bytes)
			// n = keys per batch (N/k)

			int n = N / k;
			// make sure that N is multiple of k
			N = n * k;

			Console.WriteLine("Inserting " + N + " keys in " + k + " batches of " + n + " with " + size + "-bytes values...");

			// store every key under ("Batch", worker_index, i)
			var subspace = db.Partition.ByKey("Batch");
			// total estimated size of all transactions
			long totalPayloadSize = 0;

			var tasks = new List<Task>();
			var sem = new ManualResetEventSlim();
			for (int j = 0; j < k; j++)
			{
				int offset = j;
				// spin a task for the batch using TaskCreationOptions.LongRunning to make sure it runs in its own thread
				tasks.Add(Task.Factory.StartNew(async () =>
				{
					var rnd = new Random(1234567 * offset); // use the captured copy, not the shared `for` loop variable
					var tmp = new byte[size];
					rnd.NextBytes(tmp);

					// block until all threads are ready
					sem.Wait();

					var x = Stopwatch.StartNew();
					using (var trans = db.BeginTransaction(ct))
					{
						x.Stop();
						Console.WriteLine("> [" + offset + "] got transaction in " + FormatTimeMilli(x.Elapsed.TotalMilliseconds));

						// package the keys...
						x.Restart();
						for (int i = 0; i < n; i++)
						{
							// change the value a little bit
							tmp[0] = (byte)i;
							tmp[1] = (byte)(i >> 8);

							// ("Batch", batch_index, i) = [..random..]
							trans.Set(subspace.Keys.Encode(i), Slice.Create(tmp));
						}
						x.Stop();
						Console.WriteLine("> [" + offset + "] packaged " + n + " keys (" + trans.Size.ToString("N0", CultureInfo.InvariantCulture) + " bytes) in " + FormatTimeMilli(x.Elapsed.TotalMilliseconds));

						// commit the transaction
						x.Restart();
						await trans.CommitAsync();
						x.Stop();
						Console.WriteLine("> [" + offset + "] committed " + n + " keys (" + trans.Size.ToString("N0", CultureInfo.InvariantCulture) + " bytes) in " + FormatTimeMilli(x.Elapsed.TotalMilliseconds));

						Interlocked.Add(ref totalPayloadSize, trans.Size);
					}

				}, TaskCreationOptions.LongRunning).Unwrap());
			}
			// give time for threads to be ready
			await Task.Delay(100);

			// start
			var sw = Stopwatch.StartNew();
			sem.Set();

			// wait for total completion
			await Task.WhenAll(tasks);
			sw.Stop();
			Console.WriteLine("* Total: " + FormatTimeMilli(sw.Elapsed.TotalMilliseconds) + ", " + FormatTimeMicro(sw.Elapsed.TotalMilliseconds / N) + " / write, " + FormatThroughput(totalPayloadSize, sw.Elapsed.TotalSeconds));
			Console.WriteLine();
		}
		private static async Task BenchMergeSortAsync(IFdbDatabase db, int N, int K, int B, CancellationToken ct)
		{
			// create multiple lists
			var location = db.Partition.ByKey("MergeSort");
			await db.ClearRangeAsync(location, ct);

			var sources = Enumerable.Range(0, K).Select(i => 'A' + i).ToArray();
			var rnd = new Random();

			// insert a number of random number lists
			Console.Write("> Inserting " + (K * N).ToString("N0", CultureInfo.InvariantCulture) + " items... ");
			foreach (var source in sources)
			{
				using (var tr = db.BeginTransaction(ct))
				{
					var list = location.Partition.ByKey(source);
					for (int i = 0; i < N; i++)
					{
						tr.Set(list.Keys.Encode(rnd.Next()), Slice.FromInt32(i));
					}
					await tr.CommitAsync();
				}
			}
			Console.WriteLine("Done");

			// merge/sort them to get only one (hopefully sorted) list

			using (var tr = db.BeginTransaction(ct))
			{
				var mergesort = tr
					.MergeSort(
						sources.Select(source => FdbKeySelectorPair.StartsWith(location.Keys.Encode(source))),
						(kvp) => location.Keys.DecodeLast<int>(kvp.Key)
					)
					.Take(B)
					.Select(kvp => location.Keys.Unpack(kvp.Key));

				Console.Write("> MergeSort with limit " + B + "... ");
				var sw = Stopwatch.StartNew();
				var results = await mergesort.ToListAsync();
				sw.Stop();
				Console.WriteLine("Done");

				Console.WriteLine("Took " + FormatTimeMilli(sw.Elapsed.TotalMilliseconds) + " to merge sort " + results.Count + " results from " + K + " lists of " + N + " items each");

				//foreach (var result in results)
				//{
				//	Console.WriteLine(result.Get<int>(-1));
				//}
			}
		}
		private static async Task BenchBulkInsertThenBulkReadAsync(IFdbDatabase db, int N, int K, int B, CancellationToken ct, bool instrumented = false)
		{
			// test that we can bulk write / bulk read

			var timings = instrumented ? new List<KeyValuePair<double, double>>() : null;

			// put test values inside a namespace
			var subspace = db.Partition.ByKey("BulkInsert");

			// cleanup everything
			using (var tr = db.BeginTransaction(ct))
			{
				tr.ClearRange(subspace);
				await tr.CommitAsync();
			}

			// insert all values (batched)
			Console.WriteLine("Inserting " + N.ToString("N0", CultureInfo.InvariantCulture) + " keys: ");
			var insert = Stopwatch.StartNew();
			int batches = 0;
			long bytes = 0;

			var start = Stopwatch.StartNew();

			var tasks = new List<Task>();
			foreach (var worker in FdbKey.Batched(0, N, K, B))
			{
				//hack
				tasks.Add(Task.Run(async () =>
				{
					foreach (var chunk in worker)
					{
						using (var tr = db.BeginTransaction(ct))
						{
							int z = 0;
							foreach (int i in Enumerable.Range(chunk.Key, chunk.Value))
							{
								tr.Set(subspace.Keys.Encode(i), Slice.Create(new byte[256]));
								z++;
							}

							//Console.Write("#");
							//Console.WriteLine("  Commiting batch (" + tr.Size.ToString("N0", CultureInfo.InvariantCulture) + " bytes) " + z + " keys");
							var localStart = start.Elapsed.TotalSeconds;
							await tr.CommitAsync();
							var localDuration = start.Elapsed.TotalSeconds - localStart;
							if (instrumented)
							{
								lock (timings) { timings.Add(new KeyValuePair<double, double>(localStart, localDuration)); }
							}
							Interlocked.Increment(ref batches);
							Interlocked.Add(ref bytes, tr.Size);
						}

					}
				}, ct));

			}
			await Task.WhenAll(tasks);

			insert.Stop();
			Console.WriteLine("Committed " + batches + " batches in " + FormatTimeMilli(insert.Elapsed.TotalMilliseconds) + " (" + FormatTimeMilli(insert.Elapsed.TotalMilliseconds / batches) + " / batch, " + FormatTimeMicro(insert.Elapsed.TotalMilliseconds / N) + " / item");
			Console.WriteLine("Throughput " + FormatThroughput(bytes, insert.Elapsed.TotalSeconds));

			if (instrumented)
			{
				var sb = new StringBuilder();
				foreach (var kvp in timings)
				{
					sb.Append(kvp.Key.ToString()).Append(';').Append((kvp.Key + kvp.Value).ToString()).Append(';').Append(kvp.Value.ToString()).AppendLine();
				}
#if DEBUG
				System.IO.File.WriteAllText(@"c:\temp\fdb\timings_" + N + "_" + K + "_" + B + ".csv", sb.ToString());
#else
                Console.WriteLine(sb.ToString());
#endif
			}

			// Read values

			using (var tr = db.BeginTransaction(ct))
			{
				Console.WriteLine("Reading all keys...");
				var sw = Stopwatch.StartNew();
				var items = await tr.GetRangeStartsWith(subspace).ToListAsync();
				sw.Stop();
				Console.WriteLine("Took " + FormatTimeMilli(sw.Elapsed.TotalMilliseconds) + " to get " + items.Count.ToString("N0", CultureInfo.InvariantCulture) + " results");
			}
		}
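
FdbKey.Batched(offset, count, workers, batchSize) above apparently yields one lazy sequence of (offset, count) chunks per worker, which is how the N keys get spread over K concurrent tasks committing B-sized transactions. For readers without the library, a hypothetical stand-in with the same shape, inferred purely from the usage above (not the library's actual code):

		// Hypothetical stand-in for FdbKey.Batched, inferred from usage: split
		// [offset, offset + count) into `workers` contiguous streams of chunks.
		static IEnumerable<IEnumerable<KeyValuePair<int, int>>> Batched(int offset, int count, int workers, int batchSize)
		{
			int perWorker = count / workers;
			for (int w = 0; w < workers; w++)
			{
				int start = offset + w * perWorker;
				int end = (w == workers - 1) ? offset + count : start + perWorker;
				yield return Chunks(start, end, batchSize);
			}
		}

		static IEnumerable<KeyValuePair<int, int>> Chunks(int start, int end, int batchSize)
		{
			for (int pos = start; pos < end; pos += batchSize)
			{
				yield return new KeyValuePair<int, int>(pos, Math.Min(batchSize, end - pos));
			}
		}
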
		private static async Task BenchUpdateLotsOfKeysAsync(IFdbDatabase db, int N, CancellationToken ct)
		{
			// change one byte in a large number of keys

			var location = db.Partition.ByKey("lists");

			var rnd = new Random();
			var keys = Enumerable.Range(0, N).Select(x => location.Keys.Encode(x)).ToArray();

			Console.WriteLine("> creating " + N + " half filled keys");
			var segment = new byte[60];

			for (int i = 0; i < (segment.Length >> 1); i++) segment[i] = (byte) rnd.Next(256);
			using (var trans = db.BeginTransaction(ct))
			{
				for (int i = 0; i < N; i += 1000)
				{
					for (int k = i; k < i + 1000 && k < N; k++)
					{
						trans.Set(keys[k], Slice.Create(segment));
					}
					await trans.CommitAsync();
					Console.Write("\r" + i + " / " + N);
				}
			}

			Console.WriteLine("\rChanging one byte in each of the " + N + " keys...");
			var sw = Stopwatch.StartNew();
			using (var trans = db.BeginTransaction(ct))
			{
				Console.WriteLine("READ");
				// get all the lists
				var data = await trans.GetBatchAsync(keys);

				// change them
				Console.WriteLine("CHANGE");
				for (int i = 0; i < data.Length; i++)
				{
					var list = data[i].Value.GetBytes();
					list[(list.Length >> 1) + 1] = (byte) rnd.Next(256);
					trans.Set(data[i].Key, Slice.Create(list));
				}

				Console.WriteLine("COMMIT");
				await trans.CommitAsync();
			}
			sw.Stop();

			Console.WriteLine("Took " + sw.Elapsed + " to patch one byte in " + N + " lists (" + FormatTimeMicro(sw.Elapsed.TotalMilliseconds / N) + " /update)");

		}
		private static async Task BenchUpdateSameKeyLotsOfTimesAsync(IFdbDatabase db, int N, CancellationToken ct)
		{
			// continuously update same key by adding a little bit more

			Console.WriteLine("Updating the same list " + N + " times...");

			var list = new byte[N];
			var update = Stopwatch.StartNew();
			var key = db.GlobalSpace.Keys.Encode("list");
			for (int i = 0; i < N; i++)
			{
				list[i] = (byte)i;
				using (var trans = db.BeginTransaction(ct))
				{
					trans.Set(key, Slice.Create(list));
					await trans.CommitAsync();
				}
				if (i % 100 == 0) Console.Write("\r> " + i + " / " + N);
			}
			update.Stop();

			Console.WriteLine("\rTook " + update.Elapsed + " to fill a byte[" + N + "] one by one (" + FormatTimeMicro(update.Elapsed.TotalMilliseconds / N) + "/update)");
		}
		private static async Task BenchClearAsync(IFdbDatabase db, int N, CancellationToken ct)
		{
			// clear a lot of small keys, in a single transaction

			var location = db.Partition.ByKey(Slice.FromAscii("hello"));

			var sw = Stopwatch.StartNew();
			using (var trans = db.BeginTransaction(ct))
			{
				for (int i = 0; i < N; i++)
				{
					trans.Clear(location.Keys.Encode(i));
				}

				await trans.CommitAsync();
			}
			sw.Stop();
			Console.WriteLine("Took " + sw.Elapsed + " to clear " + N + " items (" + FormatTimeMicro(sw.Elapsed.TotalMilliseconds / N) + "/write)");
		}
		private static async Task BurnerThread(IFdbDatabase db, CancellationToken ct)
		{

			var folder = await db.Directory.CreateOrOpenAsync(new[] { "Benchmarks", "Burner", "Sequential" }, ct);

			await db.WriteAsync((tr) => tr.ClearRange(folder), ct);

			long pos = 0;

			Random rnd;
			lock(Rnd)
			{
				rnd = new Random(Rnd.Next());
			}

			using (var tr = db.BeginTransaction(ct))
			{
				while (!ct.IsCancellationRequested)
				{
					FdbException error = null;
					try
					{
						tr.Reset();

						for(int i = 0; i < N; i++)
						{
							long x = Randomized
								? rnd.Next()
								: pos + i;

							tr.Set(folder.Keys.Encode(x, Suffix), Value);
							Interlocked.Increment(ref Keys);
						}
						pos += N;

						await tr.CommitAsync();
						Interlocked.Increment(ref Transactions);
						Interlocked.Add(ref Bytes, tr.Size);
					}
					catch (FdbException e)
					{
						error = e;
					}

					if (error != null && !ct.IsCancellationRequested)
					{
						await tr.OnErrorAsync(error.Code);
					}
				}
			}

		}
		/// <summary>Performs (x OP y) and ensures that the result is correct</summary>
		private async Task PerformAtomicOperationAndCheck(IFdbDatabase db, Slice key, int x, FdbMutationType type, int y)
		{

			int expected = 0;
			switch(type)
			{
				case FdbMutationType.BitAnd: expected = x & y; break;
				case FdbMutationType.BitOr: expected = x | y; break;
				case FdbMutationType.BitXor: expected = x ^ y; break;
				case FdbMutationType.Add: expected = x + y; break;
				default: Assert.Fail("Invalid operation type"); break;
			}

			// set key = x
			using (var tr = db.BeginTransaction(this.Cancellation))
			{
				tr.Set(key, Slice.FromFixed32(x));
				await tr.CommitAsync();
			}

			// atomic key op y
			using (var tr = db.BeginTransaction(this.Cancellation))
			{
				tr.Atomic(key, Slice.FromFixed32(y), type);
				await tr.CommitAsync();
			}

			// read key
			using (var tr = db.BeginTransaction(this.Cancellation))
			{
				var data = await tr.GetAsync(key);
				Assert.That(data.Count, Is.EqualTo(4), "data.Count");

				Assert.That(data.ToInt32(), Is.EqualTo(expected), "0x{0} {1} 0x{2} = 0x{3}", x.ToString("X8"), type.ToString(), y.ToString("X8"), expected.ToString("X8"));
			}
		}
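
A hypothetical call site for the helper above; the key encoding via db.GlobalSpace.Keys.Encode is borrowed from the other snippets on this page, and the operand values are arbitrary:

		// Sketch: exercise a few mutation types against the same key.
		var key = db.GlobalSpace.Keys.Encode("atomic_test");
		await PerformAtomicOperationAndCheck(db, key, 0x0F0F0F0F, FdbMutationType.BitAnd, 0x00FF00FF);
		await PerformAtomicOperationAndCheck(db, key, 0x0F0F0F0F, FdbMutationType.BitOr, 0x00FF00FF);
		await PerformAtomicOperationAndCheck(db, key, 1000, FdbMutationType.Add, 234);
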
        /// <summary>Execute a retry loop on this context</summary>
        internal static async Task ExecuteInternal([NotNull] IFdbDatabase db, [NotNull] FdbOperationContext context, [NotNull] Delegate handler, Delegate onDone)
        {
            Contract.Requires(db != null && context != null && handler != null);
            Contract.Requires(context.Shared);

            if (context.Abort)
            {
                throw new InvalidOperationException("Operation context has already been aborted or disposed");
            }

            try
            {
                // make sure to reset everything (in case a context is reused multiple times)
                context.Committed    = false;
                context.Retries      = 0;
                context.BaseDuration = TimeSpan.Zero;
                context.StartedUtc   = DateTime.UtcNow;
                context.Clock.Start();
                //note: we start the clock immediately, but the transaction's 5 second max lifetime is actually measured from the first read operation (Get, GetRange, GetReadVersion, etc...)
                // => algorithms that monitor the elapsed duration to rate limit themselves may think that the trans is older than it really is...
                // => we would need to plug into the transaction handler itself to be notified when exactly a read op starts...

                using (var trans = db.BeginTransaction(context.Mode, CancellationToken.None, context))
                {
                    while (!context.Committed && !context.Cancellation.IsCancellationRequested)
                    {
                        FdbException e = null;
                        try
                        {
                            // call the user provided lambda
                            if (handler is Func<IFdbTransaction, Task> funcWritable)
                            {
                                await funcWritable(trans).ConfigureAwait(false);
                            }
                            else if (handler is Action<IFdbTransaction> action)
                            {
                                action(trans);
                            }
                            else if (handler is Func<IFdbReadOnlyTransaction, Task> funcReadOnly)
                            {
                                await funcReadOnly(trans).ConfigureAwait(false);
                            }
                            else
                            {
                                throw new NotSupportedException($"Cannot execute handlers of type {handler.GetType().Name}");
                            }

                            if (context.Abort)
                            {
                                break;
                            }

                            if (!trans.IsReadOnly)
                            {                             // commit the transaction
                                await trans.CommitAsync().ConfigureAwait(false);
                            }

                            // we are done
                            context.Committed = true;

                            if (onDone != null)
                            {
                                if (onDone is Action<IFdbReadOnlyTransaction> action1)
                                {
                                    action1(trans);
                                }
                                else if (onDone is Action<IFdbTransaction> action2)
                                {
                                    action2(trans);
                                }
                                else if (onDone is Func<IFdbReadOnlyTransaction, Task> func1)
                                {
                                    await func1(trans).ConfigureAwait(false);
                                }
                                else if (onDone is Func<IFdbTransaction, Task> func2)
                                {
                                    await func2(trans).ConfigureAwait(false);
                                }
                                else
                                {
                                    throw new NotSupportedException($"Cannot execute completion handler of type {handler.GetType().Name}");
                                }
                            }
                        }
                        catch (FdbException x)
                        {
                            //TODO: will be able to await in catch block in C# 6 !
                            e = x;
                        }

                        if (e != null)
                        {
                            if (Logging.On && Logging.IsVerbose)
                            {
                                Logging.Verbose(String.Format(CultureInfo.InvariantCulture, "fdb: transaction {0} failed with error code {1}", trans.Id, e.Code));
                            }
                            await trans.OnErrorAsync(e.Code).ConfigureAwait(false);

                            if (Logging.On && Logging.IsVerbose)
                            {
                                Logging.Verbose(String.Format(CultureInfo.InvariantCulture, "fdb: transaction {0} can be safely retried", trans.Id));
                            }
                        }

                        // update the base time for the next attempt
                        context.BaseDuration = context.ElapsedTotal;
                        if (context.BaseDuration.TotalSeconds >= 10)
                        {
                            //REVIEW: it may not be a good idea to spam the logs with long-running transactions?
                            if (Logging.On)
                            {
                                Logging.Info(String.Format(CultureInfo.InvariantCulture, "fdb WARNING: long transaction ({0:N1} sec elapsed in transaction lambda function ({1} retries, {2})", context.BaseDuration.TotalSeconds, context.Retries, context.Committed ? "committed" : "not yet committed"));
                            }
                        }

                        context.Retries++;
                    }
                }
                context.Cancellation.ThrowIfCancellationRequested();

                if (context.Abort)
                {
                    throw new OperationCanceledException(context.Cancellation);
                }
            }
            finally
            {
                context.Clock.Stop();
                context.Dispose();
            }
        }
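
Callers never invoke ExecuteInternal directly; they hand a lambda to the retry helpers, which build the FdbOperationContext and re-run the lambda after every retryable FdbException. A usage sketch in the shapes that appear elsewhere on this page (db.ReadWriteAsync in the first BurnerThread, db.WriteAsync in the second); key and folder are illustrative. The important contract is that the handler may execute several times, so it must be idempotent and must not cache state across attempts:

        // Sketch: retry-loop entry points that funnel into ExecuteInternal.
        var previous = await db.ReadWriteAsync(async tr =>
        {
            var current = await tr.GetAsync(key); // reads and writes in one atomic unit
            tr.Set(key, Slice.FromInt32(42));
            return current;
        }, ct);

        await db.WriteAsync(tr => tr.ClearRange(folder), ct); // write-only variant
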
        internal static async Task ExecuteInternal([NotNull] IFdbDatabase db, [NotNull] FdbOperationContext context, [NotNull] Delegate handler, Delegate onDone)
        {
            Contract.Requires(db != null && context != null && handler != null);
            Contract.Requires(context.Shared);

            if (context.Abort)
            {
                throw new InvalidOperationException("Operation context has already been aborted or disposed");
            }

            try
            {
                context.Committed  = false;
                context.Retries    = 0;
                context.StartedUtc = DateTime.UtcNow;
                context.Duration.Start();

                using (var trans = db.BeginTransaction(context.Mode, CancellationToken.None, context))
                {
                    while (!context.Committed && !context.Cancellation.IsCancellationRequested)
                    {
                        FdbException e = null;
                        try
                        {
                            // call the user provided lambda
                            if (handler is Func<IFdbTransaction, Task>)
                            {
                                await ((Func<IFdbTransaction, Task>)handler)(trans).ConfigureAwait(false);
                            }
                            else if (handler is Action<IFdbTransaction>)
                            {
                                ((Action<IFdbTransaction>)handler)(trans);
                            }
                            else if (handler is Func<IFdbReadOnlyTransaction, Task>)
                            {
                                await ((Func<IFdbReadOnlyTransaction, Task>)handler)(trans).ConfigureAwait(false);
                            }
                            else
                            {
                                throw new NotSupportedException(String.Format("Cannot execute handlers of type {0}", handler.GetType().Name));
                            }

                            if (context.Abort)
                            {
                                break;
                            }

                            if (!trans.IsReadOnly)
                            {                             // commit the transaction
                                await trans.CommitAsync().ConfigureAwait(false);
                            }

                            // we are done
                            context.Committed = true;

                            if (onDone != null)
                            {
                                if (onDone is Action<IFdbReadOnlyTransaction>)
                                {
                                    ((Action<IFdbReadOnlyTransaction>)onDone)(trans);
                                }
                                else if (onDone is Action<IFdbTransaction>)
                                {
                                    ((Action<IFdbTransaction>)onDone)(trans);
                                }
                                else if (onDone is Func<IFdbReadOnlyTransaction, Task>)
                                {
                                    await ((Func<IFdbReadOnlyTransaction, Task>)onDone)(trans).ConfigureAwait(false);
                                }
                                else if (onDone is Func<IFdbTransaction, Task>)
                                {
                                    await ((Func<IFdbTransaction, Task>)onDone)(trans).ConfigureAwait(false);
                                }
                                else
                                {
                                    throw new NotSupportedException(String.Format("Cannot execute completion handler of type {0}", handler.GetType().Name));
                                }
                            }
                        }
                        catch (FdbException x)
                        {
                            e = x;
                        }

                        if (e != null)
                        {
                            if (Logging.On && Logging.IsVerbose)
                            {
                                Logging.Verbose(String.Format(CultureInfo.InvariantCulture, "fdb: transaction {0} failed with error code {1}", trans.Id, e.Code));
                            }
                            await trans.OnErrorAsync(e.Code).ConfigureAwait(false);

                            if (Logging.On && Logging.IsVerbose)
                            {
                                Logging.Verbose(String.Format(CultureInfo.InvariantCulture, "fdb: transaction {0} can be safely retried", trans.Id));
                            }
                        }

                        if (context.Duration.Elapsed.TotalSeconds >= 1)
                        {
                            if (Logging.On)
                            {
                                Logging.Info(String.Format(CultureInfo.InvariantCulture, "fdb WARNING: long transaction ({0:N1} sec elapsed in transaction lambda function ({1} retries, {2})", context.Duration.Elapsed.TotalSeconds, context.Retries, context.Committed ? "committed" : "not yet committed"));
                            }
                        }

                        context.Retries++;
                    }
                }
                context.Cancellation.ThrowIfCancellationRequested();

                if (context.Abort)
                {
                    throw new OperationCanceledException(context.Cancellation);
                }
            }
            finally
            {
                context.Duration.Stop();
                context.Dispose();
            }
        }
		private static async Task BenchInsertSmallKeysAsync(IFdbDatabase db, int N, int size, CancellationToken ct)
		{
			// insert a lot of small keys, in a single transaction
			var rnd = new Random();
			var tmp = new byte[size];

			var subspace = db.Partition.ByKey("Batch");

			var times = new List<TimeSpan>();
			for (int k = 0; k <= 4; k++)
			{
				var sw = Stopwatch.StartNew();
				using (var trans = db.BeginTransaction(ct))
				{
					rnd.NextBytes(tmp);
					for (int i = 0; i < N; i++)
					{
						tmp[0] = (byte)i;
						tmp[1] = (byte)(i >> 8);
						// (Batch, 1) = [......]
						// (Batch, 2) = [......]
						trans.Set(subspace.Keys.Encode(k * N + i), Slice.Create(tmp));
					}
					await trans.CommitAsync();
				}
				sw.Stop();
				times.Add(sw.Elapsed);
			}
			var min = times.Min();
			Console.WriteLine("[" + Thread.CurrentThread.ManagedThreadId + "] Took " + min.TotalSeconds.ToString("N3", CultureInfo.InvariantCulture) + " sec to insert " + N + " " + size + "-bytes items (" + FormatTimeMicro(min.TotalMilliseconds / N) + "/write)");
		}
		private static async Task TestSimpleTransactionAsync(IFdbDatabase db, CancellationToken ct)
		{
			Console.WriteLine("Starting new transaction...");

			var location = db.GlobalSpace;

			using (var trans = db.BeginTransaction(ct))
			{
				Console.WriteLine("> Transaction ready");

				Console.WriteLine("Getting read version...");
				var readVersion = await trans.GetReadVersionAsync();
				Console.WriteLine("> Read Version = " + readVersion);

				Console.WriteLine("Getting 'hello'...");
				var result = await trans.GetAsync(location.Keys.Encode("hello"));
				if (result.IsNull)
					Console.WriteLine("> hello NOT FOUND");
				else
					Console.WriteLine("> hello = " + result.ToString());

				Console.WriteLine("Setting 'Foo' = 'Bar'");
				trans.Set(location.Keys.Encode("Foo"), Slice.FromString("Bar"));

				Console.WriteLine("Setting 'TopSecret' = rnd(512)");
				var data = new byte[512];
				new Random(1234).NextBytes(data);
				trans.Set(location.Keys.Encode("TopSecret"), Slice.Create(data));

				Console.WriteLine("Committing transaction...");
				await trans.CommitAsync();
				//trans.Commit();
				Console.WriteLine("> Committed!");

				Console.WriteLine("Getting comitted version...");
				var writeVersion = trans.GetCommittedVersion();
				Console.WriteLine("> Commited Version = " + writeVersion);
			}
		}
		public async Task Run(IFdbDatabase db, TextWriter log, CancellationToken ct)
		{
			// estimate the number of machines...
			Console.WriteLine("# Detecting cluster topology...");
			var servers = await db.QueryAsync(tr => tr
				.WithReadAccessToSystemKeys()
				.GetRange(FdbKeyRange.StartsWith(Fdb.System.ServerList))
				.Select(kvp => new
				{
					Node = kvp.Value.Substring(8, 16).ToHexaString(),
					Machine = kvp.Value.Substring(24, 16).ToHexaString(),
					DataCenter = kvp.Value.Substring(40, 16).ToHexaString()
				}),
				ct
			);

			var numNodes = servers.Select(s => s.Node).Distinct().Count();
			var numMachines = servers.Select(s => s.Machine).Distinct().Count();
			var numDCs = servers.Select(s => s.DataCenter).Distinct().Count();

			Console.WriteLine("# > Found " + numNodes + " process(es) on " + numMachines + " machine(s) in " + numDCs + " datacenter(s)");
			Console.WriteLine("# Reading list of shards...");
			// dump keyServers
			var ranges = await Fdb.System.GetChunksAsync(db, FdbKey.MinValue, FdbKey.MaxValue, ct);
			Console.WriteLine("# > Found " + ranges.Count + " shards:");

			// take a sample
			var rnd = new Random(1234);
			int sz = Math.Max((int)Math.Ceiling(this.Ratio * ranges.Count), 1);
			if (sz > 500) sz = 500; //SAFETY
			if (sz < 50) sz = Math.Max(sz, Math.Min(50, ranges.Count));

			var samples = new List<FdbKeyRange>();
			var population = new List<FdbKeyRange>(ranges); // sample from a copy so that ranges.Count stays accurate below
			for (int i = 0; i < sz; i++)
			{
				int p = rnd.Next(population.Count);
				samples.Add(population[p]);
				population.RemoveAt(p);
			}

			Console.WriteLine("# Sampling " + sz + " out of " + ranges.Count + " shards (" + (100.0 * sz / ranges.Count).ToString("N1") + "%) ...");
			Console.WriteLine("{0,9}{1,10}{2,10}{3,10} : K+V size distribution", "Count", "Keys", "Values", "Total");

			var rangeOptions = new FdbRangeOptions { Mode = FdbStreamingMode.WantAll };

			samples = samples.OrderBy(x => x.Begin).ToList();

			long total = 0;
			int workers = Math.Min(numMachines, 8);

			var sw = Stopwatch.StartNew();
			var tasks = new List<Task>();
			while(samples.Count > 0)
			{
				while (tasks.Count < workers && samples.Count > 0)
				{
					var range = samples[0];
					samples.RemoveAt(0);
					tasks.Add(Task.Run(async () =>
					{
						var hh = new RobustHistogram(RobustHistogram.TimeScale.Ticks);

						#region Method 1: get_range everything...

						using (var tr = db.BeginTransaction(ct))
						{
							long keySize = 0;
							long valueSize = 0;
							long count = 0;

							int iter = 0;
							var beginSelector = FdbKeySelector.FirstGreaterOrEqual(range.Begin);
							var endSelector = FdbKeySelector.FirstGreaterOrEqual(range.End);
							while (true)
							{
								FdbRangeChunk data = default(FdbRangeChunk);
								FdbException error = null;
								try
								{
									data = await tr.Snapshot.GetRangeAsync(
										beginSelector,
										endSelector,
										rangeOptions,
										iter
									).ConfigureAwait(false);
								}
								catch (FdbException e)
								{
									error = e;
								}

								if (error != null)
								{
									await tr.OnErrorAsync(error.Code).ConfigureAwait(false);
									continue;
								}

								if (data.Count == 0) break;

								count += data.Count;
								foreach (var kvp in data.Chunk)
								{
									keySize += kvp.Key.Count;
									valueSize += kvp.Value.Count;

									hh.Add(TimeSpan.FromTicks(kvp.Key.Count + kvp.Value.Count));
								}

								if (!data.HasMore) break;

								beginSelector = FdbKeySelector.FirstGreaterThan(data.Last.Key);
								++iter;
							}

							long totalSize = keySize + valueSize;
							Interlocked.Add(ref total, totalSize);

							Console.WriteLine("{0,9}{1,10}{2,10}{3,10} : {4}", count.ToString("N0"), FormatSize(keySize), FormatSize(valueSize), FormatSize(totalSize), hh.GetDistribution(begin: 1, end: 10000, fold:2));
						}
						#endregion

						#region Method 2: estimate the count using key selectors...

						//long counter = await Fdb.System.EstimateCountAsync(db, range, ct);
						//Console.WriteLine("COUNT = " + counter.ToString("N0"));

						#endregion
					}, ct));
				}

				var done = await Task.WhenAny(tasks);
				tasks.Remove(done);
			}

			await Task.WhenAll(tasks);
			sw.Stop();

			Console.WriteLine("> Sampled " + FormatSize(total) + " (" + total.ToString("N0") + " bytes) in " + sw.Elapsed.TotalSeconds.ToString("N1") + " sec");
			Console.WriteLine("> Estimated total size is " + FormatSize(total * ranges.Count / sz));
		}
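		// Run() and the sampling commands also rely on a FormatSize helper that is not
		// shown in this file; a minimal sketch (units and rounding here are assumptions,
		// not the original implementation):
		private static string FormatSize(long bytes)
		{
			if (bytes >= 1048576) return (bytes / 1048576.0).ToString("N1", CultureInfo.InvariantCulture) + " MB";
			if (bytes >= 1024) return (bytes / 1024.0).ToString("N1", CultureInfo.InvariantCulture) + " kB";
			return bytes.ToString("N0", CultureInfo.InvariantCulture) + " bytes";
		}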
        public static async Task Sampling(string[] path, IVarTuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct)
        {
            double ratio = 0.1d;
            bool   auto  = true;

            if (extras.Count > 0)
            {
                double x = extras.Get<double>(0);
                if (x > 0 && x <= 1)
                {
                    ratio = x;
                }
                auto = false;
            }

            var folder = await TryOpenCurrentDirectoryAsync(path, db, ct);

            KeyRange span;

            if (folder is FdbDirectorySubspace)
            {
                span = KeyRange.StartsWith((folder as FdbDirectorySubspace).Copy().GetPrefix());
                log.WriteLine($"Reading list of shards for /{String.Join("/", path)} under {FdbKey.Dump(span.Begin)} ...");
            }
            else
            {
                log.WriteLine("Reading list of shards for the whole cluster ...");
                span = KeyRange.All;
            }

            // dump keyServers
            var ranges = await Fdb.System.GetChunksAsync(db, span, ct);

            log.WriteLine($"> Found {ranges.Count:N0} shard(s)");

            // take a sample
            var samples = new List<KeyRange>();

            if (ranges.Count <= 32)
            {             // small enough to scan it all
                samples.AddRange(ranges);
                log.WriteLine($"Sampling all {samples.Count:N0} shards ...");
            }
            else
            {             // need to take a random subset
                var rnd = new Random();
                int sz  = Math.Max((int)Math.Ceiling(ratio * ranges.Count), 1);
                if (auto)
                {
                    if (sz > 100)
                    {
                        sz = 100;                               //SAFETY
                    }
                    if (sz < 32)
                    {
                        sz = Math.Max(sz, Math.Min(32, ranges.Count));
                    }
                }

                var population = new List<KeyRange>(ranges);
                for (int i = 0; i < sz; i++)
                {
                    int p = rnd.Next(population.Count);
                    samples.Add(population[p]);
                    population.RemoveAt(p);
                }
                log.WriteLine($"Sampling {samples.Count:N0} out of {ranges.Count:N0} shards ({(100.0 * samples.Count / ranges.Count):N1}%) ...");
            }

            log.WriteLine();
            const string FORMAT_STRING = "{0,9} ║{1,10}{6,6} {2,-29} ║{3,10}{7,7} {4,-37} ║{5,10}";
            const string SCALE_KEY     = "....--------========########M";
            const string SCALE_VAL     = "....--------========########@@@@@@@@M";

            log.WriteLine(FORMAT_STRING, "Count", "Keys", SCALE_KEY, "Values", SCALE_VAL, "Total", "med.", "med.");

            var rangeOptions = new FdbRangeOptions {
                Mode = FdbStreamingMode.WantAll
            };

            samples = samples.OrderBy(x => x.Begin).ToList();

            long globalSize  = 0;
            long globalCount = 0;
            int  workers     = 8;        // Math.Max(4, Environment.ProcessorCount);

            var sw    = Stopwatch.StartNew();
            var tasks = new List<Task>();
            int n     = samples.Count;

            while (samples.Count > 0)
            {
                while (tasks.Count < workers && samples.Count > 0)
                {
                    var range = samples[0];
                    samples.RemoveAt(0);
                    tasks.Add(Task.Run(async () =>
                    {
                        var kk = new RobustHistogram(RobustHistogram.TimeScale.Ticks);
                        var vv = new RobustHistogram(RobustHistogram.TimeScale.Ticks);

                        #region Method 1: get_range everything...

                        using (var tr = db.BeginTransaction(ct))
                        {
                            long keySize   = 0;
                            long valueSize = 0;
                            long count     = 0;

                            int iter          = 0;
                            var beginSelector = KeySelector.FirstGreaterOrEqual(range.Begin);
                            var endSelector   = KeySelector.FirstGreaterOrEqual(range.End);
                            while (true)
                            {
                                FdbRangeChunk data = default(FdbRangeChunk);
                                FdbException error = null;
                                try
                                {
                                    data = await tr.Snapshot.GetRangeAsync(
                                        beginSelector,
                                        endSelector,
                                        rangeOptions,
                                        iter
                                        ).ConfigureAwait(false);
                                }
                                catch (FdbException e)
                                {
                                    error = e;
                                }

                                if (error != null)
                                {
                                    await tr.OnErrorAsync(error.Code).ConfigureAwait(false);
                                    continue;
                                }

                                if (data.Count == 0)
                                {
                                    break;
                                }

                                count += data.Count;
                                foreach (var kvp in data)
                                {
                                    keySize   += kvp.Key.Count;
                                    valueSize += kvp.Value.Count;

                                    kk.Add(TimeSpan.FromTicks(kvp.Key.Count));
                                    vv.Add(TimeSpan.FromTicks(kvp.Value.Count));
                                }

                                if (!data.HasMore)
                                {
                                    break;
                                }

                                beginSelector = KeySelector.FirstGreaterThan(data.Last);
                                ++iter;
                            }

                            long totalSize = keySize + valueSize;
                            Interlocked.Add(ref globalSize, totalSize);
                            Interlocked.Add(ref globalCount, count);

                            lock (log)
                            {
                                log.WriteLine(FORMAT_STRING, count.ToString("N0"), FormatSize(keySize), kk.GetDistribution(begin: 1, end: 12000, fold: 2), FormatSize(valueSize), vv.GetDistribution(begin: 1, end: 120000, fold: 2), FormatSize(totalSize), FormatSize((int)Math.Ceiling(kk.Median)), FormatSize((int)Math.Ceiling(vv.Median)));
                            }
                        }
                        #endregion

                        #region Method 2: estimate the count using key selectors...

                        //long counter = await Fdb.System.EstimateCountAsync(db, range, ct);
                        //Console.WriteLine("COUNT = " + counter.ToString("N0"));

                        #endregion
                    }, ct));
                }

                var done = await Task.WhenAny(tasks);

                tasks.Remove(done);
            }

            await Task.WhenAll(tasks);

            sw.Stop();

            log.WriteLine();
            if (n != ranges.Count)
            {
                log.WriteLine($"Sampled {FormatSize(globalSize)} ({globalSize:N0} bytes) and {globalCount:N0} keys in {sw.Elapsed.TotalSeconds:N1} sec");
                log.WriteLine($"> Estimated total size is {FormatSize(globalSize * ranges.Count / n)}");
            }
            else
            {
                log.WriteLine($"Found {FormatSize(globalSize)} ({globalSize:N0} bytes) and {globalCount:N0} keys in {sw.Elapsed.TotalSeconds:N1} sec");
                // compare to the whole cluster
                ranges = await Fdb.System.GetChunksAsync(db, FdbKey.MinValue, FdbKey.MaxValue, ct);

                log.WriteLine($"> This directory contains ~{(100.0 * n / ranges.Count):N2}% of all data");
            }
            log.WriteLine();
        }
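        // A hypothetical shell invocation of the command above: sample 25% of the shards
        // under the directory /Foo/Bar and write the report to the console (STuple.Create
        // is assumed to produce the IVarTuple carrying the extras; the path is a placeholder).
        private static Task SampleFooBarAsync(IFdbDatabase db, CancellationToken ct)
        {
            return Sampling(new[] { "Foo", "Bar" }, STuple.Create(0.25), db, Console.Out, ct);
        }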
		public async Task RunStatus(IFdbDatabase db, CancellationToken ct)
		{
			var countersLocation = this.WorkerPool.Subspace.Partition.ByKey(Slice.FromChar('C'));
			var idleLocation = this.WorkerPool.Subspace.Partition.ByKey(Slice.FromChar('I'));
			var busyLocation = this.WorkerPool.Subspace.Partition.ByKey(Slice.FromChar('B'));
			var tasksLocation = this.WorkerPool.Subspace.Partition.ByKey(Slice.FromChar('T'));
			var unassignedLocation = this.WorkerPool.Subspace.Partition.ByKey(Slice.FromChar('U'));

			using(var tr = db.BeginTransaction(ct))
			{
				var counters = await tr.Snapshot.GetRange(countersLocation.Keys.ToRange()).Select(kvp => new KeyValuePair<string, long>(countersLocation.Keys.DecodeLast<string>(kvp.Key), kvp.Value.ToInt64())).ToListAsync().ConfigureAwait(false);

				Console.WriteLine("Status at " + DateTimeOffset.Now.ToString("O"));
				foreach(var counter in counters)
				{
					Console.WriteLine(" - " + counter.Key + " = " + counter.Value);
				}

				Console.WriteLine("Dump:");
				Console.WriteLine("> Idle");
				await tr.Snapshot.GetRange(idleLocation.Keys.ToRange()).ForEachAsync((kvp) =>
				{
					Console.WriteLine("- Idle." + idleLocation.Keys.Unpack(kvp.Key) + " = " + kvp.Value.ToAsciiOrHexaString());
				});
				Console.WriteLine("> Busy");
				await tr.Snapshot.GetRange(busyLocation.Keys.ToRange()).ForEachAsync((kvp) =>
				{
					Console.WriteLine("- Busy." + busyLocation.Keys.Unpack(kvp.Key) + " = " + kvp.Value.ToAsciiOrHexaString());
				});
				Console.WriteLine("> Unassigned");
				await tr.Snapshot.GetRange(unassignedLocation.Keys.ToRange()).ForEachAsync((kvp) =>
				{
					Console.WriteLine("- Unassigned." + unassignedLocation.Keys.Unpack(kvp.Key) + " = " + kvp.Value.ToAsciiOrHexaString());
				});
				Console.WriteLine("> Tasks");
				await tr.Snapshot.GetRange(tasksLocation.Keys.ToRange()).ForEachAsync((kvp) =>
				{
					Console.WriteLine("- Tasks." + tasksLocation.Keys.Unpack(kvp.Key) + " = " + kvp.Value.ToAsciiOrHexaString());
				});
				Console.WriteLine("<");
			}
		}
		private static async Task BenchSerialWriteAsync(IFdbDatabase db, int N, CancellationToken ct)
		{
			// write a lot of small keys, one by one

			var location = db.Partition.ByKey("hello");

			var sw = Stopwatch.StartNew();
			IFdbTransaction trans = null;
			try
			{
				for (int i = 0; i < N; i++)
				{
					if (trans == null) trans = db.BeginTransaction(ct);
					trans.Set(location.Keys.Encode(i), Slice.FromInt32(i));
					if (trans.Size > 100 * 1024)
					{
						await trans.CommitAsync();
						trans.Dispose();
						trans = null;
					}
				}
				if (trans != null) await trans.CommitAsync(); // commit the last partial batch, if any
			}
			finally
			{
				if (trans != null) trans.Dispose();
			}
			sw.Stop();
			Console.WriteLine("Took " + sw.Elapsed + " to read " + N + " items (" + FormatTimeMicro(sw.Elapsed.TotalMilliseconds / N) + "/read)");
		}
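		// The commit-by-size pattern above generalizes to any stream of writes; a sketch
		// using only APIs already shown in this file (the helper name and the 100 KB
		// threshold are arbitrary):
		private static async Task WriteInChunksAsync(IFdbDatabase db, IEnumerable<KeyValuePair<Slice, Slice>> items, CancellationToken ct)
		{
			IFdbTransaction trans = null;
			try
			{
				foreach (var kv in items)
				{
					if (trans == null) trans = db.BeginTransaction(ct);
					trans.Set(kv.Key, kv.Value);
					if (trans.Size > 100 * 1024)
					{ // the batch is big enough: commit it and start a new transaction
						await trans.CommitAsync();
						trans.Dispose();
						trans = null;
					}
				}
				if (trans != null) await trans.CommitAsync();
			}
			finally
			{
				if (trans != null) trans.Dispose();
			}
		}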
		public static async Task Sampling(string[] path, IFdbTuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct)
		{
			double ratio = 0.1d;
			bool auto = true;
			if (extras.Count > 0)
			{
				double x = extras.Get<double>(0);
				if (x > 0 && x <= 1) ratio = x;
				auto = false;
			}

			var folder = await TryOpenCurrentDirectoryAsync(path, db, ct);
			FdbKeyRange span;
			if (folder is FdbDirectorySubspace)
			{
				span = FdbKeyRange.StartsWith((folder as FdbDirectorySubspace).Copy());
				log.WriteLine("Reading list of shards for /{0} under {1} ...", String.Join("/", path), FdbKey.Dump(span.Begin));
			}
			else
			{
				log.WriteLine("Reading list of shards for the whole cluster ...");
				span = FdbKeyRange.All;
			}

			// dump keyServers
			var ranges = await Fdb.System.GetChunksAsync(db, span, ct);
			log.WriteLine("> Found {0:N0} shard(s)", ranges.Count);

			// take a sample
			var samples = new List<FdbKeyRange>();

			if (ranges.Count <= 32)
			{ // small enough to scan it all
				samples.AddRange(ranges);
				log.WriteLine("Sampling all {0:N0} shards ...", samples.Count);
			}
			else
			{ // need to take a random subset
				var rnd = new Random();
				int sz = Math.Max((int)Math.Ceiling(ratio * ranges.Count), 1);
				if (auto)
				{
					if (sz > 100) sz = 100; //SAFETY
					if (sz < 32) sz = Math.Max(sz, Math.Min(32, ranges.Count));
				}

				var population = new List<FdbKeyRange>(ranges);
				for (int i = 0; i < sz; i++)
				{
					int p = rnd.Next(population.Count);
					samples.Add(population[p]);
					population.RemoveAt(p);
				}
				log.WriteLine("Sampling " + samples.Count + " out of " + ranges.Count + " shards (" + (100.0 * samples.Count / ranges.Count).ToString("N1") + "%) ...");
			}

			log.WriteLine();
			const string FORMAT_STRING = "{0,9} ║{1,10}{6,6} {2,-29} ║{3,10}{7,7} {4,-37} ║{5,10}";
			const string SCALE_KEY = "....--------========########M";
			const string SCALE_VAL = "....--------========########@@@@@@@@M";
			log.WriteLine(FORMAT_STRING, "Count", "Keys", SCALE_KEY, "Values", SCALE_VAL, "Total", "med.", "med.");

			var rangeOptions = new FdbRangeOptions { Mode = FdbStreamingMode.WantAll };

			samples = samples.OrderBy(x => x.Begin).ToList();

			long globalSize = 0;
			long globalCount = 0;
			int workers = 8; // Math.Max(4, Environment.ProcessorCount);

			var sw = Stopwatch.StartNew();
			var tasks = new List<Task>();
			int n = samples.Count;
			while (samples.Count > 0)
			{
				while (tasks.Count < workers && samples.Count > 0)
				{
					var range = samples[0];
					samples.RemoveAt(0);
					tasks.Add(Task.Run(async () =>
					{
						var kk = new RobustHistogram(RobustHistogram.TimeScale.Ticks);
						var vv = new RobustHistogram(RobustHistogram.TimeScale.Ticks);

						#region Method 1: get_range everything...

						using (var tr = db.BeginTransaction(ct))
						{
							long keySize = 0;
							long valueSize = 0;
							long count = 0;

							int iter = 0;
							var beginSelector = FdbKeySelector.FirstGreaterOrEqual(range.Begin);
							var endSelector = FdbKeySelector.FirstGreaterOrEqual(range.End);
							while (true)
							{
								FdbRangeChunk data = default(FdbRangeChunk);
								FdbException error = null;
								try
								{
									data = await tr.Snapshot.GetRangeAsync(
										beginSelector,
										endSelector,
										rangeOptions,
										iter
									).ConfigureAwait(false);
								}
								catch (FdbException e)
								{
									error = e;
								}

								if (error != null)
								{
									await tr.OnErrorAsync(error.Code).ConfigureAwait(false);
									continue;
								}

								if (data.Count == 0) break;

								count += data.Count;
								foreach (var kvp in data.Chunk)
								{
									keySize += kvp.Key.Count;
									valueSize += kvp.Value.Count;

									kk.Add(TimeSpan.FromTicks(kvp.Key.Count));
									vv.Add(TimeSpan.FromTicks(kvp.Value.Count));
								}

								if (!data.HasMore) break;

								beginSelector = FdbKeySelector.FirstGreaterThan(data.Last.Key);
								++iter;
							}

							long totalSize = keySize + valueSize;
							Interlocked.Add(ref globalSize, totalSize);
							Interlocked.Add(ref globalCount, count);

							lock (log)
							{
								log.WriteLine(FORMAT_STRING, count.ToString("N0"), FormatSize(keySize), kk.GetDistribution(begin: 1, end: 12000, fold: 2), FormatSize(valueSize), vv.GetDistribution(begin: 1, end: 120000, fold: 2), FormatSize(totalSize), FormatSize((int)Math.Ceiling(kk.Median)), FormatSize((int)Math.Ceiling(vv.Median)));
							}
						}
						#endregion

						#region Method 2: estimate the count using key selectors...

						//long counter = await Fdb.System.EstimateCountAsync(db, range, ct);
						//Console.WriteLine("COUNT = " + counter.ToString("N0"));

						#endregion
					}, ct));
				}

				var done = await Task.WhenAny(tasks);
				tasks.Remove(done);
			}

			await Task.WhenAll(tasks);
			sw.Stop();

			log.WriteLine();
			if (n != ranges.Count)
			{
				log.WriteLine("Sampled " + FormatSize(globalSize) + " (" + globalSize.ToString("N0") + " bytes) and " + globalCount.ToString("N0") + " keys in " + sw.Elapsed.TotalSeconds.ToString("N1") + " sec");
				log.WriteLine("> Estimated total size is " + FormatSize(globalSize * ranges.Count / n));
			}
			else
			{
				log.WriteLine("Found " + FormatSize(globalSize) + " (" + globalSize.ToString("N0") + " bytes) and " + globalCount.ToString("N0") + " keys in " + sw.Elapsed.TotalSeconds.ToString("N1") + " sec");
				// compare to the whole cluster
				ranges = await Fdb.System.GetChunksAsync(db, FdbKey.MinValue, FdbKey.MaxValue, ct);
				log.WriteLine("> This directory contains ~{0:N2}% of all data", (100.0 * n / ranges.Count));
			}
			log.WriteLine();
		}
		private static async Task BenchSerialReadAsync(IFdbDatabase db, int N, CancellationToken ct)
		{
			// read a lot of small keys, one by one

			Console.WriteLine("Reading " + N + " keys (serial, slow!)");

			var location = db.Partition.ByKey("hello");

			var sw = Stopwatch.StartNew();
			for (int k = 0; k < N; k += 1000)
			{
				using (var trans = db.BeginTransaction(ct))
				{
					for (int i = k; i < N && i < k + 1000; i++)
					{
						var result = await trans.GetAsync(location.Keys.Encode(i));
					}
				}
				Console.Write(".");
			}
			Console.WriteLine();
			sw.Stop();
			Console.WriteLine("Took " + sw.Elapsed + " to read " + N + " items (" + FormatTimeMicro(sw.Elapsed.TotalMilliseconds / N) + "/read)");
		}
		private static async Task BenchConcurrentReadAsync(IFdbDatabase db, int N, CancellationToken ct)
		{
			// read a lot of small keys, concurrently

			Console.WriteLine("Reading " + N + " keys (concurrent)");

			var location = db.Partition.ByKey("hello");

			var keys = Enumerable.Range(0, N).Select(i => location.Keys.Encode(i)).ToArray();

			var sw = Stopwatch.StartNew();
			using (var trans = db.BeginTransaction(ct))
			{
				var results = await Task.WhenAll(Enumerable
					.Range(0, keys.Length)
					.Select((i) => trans.GetAsync(keys[i]))
				);
			}
			sw.Stop();
			Console.WriteLine("Took " + sw.Elapsed + " to read " + N + " items (" + FormatTimeMicro(sw.Elapsed.TotalMilliseconds / keys.Length) + "/read)");

			sw = Stopwatch.StartNew();
			using (var trans = db.BeginTransaction(ct))
			{
				var results = await trans.GetBatchAsync(keys);
			}
			sw.Stop();
			Console.WriteLine("Took " + sw.Elapsed + " to read " + keys.Length + " items (" + FormatTimeMicro(sw.Elapsed.TotalMilliseconds / keys.Length) + "/read)");
		}
		protected async Task DeleteSubspace(IFdbDatabase db, IFdbSubspace subspace)
		{
			using (var tr = db.BeginTransaction(this.Cancellation))
			{
				tr.ClearRange(subspace);
				await tr.CommitAsync();
			}
		}
        private async Task<bool> FulfillConflictedPops([NotNull] IFdbDatabase db, CancellationToken ct)
        {
            const int numPops = 100;

            using (var tr = db.BeginTransaction(ct))
            {
#if DEBUG
                tr.Annotate("FulfillConflictedPops");
#endif

                var ts = await Task.WhenAll(
                    GetWaitingPopsAsync(tr.Snapshot, numPops),
                    GetItemsAsync(tr.Snapshot, numPops)
                    ).ConfigureAwait(false);

                var pops  = ts[0];
                var items = ts[1];
#if DEBUG
                tr.Annotate("pops: {0}, items: {1}", pops.Count, items.Count);
#endif

                var tasks = new List<Task>(pops.Count);

                int i = 0;
                int n = Math.Min(pops.Count, items.Count);
                while (i < n)
                {
                    var pop = pops[i];
                    var kvp = items[i];

                    var key        = this.ConflictedPop.Keys.Unpack(pop.Key);
                    var storageKey = this.ConflictedItemKey(key[1]);

                    tr.Set(storageKey, kvp.Value);
                    //TODO: could this be replaced with a read conflict range ? (not async)
                    tasks.Add(tr.GetAsync(kvp.Key));
                    tasks.Add(tr.GetAsync(pop.Key));
                    tr.Clear(pop.Key);
                    tr.Clear(kvp.Key);

                    ++i;
                }

                // clear any remaining pop requests that could not be matched with an item
                while (i < pops.Count)
                {
                    //TODO: could this be replaced with a read conflict range ? (not async)
                    tasks.Add(tr.GetAsync(pops[i].Key));
                    tr.Clear(pops[i].Key);
                    ++i;
                }

                // wait for all pending reads
                await Task.WhenAll(tasks).ConfigureAwait(false);

                // commit
                await tr.CommitAsync().ConfigureAwait(false);

                return pops.Count < numPops;
            }
        }
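        // Addressing the TODO above: if the binding exposes explicit conflict ranges
        // (AddReadConflictKey is assumed here), the dummy reads whose results are discarded
        // could be replaced by synchronous conflict registrations, for example:
        //
        //     tr.AddReadConflictKey(pop.Key); // instead of tasks.Add(tr.GetAsync(pop.Key))
        //     tr.AddReadConflictKey(kvp.Key); // instead of tasks.Add(tr.GetAsync(kvp.Key))
        //
        // This would keep the same conflict-detection semantics without waiting for values.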
        public static IFdbTransaction BeginTransaction([NotNull] this IFdbDatabase db, CancellationToken cancellationToken)
        {
            Contract.NotNull(db, nameof(db));
            return db.BeginTransaction(FdbTransactionMode.Default, cancellationToken, default(FdbOperationContext));
        }
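        // Typical usage of this extension, mirroring the benchmarks above: the transaction
        // must be disposed, and CommitAsync() must be called explicitly, since there is no
        // retry loop around it. A minimal sketch (the key and value are placeholders):
        private static async Task SetOneKeyAsync(IFdbDatabase db, CancellationToken ct)
        {
            var location = db.GlobalSpace;
            using (var tr = db.BeginTransaction(ct))
            {
                tr.Set(location.Keys.Encode("Foo"), Slice.FromString("Bar"));
                await tr.CommitAsync();
            }
        }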
        private async Task<Optional<T>> PopHighContentionAsync([NotNull] IFdbDatabase db, CancellationToken ct)
        {
            int   backOff = 10;
            Slice waitKey = Slice.Empty;

            ct.ThrowIfCancellationRequested();

            using (var tr = db.BeginTransaction(ct))
            {
#if DEBUG
                tr.Annotate("PopHighContention()");
#endif

                FdbException error = null;
                try
                {
                    // Check if there are other people waiting to be popped. If so, we cannot pop before them.
                    waitKey = await AddConflictedPopAsync(tr, forced: false).ConfigureAwait(false);

                    if (waitKey.IsNull)
                    {                     // No one else was waiting to be popped
                        var item = await PopSimpleAsync(tr).ConfigureAwait(false);

                        await tr.CommitAsync().ConfigureAwait(false);

                        return item;
                    }
                    else
                    {
                        await tr.CommitAsync().ConfigureAwait(false);
                    }
                }
                catch (FdbException e)
                {
                    // note: cannot await inside a catch(..) block, so flag the error and process it below
                    error = e;
                }

                if (error != null)
                {                 // If we didn't succeed, then register our pop request
                    waitKey = await AddConflictedPopAsync(db, forced: true, ct: ct).ConfigureAwait(false);
                }

                // The result of the pop will be stored at this key once it has been fulfilled
                var resultKey = ConflictedItemKey(this.ConflictedPop.Keys.DecodeLast<Slice>(waitKey));

                tr.Reset();

                // Attempt to fulfill outstanding pops and then poll the database
                // checking if we have been fulfilled

                while (!ct.IsCancellationRequested)
                {
                    error = null;
                    try
                    {
                        while (!(await FulfillConflictedPops(db, ct).ConfigureAwait(false)))
                        {
                            //NOP ?
                        }
                    }
                    catch (FdbException e)
                    {
                        // cannot await in catch(..) block so process it below
                        error = e;
                    }

                    // If the error is 1020 (not_committed), there is a good chance that
                    // somebody else managed to fulfill some outstanding pops, so we proceed
                    // to check whether our own request has been fulfilled. Any other error
                    // is handled in the usual fashion.
                    if (error != null && error.Code != FdbError.NotCommitted)
                    {
                        await tr.OnErrorAsync(error.Code).ConfigureAwait(false);
                        continue;
                    }

                    error = null;
                    try
                    {
                        tr.Reset();

                        var sw = System.Diagnostics.Stopwatch.StartNew();

                        var tmp = await tr.GetValuesAsync(new Slice[] { waitKey, resultKey }).ConfigureAwait(false);

                        var value  = tmp[0];
                        var result = tmp[1];

                        // If waitKey is present, then we have not been fulfilled
                        if (value.HasValue)
                        {
#if DEBUG
                            tr.Annotate("Wait {0} ms : {1} / {2}", backOff, Environment.TickCount, sw.ElapsedTicks);
#endif
                            //TODO: we should rewrite this using Watches !
                            await Task.Delay(backOff, ct).ConfigureAwait(false);

#if DEBUG
                            tr.Annotate("After wait : {0} / {1}", Environment.TickCount, sw.ElapsedTicks);
#endif
                            backOff = Math.Min(1000, backOff * 2);
                            continue;
                        }

                        if (result.IsNullOrEmpty)
                        {
                            return default(Optional<T>);
                        }

                        tr.Clear(resultKey);
                        await tr.CommitAsync().ConfigureAwait(false);

                        return this.Encoder.DecodeValue(result);
                    }
                    catch (FdbException e)
                    {
                        error = e;
                    }

                    if (error != null)
                    {
                        await tr.OnErrorAsync(error.Code).ConfigureAwait(false);
                    }
                }

                ct.ThrowIfCancellationRequested();
                // make the compiler happy
                throw new InvalidOperationException();
            }
        }
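        // The TODO above suggests replacing the Task.Delay polling with watches; a sketch,
        // assuming this binding's tr.Watch(key, ct), which returns an awaitable FdbWatch
        // that fires when the key is modified or cleared (the helper name is hypothetical):
        private static async Task WaitForKeyClearedAsync(IFdbDatabase db, Slice waitKey, CancellationToken ct)
        {
            while (true)
            {
                FdbWatch watch;
                using (var tr = db.BeginTransaction(ct))
                {
                    var value = await tr.GetAsync(waitKey);
                    if (value.IsNull) return; // the wait key is gone: the pop was fulfilled
                    watch = tr.Watch(waitKey, ct);
                    await tr.CommitAsync(); // the watch only becomes active once committed
                }
                await watch; // completes when waitKey changes, instead of a fixed backoff
            }
        }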