        private static async Task BenchConcurrentReadAsync(IFdbDatabase db, int N, CancellationToken ct)
        {
            // read a lot of small keys, concurrently

            Console.WriteLine("Reading " + N + " keys (concurrent)");

            var location = db.Partition("hello");

            var keys = Enumerable.Range(0, N).Select(i => location.Pack(i)).ToArray();

            var sw = Stopwatch.StartNew();

            using (var trans = db.BeginTransaction(ct))
            {
                var results = await Task.WhenAll(Enumerable
                    .Range(0, keys.Length)
                    .Select((i) => trans.GetAsync(keys[i]))
                );
            }
            sw.Stop();
            Console.WriteLine("Took " + sw.Elapsed + " to read " + N + " items (" + FormatTimeMicro(sw.Elapsed.TotalMilliseconds / keys.Length) + "/read)");

            sw = Stopwatch.StartNew();
            using (var trans = db.BeginTransaction(ct))
            {
                var results = await trans.GetBatchAsync(keys);
            }
            sw.Stop();
            Console.WriteLine("Took " + sw.Elapsed + " to read " + keys.Length + " items (" + FormatTimeMicro(sw.Elapsed.TotalMilliseconds / keys.Length) + "/read)");
        }
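        /// <summary>Benchmark: reads <paramref name="N"/> small keys one by one, awaiting each read, with at most 1,000 reads per transaction.</summary>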
        private static async Task BenchSerialReadAsync(IFdbDatabase db, int N, CancellationToken ct)
        {
            Console.WriteLine("Reading " + N + " keys (serial, slow!)");

            // read a lot of small keys, one by one

            var location = db.Partition("hello");

            var sw = Stopwatch.StartNew();

            for (int k = 0; k < N; k += 1000)
            {
                using (var trans = db.BeginTransaction(ct))
                {
                    for (int i = k; i < N && i < k + 1000; i++)
                    {
                        var result = await trans.GetAsync(location.Pack(i));
                    }
                }
                Console.Write(".");
            }
            Console.WriteLine();
            sw.Stop();
            Console.WriteLine("Took " + sw.Elapsed + " to read " + N + " items (" + FormatTimeMicro(sw.Elapsed.TotalMilliseconds / N) + "/read)");
        }
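        /// <summary>Benchmark: inserts <paramref name="N"/> keys with <paramref name="size"/>-byte random values, one transaction per run, over 5 runs; reports the best run.</summary>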
        private static async Task BenchInsertSmallKeysAsync(IFdbDatabase db, int N, int size, CancellationToken ct)
        {
            // insert a lot of small keys, in a single transaction
            var rnd = new Random();
            var tmp = new byte[size];

            var subspace = db.Partition("Batch");

            var times = new List<TimeSpan>();

            for (int k = 0; k <= 4; k++)
            {
                var sw = Stopwatch.StartNew();
                using (var trans = db.BeginTransaction(ct))
                {
                    rnd.NextBytes(tmp);
                    for (int i = 0; i < N; i++)
                    {
                        tmp[0] = (byte)i;
                        tmp[1] = (byte)(i >> 8);
                        // (Batch, 1) = [......]
                        // (Batch, 2) = [......]
                        trans.Set(subspace.Pack(k * N + i), Slice.Create(tmp));
                    }
                    await trans.CommitAsync();
                }
                sw.Stop();
                times.Add(sw.Elapsed);
            }
            var min = times.Min();

            Console.WriteLine("[" + Thread.CurrentThread.ManagedThreadId + "] Took " + min.TotalSeconds.ToString("N3", CultureInfo.InvariantCulture) + " sec to insert " + N + " " + size + "-bytes items (" + FormatTimeMicro(min.TotalMilliseconds / N) + "/write)");
        }
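        /// <summary>Benchmark: creates <paramref name="N"/> half-filled 60-byte values, then patches one byte in each via a bulk read, modify, and single commit.</summary>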
        private static async Task BenchUpdateLotsOfKeysAsync(IFdbDatabase db, int N, CancellationToken ct)
        {
            // change one byte in a large number of keys

            var location = db.Partition("lists");

            var rnd = new Random();
            var keys = Enumerable.Range(0, N).Select(x => location.Pack(x)).ToArray();

            Console.WriteLine("> creating " + N + " half filled keys");
            var segment = new byte[60];

            for (int i = 0; i < (segment.Length >> 1); i++)
            {
                segment[i] = (byte)rnd.Next(256);
            }
            using (var trans = db.BeginTransaction(ct))
            {
                for (int i = 0; i < N; i += 1000)
                {
                    for (int k = i; k < i + 1000 && k < N; k++)
                    {
                        trans.Set(keys[k], Slice.Create(segment));
                    }
                    await trans.CommitAsync();

                    Console.Write("\r" + i + " / " + N);
                }
            }

            Console.WriteLine("\rChanging one byte in each of the " + N + " keys...");
            var sw = Stopwatch.StartNew();

            using (var trans = db.BeginTransaction(ct))
            {
                Console.WriteLine("READ");
                // get all the lists
                var data = await trans.GetBatchAsync(keys);

                // change them
                Console.WriteLine("CHANGE");
                for (int i = 0; i < data.Length; i++)
                {
                    var list = data[i].Value.GetBytes();
                    list[(list.Length >> 1) + 1] = (byte)rnd.Next(256);
                    trans.Set(data[i].Key, Slice.Create(list));
                }

                Console.WriteLine("COMMIT");
                await trans.CommitAsync();
            }
            sw.Stop();

            Console.WriteLine("Took " + sw.Elapsed + " to patch one byte in " + N + " lists (" + FormatTimeMicro(sw.Elapsed.TotalMilliseconds / N) + " /update)");
        }
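        /// <summary>Benchmark: inserts <paramref name="K"/> lists of <paramref name="N"/> random keys each, then merge-sorts them and keeps the first <paramref name="B"/> results.</summary>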
        private static async Task BenchMergeSortAsync(IFdbDatabase db, int N, int K, int B, CancellationToken ct)
        {
            // create multiple lists
            var location = db.Partition("MergeSort");
            await db.ClearRangeAsync(location, ct);

            var sources = Enumerable.Range(0, K).Select(i => 'A' + i).ToArray();
            var rnd = new Random();

            // insert a number of random number lists
            Console.Write("> Inserting " + (K * N).ToString("N0", CultureInfo.InvariantCulture) + " items... ");
            foreach (var source in sources)
            {
                using (var tr = db.BeginTransaction(ct))
                {
                    var list = location.Partition(source);
                    for (int i = 0; i < N; i++)
                    {
                        tr.Set(list.Pack(rnd.Next()), Slice.FromInt32(i));
                    }
                    await tr.CommitAsync();
                }
            }
            Console.WriteLine("Done");

            // merge/sort them to get only one (hopefully sorted) list

            using (var tr = db.BeginTransaction(ct))
            {
                var mergesort = tr
                    .MergeSort(
                        sources.Select(source => FdbKeySelectorPair.StartsWith(location.Pack(source))),
                        (kvp) => location.UnpackLast<int>(kvp.Key)
                    )
                    .Take(B)
                    .Select(kvp => location.Unpack(kvp.Key));

                Console.Write("> MergeSort with limit " + B + "... ");
                var sw = Stopwatch.StartNew();
                var results = await mergesort.ToListAsync();

                sw.Stop();
                Console.WriteLine("Done");

                Console.WriteLine("Took " + FormatTimeMilli(sw.Elapsed.TotalMilliseconds) + " to merge sort " + results.Count + " results from " + K + " lists of " + N + " items each");

                //foreach (var result in results)
                //{
                //	Console.WriteLine(result.Get<int>(-1));
                //}
            }
        }
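        /// <summary>Benchmark: clears <paramref name="N"/> small keys in a single transaction.</summary>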
        private static async Task BenchClearAsync(IFdbDatabase db, int N, CancellationToken ct)
        {
            // clear a lot of small keys, in a single transaction

            var location = db.Partition(Slice.FromAscii("hello"));

            var sw = Stopwatch.StartNew();

            using (var trans = db.BeginTransaction(ct))
            {
                for (int i = 0; i < N; i++)
                {
                    trans.Clear(location.Pack(i));
                }

                await trans.CommitAsync();
            }
            sw.Stop();
            Console.WriteLine("Took " + sw.Elapsed + " to clear " + N + " items (" + FormatTimeMicro(sw.Elapsed.TotalMilliseconds / N) + "/write)");
        }
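        /// <summary>Benchmark: writes <paramref name="N"/> small keys sequentially, committing and reopening the transaction whenever the pending payload exceeds ~100 KB.</summary>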
        private static async Task BenchSerialWriteAsync(IFdbDatabase db, int N, CancellationToken ct)
        {
            // write a lot of small keys, one by one

            var location = db.Partition("hello");

            var sw = Stopwatch.StartNew();
            IFdbTransaction trans = null;

            try
            {
                for (int i = 0; i < N; i++)
                {
                    if (trans == null)
                    {
                        trans = db.BeginTransaction(ct);
                    }
                    trans.Set(location.Pack(i), Slice.FromInt32(i));
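                    // flush in ~100 KB chunks: large enough to amortize commit overhead, and far below FoundationDB's 10 MB transaction size limit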
                    if (trans.Size > 100 * 1024)
                    {
                        await trans.CommitAsync();

                        trans.Dispose();
                        trans = null;
                    }
                }
                // commit whatever is left over (trans is null if the loop ended right after a commit)
                if (trans != null) await trans.CommitAsync();
            }
            finally
            {
                if (trans != null)
                {
                    trans.Dispose();
                }
            }
            sw.Stop();
            Console.WriteLine("Took " + sw.Elapsed + " to read " + N + " items (" + FormatTimeMicro(sw.Elapsed.TotalMilliseconds / N) + "/read)");
        }
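        /// <summary>Benchmark: bulk-inserts <paramref name="N"/> keys with 256-byte values from parallel workers, then reads everything back with a single range read. <paramref name="K"/> and <paramref name="B"/> are forwarded to FdbKey.Batched and presumably control the worker count and batch size.</summary>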
        private static async Task BenchBulkInsertThenBulkReadAsync(IFdbDatabase db, int N, int K, int B, CancellationToken ct, bool instrumented = false)
        {
            // test that we can bulk write / bulk read

            var timings = instrumented ? new List<KeyValuePair<double, double>>() : null;

            // put test values inside a namespace
            var subspace = db.Partition("BulkInsert");

            // cleanup everything
            using (var tr = db.BeginTransaction(ct))
            {
                tr.ClearRange(subspace);
                await tr.CommitAsync();
            }

            // insert all values (batched)
            Console.WriteLine("Inserting " + N.ToString("N0", CultureInfo.InvariantCulture) + " keys: ");
            var insert = Stopwatch.StartNew();
            int batches = 0;
            long bytes = 0;

            var start = Stopwatch.StartNew();

            var tasks = new List<Task>();

            foreach (var worker in FdbKey.Batched(0, N, K, B))
            {
                //hack
                tasks.Add(Task.Run(async () =>
                {
                    foreach (var chunk in worker)
                    {
                        using (var tr = db.BeginTransaction(ct))
                        {
                            int z = 0;
                            foreach (int i in Enumerable.Range(chunk.Key, chunk.Value))
                            {
                                tr.Set(subspace.Pack(i), Slice.Create(new byte[256]));
                                z++;
                            }

                            //Console.Write("#");
                            //Console.WriteLine("  Commiting batch (" + tr.Size.ToString("N0", CultureInfo.InvariantCulture) + " bytes) " + z + " keys");
                            var localStart = start.Elapsed.TotalSeconds;
                            await tr.CommitAsync();
                            var localDuration = start.Elapsed.TotalSeconds - localStart;
                            if (instrumented)
                            {
                                lock (timings) { timings.Add(new KeyValuePair <double, double>(localStart, localDuration)); }
                            }
                            Interlocked.Increment(ref batches);
                            Interlocked.Add(ref bytes, tr.Size);
                        }
                    }
                }, ct));
            }
            await Task.WhenAll(tasks);

            insert.Stop();
            Console.WriteLine("Committed " + batches + " batches in " + FormatTimeMilli(insert.Elapsed.TotalMilliseconds) + " (" + FormatTimeMilli(insert.Elapsed.TotalMilliseconds / batches) + " / batch, " + FormatTimeMicro(insert.Elapsed.TotalMilliseconds / N) + " / item");
            Console.WriteLine("Throughput " + FormatThroughput(bytes, insert.Elapsed.TotalSeconds));

            if (instrumented)
            {
                var sb = new StringBuilder();
                foreach (var kvp in timings)
                {
                    sb.Append(kvp.Key.ToString()).Append(';').Append((kvp.Key + kvp.Value).ToString()).Append(';').Append(kvp.Value.ToString()).AppendLine();
                }
#if DEBUG
                System.IO.File.WriteAllText(@"c:\temp\fdb\timings_" + N + "_" + K + "_" + B + ".csv", sb.ToString());
#else
                Console.WriteLine(sb.ToString());
#endif
            }

            // Read values

            using (var tr = db.BeginTransaction(ct))
            {
                Console.WriteLine("Reading all keys...");
                var sw = Stopwatch.StartNew();
                var items = await tr.GetRangeStartsWith(subspace).ToListAsync();

                sw.Stop();
                Console.WriteLine("Took " + FormatTimeMilli(sw.Elapsed.TotalMilliseconds) + " to get " + items.Count.ToString("N0", CultureInfo.InvariantCulture) + " results");
            }
        }
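        /// <summary>Benchmark: inserts <paramref name="N"/> keys with <paramref name="size"/>-byte values, split across <paramref name="k"/> concurrent transactions that all start simultaneously.</summary>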
        private static async Task BenchConcurrentInsert(IFdbDatabase db, int k, int N, int size, CancellationToken ct)
        {
            // insert a lot of small keys, in multiple batches running in parallel
            // k = number of threads
            // N = total number of keys
            // size = value size (bytes)
            // n = keys per batch (N/k)

            int n = N / k;

            // make sure that N is multiple of k
            N = n * k;

            Console.WriteLine("Inserting " + N + " keys in " + k + " batches of " + n + " with " + size + "-bytes values...");

            // store every key under ("Batch", i)
            var subspace = db.Partition("Batch");
            // total estimated size of all transactions
            long totalPayloadSize = 0;

            var tasks = new List<Task>();
            var sem = new ManualResetEventSlim();

            for (int j = 0; j < k; j++)
            {
                int offset = j;
                // spin a task for the batch using TaskCreationOptions.LongRunning to make sure it runs in its own thread
                tasks.Add(Task.Factory.StartNew(async () =>
                {
                    // seed from 'offset' (copied per iteration), not the loop variable 'j', which all lambdas would otherwise share
                    var rnd = new Random(1234567 * offset);
                    var tmp = new byte[size];
                    rnd.NextBytes(tmp);

                    // block until all threads are ready
                    sem.Wait();

                    var x = Stopwatch.StartNew();
                    using (var trans = db.BeginTransaction(ct))
                    {
                        x.Stop();
                        Console.WriteLine("> [" + offset + "] got transaction in " + FormatTimeMilli(x.Elapsed.TotalMilliseconds));

                        // package the keys...
                        x.Restart();
                        for (int i = 0; i < n; i++)
                        {
                            // change the value a little bit
                            tmp[0] = (byte)i;
                            tmp[1] = (byte)(i >> 8);

                            // ("Batch", batch_index, i) = [..random..]
                            trans.Set(subspace.Pack(i), Slice.Create(tmp));
                        }
                        x.Stop();
                        Console.WriteLine("> [" + offset + "] packaged " + n + " keys (" + trans.Size.ToString("N0", CultureInfo.InvariantCulture) + " bytes) in " + FormatTimeMilli(x.Elapsed.TotalMilliseconds));

                        // commit the transaction
                        x.Restart();
                        await trans.CommitAsync();
                        x.Stop();
                        Console.WriteLine("> [" + offset + "] committed " + n + " keys (" + trans.Size.ToString("N0", CultureInfo.InvariantCulture) + " bytes) in " + FormatTimeMilli(x.Elapsed.TotalMilliseconds));

                        Interlocked.Add(ref totalPayloadSize, trans.Size);
                    }
                }, TaskCreationOptions.LongRunning).Unwrap());
            }
            // give time for threads to be ready
            await Task.Delay(100);

            // start
            var sw = Stopwatch.StartNew();

            sem.Set();

            // wait for total completion
            await Task.WhenAll(tasks);

            sw.Stop();
            Console.WriteLine("* Total: " + FormatTimeMilli(sw.Elapsed.TotalMilliseconds) + ", " + FormatTimeMicro(sw.Elapsed.TotalMilliseconds / N) + " / write, " + FormatThroughput(totalPayloadSize, sw.Elapsed.TotalSeconds));
            Console.WriteLine();
        }