// Exemplo n.º 1 — NOTE(review): stray sample-site header (example number + vote count); kept as a comment so the file compiles.
        public async Task Test_FdbKey_Batched()
        {
            // Partition the range [0, 999] across 5 workers, each consuming batches of 20 items at a time.
            // All 5 enumerables draw ranges from the same shared pool, and all complete once the pool runs dry.

            const int TotalCount = 1000;
            const int BatchSize = 20;
            const int WorkerCount = 5;

            var source = FdbKey.Batched(0, TotalCount, WorkerCount, BatchSize);

            Assert.That(source, Is.Not.Null);

            var workers = source.ToArray();

            Assert.That(workers, Is.Not.Null);
            Assert.That(workers.Length, Is.EqualTo(WorkerCount));
            Assert.That(workers, Is.All.Not.Null);

            // tracks which indexes have already been handed out to some worker
            var seen = new bool[TotalCount];

            var gate = new TaskCompletionSource <object>();

            // every worker must only ever produce ranges that no other worker has produced
            var pending = workers.Select(async(worker, workerId) =>
            {
                // hold all workers until the gate opens, to force真 asynchronous execution
                await gate.Task.ConfigureAwait(false);

                foreach (var range in worker)
                {
                    // range = (offset, count)
                    // > count must always equal the batch size
                    // > offset must always be batch-aligned
                    // > no two workers may ever receive overlapping ranges
                    Assert.That(range.Value, Is.EqualTo(BatchSize), "{0}:{1}", range.Key, range.Value);
                    Assert.That(range.Key % BatchSize, Is.EqualTo(0), "{0}:{1}", range.Key, range.Value);

                    lock (seen)
                    {
                        int end = range.Key + range.Value;
                        for (int index = range.Key; index < end; index++)
                        {
                            if (seen[index])
                            {
                                Assert.Fail("Duplicate index {0} chunk {1}:{2} for worker {3}", index, range.Key, range.Value, workerId);
                            }
                            seen[index] = true;
                        }
                    }

                    await Task.Delay(1).ConfigureAwait(false);
                }
            }).ToArray();

            // open the gate from the thread pool so no worker runs inline on this thread
            ThreadPool.UnsafeQueueUserWorkItem((_) => gate.TrySetResult(null), null);

            await Task.WhenAll(pending);

            // every single index must have been consumed exactly once
            Assert.That(seen, Is.All.True);
        }
        /// <summary>Bulk-inserts <paramref name="N"/> keys with <paramref name="K"/> concurrent workers committing batches of <paramref name="B"/> keys each, then reads everything back with a single range query.</summary>
        /// <param name="db">Target database</param>
        /// <param name="N">Total number of keys to insert</param>
        /// <param name="K">Number of concurrent insert workers</param>
        /// <param name="B">Number of keys written per transaction</param>
        /// <param name="ct">Token used to abort the benchmark</param>
        /// <param name="instrumented">If true, records (start, duration) of every commit and dumps them as CSV</param>
        private static async Task BenchBulkInsertThenBulkReadAsync(IFdbDatabase db, int N, int K, int B, CancellationToken ct, bool instrumented = false)
        {
            // test that we can bulk write / bulk read

            var timings = instrumented ? new List <KeyValuePair <double, double> >() : null;

            // put test values inside a namespace
            var subspace = db.Partition("BulkInsert");

            // cleanup everything left over from a previous run
            using (var tr = db.BeginTransaction(ct))
            {
                tr.ClearRange(subspace);
                await tr.CommitAsync();
            }

            // insert all values (batched)
            Console.WriteLine("Inserting " + N.ToString("N0", CultureInfo.InvariantCulture) + " keys: ");
            var  insert  = Stopwatch.StartNew();
            int  batches = 0;
            long bytes   = 0;

            // shared time base used to timestamp individual commits
            var start = Stopwatch.StartNew();

            var tasks = new List <Task>();

            foreach (var worker in FdbKey.Batched(0, N, K, B))
            {
                //hack
                tasks.Add(Task.Run(async() =>
                {
                    foreach (var chunk in worker)
                    {
                        // one transaction per chunk: (chunk.Key, chunk.Value) = (offset, count)
                        using (var tr = db.BeginTransaction(ct))
                        {
                            int z = 0;
                            foreach (int i in Enumerable.Range(chunk.Key, chunk.Value))
                            {
                                tr.Set(subspace.Pack(i), Slice.Create(new byte[256]));
                                z++;
                            }

                            //Console.Write("#");
                            //Console.WriteLine("  Commiting batch (" + tr.Size.ToString("N0", CultureInfo.InvariantCulture) + " bytes) " + z + " keys");
                            var localStart = start.Elapsed.TotalSeconds;
                            await tr.CommitAsync();
                            var localDuration = start.Elapsed.TotalSeconds - localStart;
                            if (instrumented)
                            {
                                // timings is guaranteed non-null here (allocated when instrumented == true)
                                lock (timings) { timings.Add(new KeyValuePair <double, double>(localStart, localDuration)); }
                            }
                            Interlocked.Increment(ref batches);
                            Interlocked.Add(ref bytes, tr.Size);
                        }
                    }
                }, ct));
            }
            await Task.WhenAll(tasks);

            insert.Stop();
            // BUGFIX: the per-batch/per-item group was opened with "(" but never closed
            Console.WriteLine("Committed " + batches + " batches in " + FormatTimeMilli(insert.Elapsed.TotalMilliseconds) + " (" + FormatTimeMilli(insert.Elapsed.TotalMilliseconds / batches) + " / batch, " + FormatTimeMicro(insert.Elapsed.TotalMilliseconds / N) + " / item)");
            Console.WriteLine("Throughput " + FormatThroughput(bytes, insert.Elapsed.TotalSeconds));

            if (instrumented)
            {
                // dump "start;end;duration" per commit; invariant culture keeps the CSV parseable
                // regardless of the machine's decimal separator (the rest of this file already
                // formats machine-readable numbers with CultureInfo.InvariantCulture)
                var sb = new StringBuilder();
                foreach (var kvp in timings)
                {
                    sb.Append(kvp.Key.ToString(CultureInfo.InvariantCulture)).Append(';').Append((kvp.Key + kvp.Value).ToString(CultureInfo.InvariantCulture)).Append(';').Append(kvp.Value.ToString(CultureInfo.InvariantCulture)).AppendLine();
                }
#if DEBUG
                // NOTE(review): hard-coded dump path; assumes c:\temp\fdb exists — consider Path.GetTempPath()
                System.IO.File.WriteAllText(@"c:\temp\fdb\timings_" + N + "_" + K + "_" + B + ".csv", sb.ToString());
#else
                Console.WriteLine(sb.ToString());
#endif
            }

            // Read values back in one shot to verify and time the range scan

            using (var tr = db.BeginTransaction(ct))
            {
                Console.WriteLine("Reading all keys...");
                var sw    = Stopwatch.StartNew();
                var items = await tr.GetRangeStartsWith(subspace).ToListAsync();

                sw.Stop();
                Console.WriteLine("Took " + FormatTimeMilli(sw.Elapsed.TotalMilliseconds) + " to get " + items.Count.ToString("N0", CultureInfo.InvariantCulture) + " results");
            }
        }