 public Slice EncodeKey(FdbTuple<T1, T2> value)
 {
     return EncodeComposite(value, 2);
 }
 public Slice EncodeValue(T key)
 {
     return FdbTuple.EncodeKey(key);
 }
 public virtual Slice EncodeKey(T1 item1, T2 item2)
 {
     return EncodeComposite(FdbTuple.Create<T1, T2>(item1, item2), 2);
 }
 public Slice EncodeComposite(FdbTuple<T1, T2> key, int items)
 {
     return this.Encoder.EncodeComposite(new FdbTuple<T1, T2, T3>(key.Item1, key.Item2, default(T3)), items);
 }
        private static async Task MainAsync(CancellationToken ct)
        {
            // change the path to the native lib if not default
            if (NATIVE_PATH != null)
            {
                Fdb.Options.SetNativeLibPath(NATIVE_PATH);
            }

            // uncomment this to enable network thread tracing
            // FdbCore.TracePath = Path.Combine(Path.GetTempPath(), "fdb");

            int apiVersion = Fdb.GetMaxApiVersion();

            Console.WriteLine("Max API Version: " + apiVersion);

            try
            {
                Console.WriteLine("Starting network thread...");
                Fdb.Start();                 // this will select API version 21
                Console.WriteLine("> Up and running");

                Console.WriteLine("Connecting to local cluster...");
                using (var cluster = await Fdb.CreateClusterAsync(CLUSTER_FILE, ct))
                {
                    Console.WriteLine("> Connected!");

                    Console.WriteLine("Opening database 'DB'...");
                    using (var db = await cluster.OpenDatabaseAsync(DB_NAME, FdbSubspace.Create(FdbTuple.Create(SUBSPACE)), false, ct))
                    {
                        Console.WriteLine("> Connected to db '{0}'", db.Name);

                        // get coordinators
                        var cf = await Fdb.System.GetCoordinatorsAsync(db, ct);

                        Console.WriteLine("Coordinators: " + cf.ToString());

                        // clear everything
                        using (var tr = db.BeginTransaction(ct))
                        {
                            Console.WriteLine("Clearing subspace " + db.GlobalSpace + " ...");
                            tr.ClearRange(db.GlobalSpace);
                            await tr.CommitAsync();

                            Console.WriteLine("> Database cleared");
                        }

                        Console.WriteLine("----------");

                        await TestSimpleTransactionAsync(db, ct);

                        Console.WriteLine("----------");

                        await BenchInsertSmallKeysAsync(db, N, 16, ct);                         // some guid
                        await BenchInsertSmallKeysAsync(db, N, 60 * 4, ct);                     // one Int32 per minute, over an hour
                        await BenchInsertSmallKeysAsync(db, N, 512, ct);                        // small JSON payload

                        ////await BenchInsertSmallKeysAsync(db, N, 4096, ct); // typical small chunk size
                        ////await BenchInsertSmallKeysAsync(db, N / 10, 65536, ct); // typical medium chunk size
                        //await BenchInsertSmallKeysAsync(db, 1, 100000, ct); // Maximum value size (as of beta 1)

                        // insert keys in parallel
                        await BenchConcurrentInsert(db, 1, 100, 512, ct);
                        await BenchConcurrentInsert(db, 1, 1000, 512, ct);
                        await BenchConcurrentInsert(db, 1, 10000, 512, ct);

                        await BenchConcurrentInsert(db, 1, N, 16, ct);
                        await BenchConcurrentInsert(db, 2, N, 16, ct);
                        await BenchConcurrentInsert(db, 4, N, 16, ct);
                        await BenchConcurrentInsert(db, 8, N, 16, ct);
                        await BenchConcurrentInsert(db, 16, N, 16, ct);

                        //await BenchSerialWriteAsync(db, N, ct);
                        //await BenchSerialReadAsync(db, N, ct);
                        //await BenchConcurrentReadAsync(db, N, ct);

                        //await BenchClearAsync(db, N, ct);

                        await BenchUpdateSameKeyLotsOfTimesAsync(db, 1000, ct);

                        await BenchUpdateLotsOfKeysAsync(db, 1000, ct);

                        await BenchBulkInsertThenBulkReadAsync(db, 100 * 1000, 50, 128, ct);
                        await BenchBulkInsertThenBulkReadAsync(db, 100 * 1000, 128, 50, ct);

                        ////await BenchBulkInsertThenBulkReadAsync(db, 1 * 1000 * 1000, 50, 128, ct);

                        await BenchMergeSortAsync(db, 100, 3, 20, ct);
                        await BenchMergeSortAsync(db, 1000, 10, 100, ct);
                        await BenchMergeSortAsync(db, 100, 100, 100, ct);
                        await BenchMergeSortAsync(db, 100, 1000, 100, ct);

                        Console.WriteLine("time to say goodbye...");
                    }
                }
            }
            finally
            {
                Console.WriteLine("### DONE ###");
                Fdb.Stop();
            }
#if DEBUG
            Console.ReadLine();
#endif
        }
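A hedged sketch, not part of the original listing: TestSimpleTransactionAsync is invoked above but its body is not shown. Assuming the transaction surface seen elsewhere in these snippets (BeginTransaction, Set, CommitAsync, GetValuesAsync, BeginReadOnlyTransaction) plus a Pack(...) helper on the subspace, a minimal version might look like this:

        private static async Task TestSimpleTransactionAsync(IFdbDatabase db, CancellationToken ct)
        {
            // hypothetical key under the global subspace; Pack(...) is assumed from this API
            var key = db.GlobalSpace.Pack(FdbTuple.Create("hello"));

            // write a value in one transaction...
            using (var tr = db.BeginTransaction(ct))
            {
                tr.Set(key, Slice.FromString("world")); // Slice.FromString is assumed (Slice.FromInt32 appears elsewhere in this listing)
                await tr.CommitAsync();
            }

            // ... and read it back in another
            using (var tr = db.BeginReadOnlyTransaction(ct))
            {
                var values = await tr.GetValuesAsync(new[] { key });
                Console.WriteLine("hello = " + values[0].ToString());
            }
        }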
 public Slice EncodeKey(FdbTuple<T1, T2> key)
 {
     return EncodeComposite(key, 2);
 }
 public void Clear([NotNull] IFdbTransaction trans, FdbTuple<T1, T2, T3> key)
 {
     trans.Clear(EncodeKey(key));
 }
Example #8
        public async Task Test_Can_Batch_Aggregate_With_Transformed_Result()
        {
            const int N = 50 * 1000;

            using (var db = await OpenTestPartitionAsync())
            {
                Log("Bulk inserting {0:N0} items...", N);
                var location = await GetCleanDirectory(db, "Bulk", "Aggregate");

                Log("Preparing...");

                var rnd    = new Random(2403);
                var source = Enumerable.Range(1, N).Select((x) => new KeyValuePair<int, int>(x, rnd.Next(1000))).ToList();

                await Fdb.Bulk.WriteAsync(
                    db,
                    source.Select((x) => new KeyValuePair<Slice, Slice>(location.Keys.Encode(x.Key), Slice.FromInt32(x.Value))),
                    this.Cancellation
                    );

                Log("Reading...");

                int    chunks  = 0;
                var    sw      = Stopwatch.StartNew();
                double average = await Fdb.Bulk.AggregateAsync(
                    db,
                    source.Select(x => location.Keys.Encode(x.Key)),
                    () => FdbTuple.Create(0L, 0L),
                    async (xs, ctx, state) =>
                {
                    Interlocked.Increment(ref chunks);
                    Log("> Called with batch of " + xs.Length.ToString("N0") + " at offset " + ctx.Position.ToString("N0") + " of gen " + ctx.Generation + " with step " + ctx.Step + " and cooldown " + ctx.Cooldown + " (genElapsed=" + ctx.ElapsedGeneration + ", totalElapsed=" + ctx.ElapsedTotal + ")");

                    var throttle = Task.Delay(TimeSpan.FromMilliseconds(10 + (xs.Length / 25) * 5));                             // magic numbers to try to last longer than 5 sec
                    var results  = await ctx.Transaction.GetValuesAsync(xs);
                    await throttle;

                    long sum = 0L;
                    for (int i = 0; i < results.Length; i++)
                    {
                        sum += results[i].ToInt32();
                    }
                    return FdbTuple.Create(state.Item1 + sum, state.Item2 + results.Length);
                },
                    (state) => (double)state.Item1 / state.Item2,
                    this.Cancellation
                    );

                sw.Stop();

                Log("Done in {0:N3} sec and {1} chunks", sw.Elapsed.TotalSeconds, chunks);

                double actual = (double)source.Sum(x => (long)x.Value) / source.Count;
                Log("> Computed average of the {0:N0} random values is {1:N3}", N, average);
                Log("> Actual average of the {0:N0} random values is {1:N3}", N, actual);
                Assert.That(average, Is.EqualTo(actual).Within(double.Epsilon));

                // cleanup because this test can produce a lot of data
                await location.RemoveAsync(db, this.Cancellation);
            }
        }
 public virtual FdbKeyRange ToRange(T1 key1, T2 key2, T3 key3)
 {
     return FdbTuple.ToRange(this.EncodeKey(key1, key2, key3));
 }
 public void Set([NotNull] IFdbTransaction trans, FdbTuple<T1, T2, T3> key, Slice value)
 {
     trans.Set(EncodeKey(key), value);
 }
 Slice ICompositeKeyEncoder<FdbTuple<T1, T2, T3>>.EncodeComposite(FdbTuple<T1, T2, T3> key, int items)
 {
     return this.Key + m_encoder.EncodeComposite(key, items);
 }
 public virtual Slice EncodeKey(FdbTuple<T1, T2, T3> key)
 {
     return this.Key + m_encoder.EncodeKey(key);
 }
 protected IFdbTuple ToRelativePath(IEnumerable<string> path)
 {
     return ToRelativePath(path == null ? null : FdbTuple.FromEnumerable<string>(path));
 }
 public FdbKeyRange ToRange(T value)
 {
     //REVIEW: which semantic for ToRange() should we use?
     return FdbTuple.ToRange(Encode(value));
 }
 public void SetId(Dictionary<string, IFdbTuple> document, TId id)
 {
     document[this.IdName] = FdbTuple.Create(id);
 }
Example #16
        public async Task Test_Can_Batch_ForEach_WithContextAndState()
        {
            const int N = 50 * 1000;

            using (var db = await OpenTestPartitionAsync())
            {
                Log("Bulk inserting {0:N0} items...", N);
                var location = await GetCleanDirectory(db, "Bulk", "ForEach");

                Log("Preparing...");

                await Fdb.Bulk.WriteAsync(
                    db,
                    Enumerable.Range(1, N).Select((x) => new KeyValuePair<Slice, Slice>(location.Keys.Encode(x), Slice.FromInt32(x))),
                    this.Cancellation
                    );

                Log("Reading...");

                long total  = 0;
                long count  = 0;
                int  chunks = 0;
                var  sw     = Stopwatch.StartNew();
                await Fdb.Bulk.ForEachAsync(
                    db,
                    Enumerable.Range(1, N).Select(x => location.Keys.Encode(x)),
                    () => FdbTuple.Create(0L, 0L),                     // (sum, count)
                    (xs, ctx, state) =>
                {
                    Interlocked.Increment(ref chunks);
                    Log("> Called with batch of {0:N0} at offset {1:N0} of gen {2} with step {3} and cooldown {4} (generation = {5:N3} sec, total = {6:N3} sec)", xs.Length, ctx.Position, ctx.Generation, ctx.Step, ctx.Cooldown, ctx.ElapsedGeneration.TotalSeconds, ctx.ElapsedTotal.TotalSeconds);

                    var t = ctx.Transaction.GetValuesAsync(xs);
                    Thread.Sleep(TimeSpan.FromMilliseconds(10 + (xs.Length / 25) * 5)); // magic numbers to try to last longer than 5 sec
                    var results = t.Result;                                             // <-- this is bad practice, never do that in real life, 'mkay?

                    long sum = 0;
                    for (int i = 0; i < results.Length; i++)
                    {
                        sum += results[i].ToInt32();
                    }
                    return FdbTuple.Create(
                               state.Item1 + sum,                          // updated sum
                               state.Item2 + results.Length                // updated count
                               );
                },
                    (state) =>
                {
                    Interlocked.Add(ref total, state.Item1);
                    Interlocked.Add(ref count, state.Item2);
                },
                    this.Cancellation
                    );

                sw.Stop();

                Log("Done in {0:N3} sec and {1} chunks", sw.Elapsed.TotalSeconds, chunks);
                Log("Sum of integers 1 to {0:N0} is {1:N0}", count, total);

                // cleanup because this test can produce a lot of data
                await location.RemoveAsync(db, this.Cancellation);
            }
        }
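The blocking t.Result call above is the part the inline comment flags as bad practice. As a hedged alternative, assuming Fdb.Bulk.ForEachAsync also accepts an async handler (AggregateAsync in the earlier example does), the handler body could await the read instead of blocking a thread-pool thread:

                    async (xs, ctx, state) =>
                {
                    Interlocked.Increment(ref chunks);

                    // start the delay and the read together, then await both (no blocking .Result)
                    var throttle = Task.Delay(TimeSpan.FromMilliseconds(10 + (xs.Length / 25) * 5));
                    var results  = await ctx.Transaction.GetValuesAsync(xs);
                    await throttle;

                    long sum = 0;
                    for (int i = 0; i < results.Length; i++)
                    {
                        sum += results[i].ToInt32();
                    }
                    return FdbTuple.Create(state.Item1 + sum, state.Item2 + results.Length);
                }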
 public abstract Slice EncodeComposite(FdbTuple<T1, T2> key, int items);
            /// <summary>Estimate the number of keys in the specified range.</summary>
            /// <param name="db">Database used for the operation</param>
            /// <param name="beginInclusive">Key defining the beginning of the range</param>
            /// <param name="endExclusive">Key defining the end of the range</param>
            /// <param name="onProgress">Optional callback called everytime the count is updated. The first argument is the current count, and the second argument is the last key that was found.</param>
            /// <param name="cancellationToken">Token used to cancel the operation</param>
            /// <returns>Number of keys k such that <paramref name="beginInclusive"/> &lt;= k &gt; <paramref name="endExclusive"/></returns>
            /// <remarks>If the range contains a large of number keys, the operation may need more than one transaction to complete, meaning that the number will not be transactionally accurate.</remarks>
            public static async Task<long> EstimateCountAsync([NotNull] IFdbDatabase db, Slice beginInclusive, Slice endExclusive, IProgress<FdbTuple<long, Slice>> onProgress, CancellationToken cancellationToken)
            {
                const int INIT_WINDOW_SIZE = 1 << 8;            // start at 256
                const int MAX_WINDOW_SIZE  = 1 << 13;           // never use more than 8192
                const int MIN_WINDOW_SIZE  = 64;                // use range reads when the window size is smaller than 64

                if (db == null)
                {
                    throw new ArgumentNullException("db");
                }
                if (endExclusive < beginInclusive)
                {
                    throw new ArgumentException("The end key cannot be less than the begin key", "endExclusive");
                }

                cancellationToken.ThrowIfCancellationRequested();

                // To count the number of items in the range, we will scan it using a key selector with an offset equal to our window size
                // > if the returned key is still inside the range, we add the window size to the counter, and start again from the current key
                // > if the returned key is outside the range, we reduce the size of the window, and start again from the previous key
                // > if the returned key is exactly equal to the end of range, OR if the window size was 1, then we stop

                // Since we don't know in advance if the range contains 1 key or 1 Billion keys, choosing a good value for the window size is critical:
                // > if it is too small and the range is very large, we will need too many sequential reads and the network latency will quickly add up
                // > if it is too large and the range is small, we will spend too much time halving the window size until we get the correct value

                // A few optimizations are possible:
                // > we could start with a small window size, and then double its size on every full segment (up to a maximum)
                // > for the last segment, we don't need to wait for a GetKey to complete before issuing the next, so we could split the segment into 4 (or more), do the GetKeyAsync() in parallel, detect the quarter that crosses the boundary, and iterate again until the size is small
                // > once the window size is small enough, we can switch to using GetRange to read the last segment in one shot, instead of iterating with window size 16, 8, 4, 2 and 1 (the worst case being 2^N - 1 items remaining)

                // note: we make a copy of the keys because the operation could take a long time and the keys could prevent a potentially large underlying buffer from being GCed
                var cursor = beginInclusive.Memoize();
                var end    = endExclusive.Memoize();

                using (var tr = db.BeginReadOnlyTransaction(cancellationToken))
                {
#if TRACE_COUNTING
                    tr.Annotate("Estimating number of keys in range {0}", FdbKeyRange.Create(beginInclusive, endExclusive));
#endif

                    tr.SetOption(FdbTransactionOption.ReadYourWritesDisable);

                    // start looking for the first key in the range
                    cursor = await tr.Snapshot.GetKeyAsync(FdbKeySelector.FirstGreaterOrEqual(cursor)).ConfigureAwait(false);

                    if (cursor >= end)
                    {                     // the range is empty!
                        return 0;
                    }

                    // we already have seen one key, so add it to the count
#if TRACE_COUNTING
                    int iter = 1;
#endif
                    long counter = 1;
                    // start with a medium-sized window
                    int  windowSize = INIT_WINDOW_SIZE;
                    bool last       = false;

                    while (cursor < end)
                    {
                        Contract.Assert(windowSize > 0);

                        var          selector = FdbKeySelector.FirstGreaterOrEqual(cursor) + windowSize;
                        Slice        next     = Slice.Nil;
                        FdbException error    = null;
                        try
                        {
                            next = await tr.Snapshot.GetKeyAsync(selector).ConfigureAwait(false);

#if TRACE_COUNTING
                            ++iter;
#endif
                        }
                        catch (FdbException e)
                        {
                            error = e;
                        }

                        if (error != null)
                        {
                            // => from this point, the count returned will not be transactionally accurate
                            if (error.Code == FdbError.PastVersion)
                            {                             // the transaction used up its time window
                                tr.Reset();
                            }
                            else
                            {                             // check to see if we can continue...
                                await tr.OnErrorAsync(error.Code).ConfigureAwait(false);
                            }
                            // retry
                            tr.SetOption(FdbTransactionOption.ReadYourWritesDisable);
                            continue;
                        }

                        //BUGBUG: GetKey(...) always truncates the result to \xFF if the selected key would be past the end,
                        // so we need to fall back immediately to the binary search and/or get_range if next == \xFF

                        if (next > end)
                        {                         // we have reached past the end, switch to binary search
                            last = true;

                            // if window size is already 1, then we have counted everything (the range.End key does not exist in the db)
                            if (windowSize == 1)
                            {
                                break;
                            }

                            if (windowSize <= MIN_WINDOW_SIZE)
                            {                             // The window is small enough to switch to reading for counting (will be faster than binary search)
#if TRACE_COUNTING
                                tr.Annotate("Switch to reading all items (window size = {0})", windowSize);
#endif

                                // Count the keys by reading them. Also, we know that there cannot be more than windowSize - 1 remaining
                                int n = await tr.Snapshot
                                        .GetRange(
                                    FdbKeySelector.FirstGreaterThan(cursor),                                             // cursor has already been counted once
                                    FdbKeySelector.FirstGreaterOrEqual(end),
                                    new FdbRangeOptions()
                                {
                                    Limit = windowSize - 1
                                }
                                    )
                                        .CountAsync()
                                        .ConfigureAwait(false);

                                counter += n;
                                if (onProgress != null)
                                {
                                    onProgress.Report(FdbTuple.Create(counter, end));
                                }
#if TRACE_COUNTING
                                ++iter;
#endif
                                break;
                            }

                            windowSize >>= 1;
                            continue;
                        }

                        // the range is not finished, advance the cursor
                        counter += windowSize;
                        cursor   = next;
                        if (onProgress != null)
                        {
                            onProgress.Report(FdbTuple.Create(counter, cursor));
                        }

                        if (!last)
                        {                         // double the size of the window if we are not in the last segment
                            windowSize = Math.Min(windowSize << 1, MAX_WINDOW_SIZE);
                        }
                    }
#if TRACE_COUNTING
                    tr.Annotate("Found {0} keys in {1} iterations", counter, iter);
#endif
                    return counter;
                }
            }
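The window-size strategy described in the comments of EstimateCountAsync can be illustrated without any FDB types. A minimal sketch (plain C#, an in-memory sorted array standing in for the keyspace; EstimateCount is a hypothetical helper, not part of the library):

        // Counts keys in [begin, end) by probing with a growing/shrinking window,
        // mirroring the doubling-then-halving logic of EstimateCountAsync above.
        static long EstimateCount(int[] sortedKeys, int begin, int end)
        {
            const int INIT = 256, MAX = 8192, MIN = 64;

            // index of the first key >= begin (plays the role of GetKeyAsync(FirstGreaterOrEqual(cursor)))
            int cursor = Array.BinarySearch(sortedKeys, begin);
            if (cursor < 0) cursor = ~cursor;
            if (cursor >= sortedKeys.Length || sortedKeys[cursor] >= end) return 0; // the range is empty

            long counter = 1; // the first key has been counted
            int window = INIT;
            bool last = false;

            while (true)
            {
                int probe = cursor + window; // like "FirstGreaterOrEqual(cursor) + windowSize"
                if (probe >= sortedKeys.Length || sortedKeys[probe] >= end)
                {   // past the end: shrink the window, or finish
                    last = true;
                    if (window == 1) break;
                    if (window <= MIN)
                    {   // small enough: count the tail directly (the GetRange fallback)
                        while (cursor + 1 < sortedKeys.Length && sortedKeys[cursor + 1] < end)
                        {
                            cursor++;
                            counter++;
                        }
                        break;
                    }
                    window >>= 1;
                    continue;
                }

                // the probe landed inside the range: the whole window of keys is counted at once
                counter += window;
                cursor = probe;
                if (!last) window = Math.Min(window << 1, MAX); // grow while still in the middle of the range
            }
            return counter;
        }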
        public void Test_RangeDictionary_Black_And_White()
        {
            // we have a space from 0 <= x < 100 that is empty
            // we insert a random series of ranges that are either Black or White
            // after each run, we check that all ranges are correctly ordered, merged, and so on.

            const int S = 100;            // [0, 100)
            const int N = 1000;           // number of repetitions
            const int K = 25;             // max number of ranges inserted per run

            var rnd  = new Random();
            int seed = rnd.Next();

            Console.WriteLine("Using random seed " + seed);
            rnd = new Random(seed);

            for (int i = 0; i < N; i++)
            {
                var cola = new ColaRangeDictionary<int, RangeColor>();

                var witnessColors  = new RangeColor?[S];
                var witnessIndexes = new int?[S];

                // choose a random number of ranges
                int k = rnd.Next(3, K);

                Trace.WriteLine("");
                Trace.WriteLine(String.Format("# Starting run {0} with {1} insertions", i, k));

                int p = 0;
                for (int j = 0; j < k; j++)
                {
                    var begin = rnd.Next(S);
                    // 50/50 chance of inserting a single element or a range
                    var end = (rnd.Next(2) == 1 ? begin : rnd.Next(2) == 1 ? rnd.Next(begin, S) : Math.Min(S - 1, begin + rnd.Next(5))) + 1;                     // reminder: +1 because 'end' is EXCLUDED
                    Assert.That(begin, Is.LessThan(end));
                    // 50/50 for the coloring
                    var color = rnd.Next(2) == 1 ? RangeColor.White : RangeColor.Black;

                    // uncomment this line if you want to reproduce this exact run
                    //Console.WriteLine("\t\tcola.Mark(" + begin + ", " + end + ", RangeColor." + color + ");");

                    cola.Mark(begin, end, color);
                    for (int z = begin; z < end; z++)
                    {
                        witnessColors[z]  = color;
                        witnessIndexes[z] = p;
                    }

                    //Console.WriteLine(" >        |{0}|", String.Join("", witnessIndexes.Select(x => x.HasValue ? (char)('A' + x.Value) : ' ')));
                    Debug.WriteLine("          |{0}| + [{1,2}, {2,2}) = {3} > #{4,2} [ {5} ]", String.Join("", witnessColors.Select(w => !w.HasValue ? ' ' : w.Value == RangeColor.Black ? '#' : '°')), begin, end, color, cola.Count, String.Join(", ", cola));

                    ++p;
                }

                // pack the witness list into ranges
                var witnessRanges = new List<FdbTuple<int, int, RangeColor>>();
                RangeColor? prev = null;
                p = 0;
                for (int z = 1; z < S; z++)
                {
                    if (witnessColors[z] != prev)
                    {                     // switch
                        if (prev.HasValue)
                        {
                            witnessRanges.Add(FdbTuple.Create(p, z, prev.Value));
                        }
                        p    = z;
                        prev = witnessColors[z];
                    }
                }

                Trace.WriteLine(String.Format("> RANGES: #{0,2} [ {1} ]", cola.Count, String.Join(", ", cola)));
                Trace.WriteLine(String.Format("          #{0,2} [ {1} ]", witnessRanges.Count, String.Join(", ", witnessRanges)));

                var counter         = new int[S];
                var observedIndexes = new int?[S];
                var observedColors  = new RangeColor?[S];
                p = 0;
                foreach (var range in cola)
                {
                    Assert.That(range.Begin < range.End, "Begin < End {0}", range);
                    for (int z = range.Begin; z < range.End; z++)
                    {
                        observedIndexes[z] = p;
                        counter[z]++;
                        observedColors[z] = range.Value;
                    }
                    ++p;
                }

                Trace.WriteLine(String.Format("> INDEXS: |{0}|", String.Join("", observedIndexes.Select(x => x.HasValue ? (char)('A' + x.Value) : ' '))));
                Trace.WriteLine(String.Format("          |{0}|", String.Join("", witnessIndexes.Select(x => x.HasValue ? (char)('A' + x.Value) : ' '))));

                Trace.WriteLine(String.Format("> COLORS: |{0}|", String.Join("", observedColors.Select(w => !w.HasValue ? ' ' : w.Value == RangeColor.Black ? '#' : '°'))));
                Trace.WriteLine(String.Format("          |{0}|", String.Join("", witnessColors.Select(w => !w.HasValue ? ' ' : w.Value == RangeColor.Black ? '#' : '°'))));

                // verify the colors
                foreach (var range in cola)
                {
                    for (int z = range.Begin; z < range.End; z++)
                    {
                        Assert.That(range.Value, Is.EqualTo(witnessColors[z]), "#{0} color mismatch for {1}", z, range);
                        Assert.That(counter[z], Is.EqualTo(1), "Duplicate at offset #{0} for {1}", z, range);
                    }
                }

                // verify that nothing was missed
                for (int z = 0; z < S; z++)
                {
                    if (witnessColors[z] == null)
                    {
                        if (counter[z] != 0)
                        {
                            Trace.WriteLine("@ FAIL!!! |" + new string('-', z) + "^");
                        }
                        Assert.That(counter[z], Is.EqualTo(0), "Should be void at offset {0}", z);
                    }
                    else
                    {
                        if (counter[z] != 1)
                        {
                            Trace.WriteLine("@ FAIL!!! |" + new string('-', z) + "^");
                        }
                        Assert.That(counter[z], Is.EqualTo(1), "Should be filled with {1} at offset {0}", z, witnessColors[z]);
                    }
                }
            }
        }
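A deterministic miniature of the random test above, to make the expected behavior concrete (hedged: it assumes Mark overwrites overlapping ranges and merges adjacent ranges of the same color, which is what the witness assertions in the test require):

        var cola = new ColaRangeDictionary<int, RangeColor>();
        cola.Mark(0, 10, RangeColor.Black);  // [0,10) is black
        cola.Mark(5, 15, RangeColor.White);  // overwrites [5,10): expect [0,5)=Black, [5,15)=White
        cola.Mark(15, 20, RangeColor.White); // adjacent, same color: expect it to merge into [5,20)=White

        foreach (var range in cola)
        {
            Console.WriteLine("[{0}, {1}) = {2}", range.Begin, range.End, range.Value);
        }
        // expected output, if merging behaves as asserted above:
        // [0, 5) = Black
        // [5, 20) = White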