Example #1
 public static IEnumerable <KeyValuePair <TKey, ulong> > ScanUInt64 <TKey>(Tx tx, byte[] key,
                                                                           Func <IFdbTuple, TKey> keygen, string traceName, int skip = 0)
 {
     return(InternalScan(tx, key, (slice, bytes) => {
         var value = Slice.Create(bytes).ToUInt64();
         var k = keygen(FdbTuple.Unpack(slice));
         return new KeyValuePair <TKey, ulong>(k, value);
     }, traceName, skip));
 }
Example #2
 public static IEnumerable <KeyValuePair <TKey, TValue> > Scan <TKey, TValue>(Tx tx, byte[] key,
                                                                              Func <IFdbTuple, TKey> keygen, string traceName, int skip = 0)
 {
     return(InternalScan(tx, key, (slice, bytes) => {
         using (var mem = new MemoryStream(bytes)) {
             var value = Serializer.Deserialize <TValue>(mem);
             var k = keygen(FdbTuple.Unpack(slice));
             return new KeyValuePair <TKey, TValue>(k, value);
         }
     }, traceName, skip));
 }
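Examples #1 and #2 are thin wrappers around a project-specific InternalScan helper; the caller only supplies a keygen delegate that maps the unpacked key tuple to the dictionary key. The sketch below is a hypothetical caller, not part of the original examples: the Tx instance, the key prefix and the trace name are illustrative assumptions.

 // Hypothetical caller of ScanUInt64 (illustrative only): 'tx' and 'prefixKey' are
 // assumed to be provided by the surrounding code; the keygen lambda keeps the last
 // element of each unpacked key tuple as the dictionary key.
 public static IEnumerable <KeyValuePair <string, ulong> > ReadCounters(Tx tx, byte[] prefixKey)
 {
     return(ScanUInt64 <string>(
                tx,                                // open transaction (project-specific Tx type)
                prefixKey,                         // packed key prefix to scan
                t => t.Get <string>(t.Count - 1),  // keygen: last element of the unpacked key tuple
                "read-counters"));                 // trace name used for diagnostics
 }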
        public static IFdbTuple Unpack(this FdbSubspace subspace, Slice key)
        {
            // We special case 'Slice.Nil' because it is returned by GetAsync(..) when the key does not exist
             // This simplifies the decoding logic: the caller can write "var foo = FdbTuple.Unpack(await tr.GetAsync(...))" and then only has to test "if (foo != null)"
            if (key.IsNull)
            {
                return(null);
            }

            return(new FdbPrefixedTuple(subspace.Key, FdbTuple.Unpack(subspace.ExtractAndCheck(key))));
        }
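The null return enables the calling pattern described in the comment above. The following is a minimal sketch, not part of the original example, assuming the value stored under the key is itself a key packed inside the same subspace:

         // Hypothetical caller (illustrative only) showing the pattern enabled by the null
         // check above: decode a stored value only when the key actually exists.
         // The value under 'key' is assumed to be a key packed inside the same subspace.
         public static async Task <int?> TryReadIdAsync(IFdbReadOnlyTransaction tr, FdbSubspace subspace, Slice key)
         {
             var packed = await tr.GetAsync(key);      // Slice.Nil when the key does not exist
             var tuple  = subspace.Unpack(packed);     // null instead of an exception
             if (tuple == null)
             {
                 return null;
             }
             return tuple.Get <int>(0);                // element type assumed for illustration
         }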
                public override FdbTuple <T1, T2> DecodeComposite(Slice encoded, int items)
                {
                    if (items < 1 || items > 2)
                    {
                        throw new ArgumentOutOfRangeException("items", items, "Item count must be either 1 or 2");
                    }

                    var t = FdbTuple.Unpack(encoded).OfSize(items);

                    Contract.Assert(t != null);

                    return(FdbTuple.Create <T1, T2>(
                               t.Get <T1>(0),
                               items >= 2 ? t.Get <T2>(1) : default(T2)
                               ));
                }
            public List <KeyValuePair <string, IFdbTuple> > Build(KeyValuePair <IFdbTuple, Slice>[] parts)
            {
                if (parts == null)
                {
                    throw new ArgumentNullException("parts");
                }

                var list = new List <KeyValuePair <string, IFdbTuple> >(parts.Length);

                foreach (var part in parts)
                {
                    list.Add(new KeyValuePair <string, IFdbTuple>(
                                 part.Key.Last <string>(),
                                 FdbTuple.Unpack(part.Value)
                                 ));
                }
                return(list);
            }
                public override FdbTuple <T1, T2> DecodeComposite(Slice encoded, int items)
                {
                    if (items < 1 || items > 2)
                    {
                        throw new ArgumentOutOfRangeException("items", items, "Item count must be either 1 or 2");
                    }

                    var t = FdbTuple.Unpack(encoded);

                    Contract.Assert(t != null);
                    if (t.Count != items)
                    {
                        throw new ArgumentException(String.Format("Was expected {0} items, but decoded tuple only has {1}", items, t.Count));
                    }

                    return(FdbTuple.Create <T1, T2>(
                               t.Get <T1>(0),
                               items >= 2 ? t.Get <T2>(1) : default(T2)
                               ));
                }
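Both DecodeComposite overloads reduce to a Pack/Unpack round-trip through the tuple layer. The helper below is an illustrative sketch, not part of the original example, built only from FdbTuple calls already shown in these snippets:

                // Illustrative helper (not part of the original example) showing the
                // Pack/Unpack round-trip that DecodeComposite performs (values are arbitrary).
                public static FdbTuple <string, int> RoundTripExample()
                {
                    Slice encoded = FdbTuple.Pack("userA", 10093);   // composite key with two items

                    var t = FdbTuple.Unpack(encoded).OfSize(2);      // back to a tuple, with a size check

                    return(FdbTuple.Create <string, int>(
                               t.Get <string>(0),                    // -> "userA"
                               t.Get <int>(1)                        // -> 10093
                               ));
                }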
Example #7
        /// <summary>Return the list the names of all fields of an hashset</summary>
        /// <param name="trans">Transaction that will be used for this request</param>
        /// <param name="id">Unique identifier of the hashset</param>
        /// <returns>List of all fields. If the list is empty, the hashset does not exist</returns>
        public Task <List <string> > GetKeys(IFdbReadOnlyTransaction trans, IFdbTuple id, CancellationToken cancellationToken = default(CancellationToken))
        {
            //note: As of Beta2, FDB does not have a fdb_get_range that only returns the keys. That means that we also have to read the values from the db just to get the names of the fields :(
            //TODO: find a way to optimize this ?

            if (trans == null)
            {
                throw new ArgumentNullException("trans");
            }
            if (id == null)
            {
                throw new ArgumentNullException("id");
            }

            var prefix  = GetKey(id);
            var results = new Dictionary <string, Slice>(StringComparer.OrdinalIgnoreCase);

            return(trans
                   .GetRange(FdbKeyRange.StartsWith(prefix))
                   .Select((kvp) => ParseFieldKey(FdbTuple.Unpack(kvp.Key)))
                   .ToListAsync(cancellationToken));
        }
        public static IFdbTuple[] Unpack(this FdbSubspace subspace, [NotNull] Slice[] keys)
        {
            if (keys == null)
            {
                throw new ArgumentNullException("keys");
            }

            var prefix = subspace.Key;
            var tuples = new IFdbTuple[keys.Length];

            if (keys.Length > 0)
            {
                for (int i = 0; i < keys.Length; i++)
                {
                    if (keys[i].HasValue)
                    {
                        tuples[i] = new FdbPrefixedTuple(prefix, FdbTuple.Unpack(subspace.ExtractAndCheck(keys[i])));
                    }
                }
            }

            return(tuples);
        }
Example #9
 public static IEnumerable <TKey> ScanKeys <TKey>(Tx tx, byte[] key, Func <IFdbTuple, TKey> keygen,
                                                  string traceName, int skip = 0)
 {
     return(InternalScan(tx, key, (slice, bytes) => keygen(FdbTuple.Unpack(slice)), traceName, skip));
 }
Example #10
        private async Task RunAsync(IFdbDatabase db, FdbSubspace location, CancellationToken ct, Action done, int N, int K, int W)
        {
            if (db == null)
            {
                throw new ArgumentNullException("db");
            }

            StringBuilder sb = new StringBuilder();

            db = new FdbLoggedDatabase(db, false, false, (log) =>
            {
                sb.AppendLine(log.Log.GetTimingsReport(true));
                //Console.WriteLine(log.Log.GetTimingsReport(true));
            });
            try
            {
                var workerPool = new FdbWorkerPool(location);
                Console.WriteLine("workerPool at " + location.Key.ToAsciiOrHexaString());

                var workerSignal = new AsyncCancelableMutex(ct);
                var clientSignal = new AsyncCancelableMutex(ct);

                int taskCounter = 0;

                int msgSent     = 0;
                int msgReceived = 0;

                Func <FdbWorkerMessage, CancellationToken, Task> handler = async(msg, _ct) =>
                {
                    Interlocked.Increment(ref msgReceived);

                    //await Task.Delay(10 + Math.Abs(msg.Id.GetHashCode()) % 50);
                    await Task.Delay(10).ConfigureAwait(false);
                };

                Func <int, Task> worker = async(id) =>
                {
                    await workerSignal.Task.ConfigureAwait(false);

                    Console.WriteLine("Worker #" + id + " is starting");
                    try
                    {
                        await workerPool.RunWorkerAsync(db, handler, ct).ConfigureAwait(false);
                    }
                    finally
                    {
                        Console.WriteLine("Worker #" + id + " has stopped");
                    }
                };

                Func <int, Task> client = async(id) =>
                {
                    await clientSignal.Task.ConfigureAwait(false);

                    await Task.Delay(10).ConfigureAwait(false);

                    var rnd = new Random(id * 111);
                    for (int i = 0; i < N; i++)
                    {
                        var taskId   = Slice.FromString("T" + Interlocked.Increment(ref taskCounter));
                        var taskBody = Slice.FromString("Message " + (i + 1) + " of " + N + " from client #" + id);

                        await workerPool.ScheduleTaskAsync(db, taskId, taskBody, ct).ConfigureAwait(false);

                        Interlocked.Increment(ref msgSent);

                        //if (i > 0 && i % 10 == 0) Console.WriteLine("@@@ Client#" + id + " pushed " + (i + 1) + " / " + N + " messages");

                        switch (rnd.Next(5))
                        {
                        case 0: await Task.Delay(10).ConfigureAwait(false); break;

                        case 1: await Task.Delay(100).ConfigureAwait(false); break;

                        case 2: await Task.Delay(500).ConfigureAwait(false); break;
                        }
                    }
                    Console.WriteLine("@@@ Client#" + id + " has finished!");
                };

                Func <string, Task> dump = async(label) =>
                {
                    Console.WriteLine("<dump label='" + label + "' key='" + location.Key.ToAsciiOrHexaString() + "'>");
                    using (var tr = db.BeginTransaction(ct))
                    {
                        await tr.Snapshot
                        .GetRange(FdbKeyRange.StartsWith(location.Key))
                        .ForEachAsync((kvp) =>
                        {
                            Console.WriteLine(" - " + FdbTuple.Unpack(location.Extract(kvp.Key)) + " = " + kvp.Value.ToAsciiOrHexaString());
                        }).ConfigureAwait(false);
                    }
                    Console.WriteLine("</dump>");
                };

                var workers = Enumerable.Range(0, W).Select((i) => worker(i)).ToArray();
                var clients = Enumerable.Range(0, K).Select((i) => client(i)).ToArray();

                DateTime start       = DateTime.Now;
                DateTime last        = start;
                int      lastHandled = -1;
                using (var timer = new Timer((_) =>
                {
                    var now = DateTime.Now;
                    Console.WriteLine("@@@ T=" + now.Subtract(start) + ", sent: " + msgSent.ToString("N0") + ", recv: " + msgReceived.ToString("N0"));
                    Console.WriteLine("### Workers: " + workerPool.IdleWorkers + " / " + workerPool.ActiveWorkers + " (" + new string('#', workerPool.IdleWorkers) + new string('.', workerPool.ActiveWorkers - workerPool.IdleWorkers) + "), sent: " + workerPool.MessageScheduled.ToString("N0") + ", recv: " + workerPool.MessageReceived.ToString("N0") + ", delta: " + (workerPool.MessageScheduled - workerPool.MessageReceived).ToString("N0") + ", busy: " + workerPool.WorkerBusyTime + " (avg " + workerPool.WorkerAverageBusyDuration.TotalMilliseconds.ToString("N3") + " ms)");

                    if (now.Subtract(last).TotalSeconds >= 10)
                    {
                        //dump("timer").GetAwaiter().GetResult();
                        last = now;
                        if (lastHandled == msgReceived)
                        {                         // STALL ?
                            Console.WriteLine("STALL! ");
                            done();
                        }
                        lastHandled = msgReceived;
                    }

                    if (msgReceived >= K * N)
                    {
                        dump("complete").GetAwaiter().GetResult();
                        done();
                    }
                }, null, 1000, 1000))
                {
                    var sw = Stopwatch.StartNew();

                    // start the workers
                    workerSignal.Set(async: true);
                    await Task.Delay(500);

                    await dump("workers started");

                    // start the clients
                    clientSignal.Set(async: true);

                    await Task.WhenAll(clients);

                    Console.WriteLine("Clients completed after " + sw.Elapsed);

                    await Task.WhenAll(workers);

                    Console.WriteLine("Workers completed after " + sw.Elapsed);
                }
            }
            finally
            {
                Console.WriteLine("---------------------------------------------------------------------------");
                Console.WriteLine("Transaction logs:");
                Console.WriteLine();

                Console.WriteLine(sb.ToString());
            }
        }
        public void ReadHeader(CancellationToken ct)
        {
            ct.ThrowIfCancellationRequested();

            // the minimum header prolog size is 64 bytes, but most headers will only need a single page
            // we can preallocate a full page, and we will resize it later if needed

            var reader = m_file.CreateReader(0, SnapshotFormat.HEADER_METADATA_BYTES);

            // "PNDB"
            var signature = reader.ReadFixed32();
            // v1.0
            uint major = reader.ReadFixed16();
            uint minor = reader.ReadFixed16();

            m_version = new Version((int)major, (int)minor);
            // FLAGS
            m_dbFlags = (SnapshotFormat.Flags)reader.ReadFixed64();
            // Database ID
            m_uid = new Uuid128(reader.ReadBytes(16).GetBytes());
            // Database Version
            m_sequence = reader.ReadFixed64();
            // Number of items in the database
            m_itemCount = checked ((long)reader.ReadFixed64());
            // Database Timestamp
            m_timestamp = reader.ReadFixed64();
            // Page Size
            m_pageSize = reader.ReadFixed32();
            // Header Size
            m_headerSize = reader.ReadFixed32();

            Contract.Assert(!reader.HasMore);

            #region Sanity checks

            // Signature
            if (signature != SnapshotFormat.HEADER_MAGIC_NUMBER)
            {
                throw ParseError("Invalid magic number");
            }

            // Version
            if (m_version.Major != 1)
            {
                throw ParseError("Unsupported file version (major)");
            }
            if (m_version.Minor > 0)
            {
                throw ParseError("Unsupported file version (minor)");
            }

            // Flags

            // Page Size
            if (m_pageSize != UnmanagedHelpers.NextPowerOfTwo(m_pageSize))
            {
                throw ParseError("Page size ({0}) is not a power of two", m_pageSize);
            }
            if (m_pageSize < SnapshotFormat.HEADER_METADATA_BYTES)
            {
                throw ParseError("Page size ({0}) is too small", m_pageSize);
            }
            if (m_pageSize > 1 << 20)
            {
                throw ParseError("Page size ({0}) is too big", m_pageSize);
            }

            // Header Size
            if (m_headerSize < 64 + 4 + 4)
            {
                throw ParseError("Header size ({0}) is too small", m_headerSize);
            }
            if (m_headerSize > m_file.Length)
            {
                throw ParseError("Header size is bigger than the file itself ({0} < {1})", m_headerSize, m_file.Length);
            }
            if (m_headerSize > 1 << 10)
            {
                throw ParseError("Header size ({0}) exceeds the maximum allowed size", m_headerSize);
            }

            #endregion

            // we know the page size and header size, read the rest...

            // read the rest
            reader = m_file.CreateReader(0, m_headerSize);
            reader.Skip(SnapshotFormat.HEADER_METADATA_BYTES);

            // parse the attributes
            Contract.Assert(reader.Offset == SnapshotFormat.HEADER_METADATA_BYTES);
            var attributeCount = checked ((int)reader.ReadFixed32());
            if (attributeCount < 0 || attributeCount > 1024)
            {
                throw ParseError("Attributes count is invalid");
            }

            var attributes = new Dictionary <string, IFdbTuple>(attributeCount);
            for (int i = 0; i < attributeCount; i++)
            {
                var name = reader.ReadVarbytes().ToSlice();                 //TODO: max size ?
                if (name.IsNullOrEmpty)
                {
                    throw ParseError("Header attribute name is empty");
                }

                var data  = reader.ReadVarbytes().ToSlice();                    //TODO: max size + have a small scratch pad buffer for these ?
                var value = FdbTuple.Unpack(data);
                attributes.Add(name.ToUnicode(), value);
            }
            m_attributes = attributes;

            // read the header end marker
            var marker = reader.ReadFixed32();
            if (marker != uint.MaxValue)
            {
                throw ParseError("Header end marker is invalid");
            }

            // verify the header checksum
            uint actualHeaderChecksum = SnapshotFormat.ComputeChecksum(reader.Base, reader.Offset);
            uint headerChecksum       = reader.ReadFixed32();
            m_headerChecksum = headerChecksum;

            if (headerChecksum != actualHeaderChecksum)
            {
                throw ParseError("The header checksum does not match ({0} != {1}). This may be an indication of data corruption", headerChecksum, actualHeaderChecksum);
            }

            m_dataStart = RoundUp(m_headerSize, m_pageSize);
            m_hasHeader = true;
        }
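The final line rounds the data start offset up to the next page boundary. RoundUp is not shown in this snippet; the following is a hypothetical equivalent, assuming the second argument is a power of two (which the page-size sanity checks above guarantee):

        // Hypothetical equivalent of the RoundUp(m_headerSize, m_pageSize) call above;
        // the helper itself is not part of the original snippet. It assumes 'powerOfTwo'
        // really is a power of two, as the page-size sanity checks enforce here.
        private static uint RoundUp(uint value, uint powerOfTwo)
        {
            return (value + powerOfTwo - 1) & ~(powerOfTwo - 1);
        }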
        public async Task Test_FdbMap_With_Custom_Key_Encoder()
        {
            // Use a table as a backing store for the rules of a Poor Man's firewall, where each key is an IPEndPoint (TCP only!) and each value is "pass" or "block"

            // Encode the IPEndPoint as the tuple (IP, Port,) packed with the Tuple codec
            // note: there is a much simpler way of creating composite keys, this is just a quick and dirty test!
            var keyEncoder = KeyValueEncoders.Bind <IPEndPoint>(
                (ipe) => ipe == null ? Slice.Empty : FdbTuple.Pack(ipe.Address, ipe.Port),
                (packed) =>
            {
                if (packed.IsNullOrEmpty)
                {
                    return(default(IPEndPoint));
                }
                var t = FdbTuple.Unpack(packed);
                return(new IPEndPoint(t.Get <IPAddress>(0), t.Get <int>(1)));
            }
                );

            var rules = new Dictionary <IPEndPoint, string>()
            {
                { new IPEndPoint(IPAddress.Parse("172.16.12.34"), 6667), "block" },
                { new IPEndPoint(IPAddress.Parse("192.168.34.56"), 80), "pass" },
                { new IPEndPoint(IPAddress.Parse("192.168.34.56"), 443), "pass" }
            };

            using (var db = await OpenTestPartitionAsync())
            {
                var location = await GetCleanDirectory(db, "Collections", "Maps");

                var map = new FdbMap <IPEndPoint, string>("Firewall", location.Partition("Hosts"), keyEncoder, KeyValueEncoders.Values.StringEncoder);

                // import all the rules
                await db.WriteAsync((tr) =>
                {
                    foreach (var rule in rules)
                    {
                        map.Set(tr, rule.Key, rule.Value);
                    }
                }, this.Cancellation);

#if DEBUG
                await DumpSubspace(db, location);
#endif

                // test the rules

                using (var tr = db.BeginTransaction(this.Cancellation))
                {
                    var value = await map.GetAsync(tr, new IPEndPoint(IPAddress.Parse("172.16.12.34"), 6667));

                    Assert.That(value, Is.EqualTo("block"));

                    value = await map.GetAsync(tr, new IPEndPoint(IPAddress.Parse("192.168.34.56"), 443));

                    Assert.That(value, Is.EqualTo("pass"));

                    var baz = new IPEndPoint(IPAddress.Parse("172.16.12.34"), 80);
                    Assert.That(async() => await map.GetAsync(tr, baz), Throws.InstanceOf <KeyNotFoundException>());

                    var opt = await map.TryGetAsync(tr, baz);

                    Assert.That(opt.HasValue, Is.False);
                }
            }
        }
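The key encoder in this test is just a pair of lambdas around the tuple codec. The helper below is an illustrative sketch, not part of the original test, exercising the same round-trip with only the calls that already appear above:

        // Illustrative helper (not part of the original test) showing the IPEndPoint <-> Slice
        // round-trip used by the key encoder above (the input endpoint is arbitrary).
        private static IPEndPoint RoundTripEndPoint(IPEndPoint endpoint)
        {
            Slice packed = FdbTuple.Pack(endpoint.Address, endpoint.Port);       // encode as (IP, Port)

            var t = FdbTuple.Unpack(packed);                                     // decode back to a tuple
            return new IPEndPoint(t.Get <IPAddress>(0), t.Get <int>(1));         // should equal 'endpoint'
        }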
        public async Task Test_Range_Except_Composite_Key()
        {
            using (var db = await OpenTestPartitionAsync())
            {
                // get a clean new directory
                var location = await GetCleanDirectory(db, "Queries", "ExceptComposite");

                // Items contains a list of all ("user", id) that were created
                var locItems = await location.CreateOrOpenAsync(db, "Items", this.Cancellation);

                // Processed contains the list of all ("user", id) that were processed
                var locProcessed = await location.CreateOrOpenAsync(db, "Processed", this.Cancellation);

                // the goal is to have a query that returns the list of all unprocessed items (ie: in Items but not in Processed)

                await db.WriteAsync((tr) =>
                {
                    // Items
                    tr.Set(locItems.Pack("userA", 10093), Slice.Empty);
                    tr.Set(locItems.Pack("userA", 19238), Slice.Empty);
                    tr.Set(locItems.Pack("userB", 20003), Slice.Empty);
                    // Processed
                    tr.Set(locProcessed.Pack("userA", 19238), Slice.Empty);
                }, this.Cancellation);

                // the query (Items except Processed) should return (userA, 10093) and (userB, 20003)

                // First Method: pass in a list of key ranges, and merge on the (Slice, Slice) pairs
                Trace.WriteLine("Method 1:");
                var results = await db.QueryAsync((tr) =>
                {
                    var query = tr.Except(
                        new[] { locItems.ToRange(), locProcessed.ToRange() },
                        (kv) => FdbTuple.Unpack(kv.Key).Substring(-2),                       // note: keys come from either of the two ranges, so we only keep the last 2 elements of the tuple
                        FdbTupleComparisons.Composite <string, int>()                        // compares t[0] as a string, and t[1] as an int
                        );

                    // problem: Except() still returns the original (Slice,Slice) pairs from the first range,
                    // meaning that we still need to unpack the key again (this time knowing the location)
                    return(query.Select(kv => locItems.Unpack(kv.Key)));
                }, this.Cancellation);

                foreach (var r in results)
                {
                    Trace.WriteLine(r);
                }
                Assert.That(results.Count, Is.EqualTo(2));
                Assert.That(results[0], Is.EqualTo(FdbTuple.Create("userA", 10093)));
                Assert.That(results[1], Is.EqualTo(FdbTuple.Create("userB", 20003)));

                // Second Method: pre-parse the queries, and merge on the results directly
                Trace.WriteLine("Method 2:");
                results = await db.QueryAsync((tr) =>
                {
                    var items = tr
                                .GetRange(locItems.ToRange())
                                .Select(kv => locItems.Unpack(kv.Key));

                    var processed = tr
                                    .GetRange(locProcessed.ToRange())
                                    .Select(kv => locProcessed.Unpack(kv.Key));

                    // items and processed are lists of (string, int) tuples, we can compare them directly
                    var query = items.Except(processed, FdbTupleComparisons.Composite <string, int>());

                    // query is already a list of tuples, nothing more to do
                    return(query);
                }, this.Cancellation);

                foreach (var r in results)
                {
                    Trace.WriteLine(r);
                }
                Assert.That(results.Count, Is.EqualTo(2));
                Assert.That(results[0], Is.EqualTo(FdbTuple.Create("userA", 10093)));
                Assert.That(results[1], Is.EqualTo(FdbTuple.Create("userB", 20003)));
            }
        }