Example #1
        public void Memory_consumption_parallel_writes_and_merges()
        {
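            // fixed seed keeps the random workload reproducible from run to run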
            var r    = new Random(1234);
            var keys = new Queue<string>();

            AddItems(keys, 200);
            var baseline = GC.GetTotalMemory(true);

            using (var d = new FirkinDictionary<string, string>(_path)) {
                var t       = 0;
                var capture = "";
                var done    = false;
                var n       = 0;
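                // background merger: once ~5000 writes accumulate, expire roughly a
                // quarter of the keys, compact the store with Merge(), and log the
                // managed-memory deltas; n and done are raced benignly between threads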
                var merger  = new Thread(() => {
                    // System.Random is not thread-safe, so the merger thread uses its own instance
                    var rnd = new Random(4321);
                    var m   = 0;
                    while (!done)
                    {
                        if (n >= 5000)
                        {
                            m++;
                            var before = GC.GetTotalMemory(true);
                            Console.WriteLine(
                                "merge {0}, before {1:0.00}MB",
                                m,
                                (before - baseline) / 1024.0 / 1024.0
                                );
                            var expiredKeys = (from entry in d
                                               where entry.Value.Length != 0 && rnd.Next(4) == 1
                                               select entry.Key).ToArray();
                            foreach (var key in expiredKeys)
                            {
                                d.Remove(key);
                            }
                            var during = GC.GetTotalMemory(true);
                            Console.WriteLine(
                                "merge {0}, during {1:0.00}MB",
                                m,
                                (during - baseline) / 1024.0 / 1024.0
                                );
                            d.Merge();

                            var after = GC.GetTotalMemory(true);
                            var c     = d.Count;
                            Console.WriteLine(
                                "merge {0}, iteration {1}, items: {2}, after {3:0.00}MB, storage {4:0.00}bytes/item",
                                m,
                                t,
                                c,
                                (after - baseline) / 1024.0 / 1024.0,
                                (after - baseline) / (double)c
                                );
                            n = 0;
                        }
                        else
                        {
                            // brief sleep so waiting for the next merge window is not a hot spin
                            Thread.Sleep(1);
                        }
                    }
                })
                {
                    IsBackground = true
                };
                merger.Start();
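                // writer loop: overwrite keys with fresh random values, occasionally
                // re-queueing a key or topping the queue up with new ones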
                while (keys.Any())
                {
                    n++;
                    t++;
                    var key = keys.Dequeue();
                    var v   = TestUtil.GetRandomString(r);
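                    // read back any existing value; capture keeps the read from being dead code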
                    if (d.ContainsKey(key))
                    {
                        var x = d[key];
                        capture = ".." + x;
                    }
                    d[key] = v;
                    switch (r.Next(10))
                    {
                    case 1:
                        keys.Enqueue(key);
                        break;

                    case 4:
                        if (keys.Count < 200)
                        {
                            AddItems(keys, 10);
                        }
                        break;
                    }
                    if (t >= 1000000)
                    {
                        break;
                    }
                    if (keys.Count < 50)
                    {
                        AddItems(keys, 100);
                    }
                }
                done = true;
                merger.Join();
                Console.WriteLine("total items {0} after {1} iterations with {2} left in queue", d.Count, t, keys.Count);
                _log.Debug(capture.Substring(0, Math.Min(10, capture.Length)));
            }
        }
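
The AddItems helper is not shown in these excerpts. A minimal sketch of what Examples #1 and #3 appear to assume (a hypothetical implementation; any generator of fresh unique keys would do):

        private static void AddItems(Queue<string> keys, int count)
        {
            // hypothetical helper, not from the original source: enqueue count fresh keys
            for (var i = 0; i < count; i++)
            {
                keys.Enqueue(Guid.NewGuid().ToString());
            }
        }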
Example #2
        public void Concurrent_read_write_delete_consistency_with_multiple_merges()
        {
            var r    = new Random(1234);
            var id   = 0;
            var keys = new Queue<string>();

            AddKeys(keys, 200, ref id);
            var mergeCounter = 0;
            var merges       = 0;

            _hash = new FirkinHash<string>(_path, 100 * 2048);
            var dictionary    = new Dictionary<string, byte[]>();
            var modified      = new HashSet<string>();
            var workers       = new List<Thread>();
            var faults        = new List<Exception>();
            var iterations    = 0;
            var maxIterations = 10000;

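            // ten workers concurrently read, overwrite, and delete keys, mirroring each
            // mutation into a plain Dictionary that serves as the expected-state oracle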
            for (var i = 0; i < 10; i++)
            {
                var workerId = i;
                var worker   = new Thread(() => {
                    // System.Random is not thread-safe; give each worker its own seeded instance
                    var wr = new Random(1234 + workerId);
                    try {
                        _log.DebugFormat("worker {0} started", workerId);
                        while (iterations < maxIterations)
                        {
                            var j = Interlocked.Increment(ref iterations);
                            if (j % 100 == 0)
                            {
                                _log.DebugFormat("iteration {0}", j);
                            }
                            Interlocked.Increment(ref mergeCounter);
                            string k;
                            lock (keys) {
                                if (keys.Count < 10)
                                {
                                    AddKeys(keys, 100, ref id);
                                }
                                k = keys.Dequeue();
                            }
                            var entry = _hash.Get(k);
                            var v     = TestUtil.GetRandomBytes(wr);
                            if (entry != null)
                            {
                                lock (keys) {
                                    if (modified.Contains(k))
                                    {
                                        continue;
                                    }
                                    modified.Add(k);
                                }
                                var v2 = entry.ReadBytes();
                                if (wr.Next(4) == 3)
                                {
                                    lock (dictionary) {
                                        dictionary.Remove(k);
                                    }
                                    _hash.Delete(k);
                                }
                                else
                                {
                                    lock (dictionary) {
                                        dictionary[k] = v;
                                    }
                                    _hash.Put(k, v.ToStream(), v.Length);
                                }
                            }
                            else
                            {
                                lock (dictionary) {
                                    dictionary[k] = v;
                                }
                                _hash.Put(k, v.ToStream(), v.Length);
                            }
                            lock (keys) {
                                if (!modified.Contains(k) && wr.Next(3) == 1)
                                {
                                    keys.Enqueue(k);
                                }
                            }
                            Thread.Sleep(10);
                        }
                        _log.DebugFormat("worker {0} finished", workerId);
                    } catch (Exception e) {
                        // List<T> is not thread-safe; serialize writes from the workers
                        lock (faults) {
                            faults.Add(e);
                        }
                    }
                })
                {
                    IsBackground = true
                };
                worker.Start();
                workers.Add(worker);
            }
            var start = DateTime.UtcNow;

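            // the main thread polls for faults and owns all merging, so merges never
            // overlap; mergeCounter's non-atomic reset may drop a few increments, which
            // only delays the next merge slightly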
            while (iterations < maxIterations)
            {
                lock (faults) {
                    if (faults.Any())
                    {
                        throw faults.First();
                    }
                }
                if (DateTime.UtcNow > start.AddMinutes(5))
                {
                    throw new TimeoutException(string.Format("didn't finish, merges: {0}, items: {1}, queue: {2}, existing modified: {3}", merges, _hash.Count, keys.Count, modified.Count));
                }
                if (mergeCounter >= 2000)
                {
                    merges++;
                    mergeCounter = 0;
                    _hash.Merge();
                    _log.DebugFormat("merge {0} completed", merges);
                }
            }
            foreach (var worker in workers)
            {
                worker.Join();
            }
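            // count the .data files left on disk as evidence that merging compacted the store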
            var files = 0;

            foreach (var file in Directory.GetFiles(_path))
            {
                _log.Debug(Path.GetFileName(file));
                if (Path.GetExtension(file) == ".data")
                {
                    files++;
                }
            }
            _log.DebugFormat("merges: {0}, items {1}, existing modified: {2}, files: {3}", merges, _hash.Count, modified.Count, files);
            Assert.AreEqual(dictionary.Count, _hash.Count);
            foreach (var pair in dictionary)
            {
                Assert.AreEqual(0, pair.Value.Compare(_hash.Get(pair.Key).ReadBytes()));
            }
        }
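
AddKeys is likewise external to these excerpts. A plausible sketch given the ref id counter and the fact that callers serialize access with lock (keys) (hypothetical, not the original helper):

        private static void AddKeys(Queue<string> keys, int count, ref int id)
        {
            // hypothetical helper, not from the original source: mint sequential keys
            for (var i = 0; i < count; i++)
            {
                id++;
                keys.Enqueue("key" + id);
            }
        }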
Example #3
        public void Memory_consumption()
        {
            var r    = new Random(1234);
            var keys = new Queue<string>();

            AddItems(keys, 200);
            var    baseline = GC.GetTotalMemory(true);
            string capture  = "";

            using (var d = new FirkinDictionary<string, string>(_path)) {
                var n = 0;
                var t = 0;
                var m = 0;
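                // single-threaded variant of Example #1: n counts writes since the last
                // merge, t total writes, m completed merges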
                while (keys.Any())
                {
                    n++;
                    t++;
                    var key = keys.Dequeue();
                    var v   = TestUtil.GetRandomString(r);
                    if (d.ContainsKey(key))
                    {
                        var x = d[key];
                        capture = ".." + x;
                    }
                    d[key] = v;
                    switch (r.Next(10))
                    {
                    case 1:
                        keys.Enqueue(key);
                        break;

                    case 4:
                        if (keys.Count < 200)
                        {
                            AddItems(keys, 10);
                        }
                        break;
                    }
                    if (n >= 5000)
                    {
                        m++;
                        var before = GC.GetTotalMemory(true);
                        d.Merge();
                        var after = GC.GetTotalMemory(true);
                        var c     = d.Count;
                        Console.WriteLine(
                            "merge {0}, iteration {1}, items: {2}, before {3:0.00}MB, after {4:0.00}MB, storage {5:0.00}bytes/item",
                            m,
                            t,
                            c,
                            (before - baseline) / 1024.0 / 1024.0,
                            (after - baseline) / 1024.0 / 1024.0,
                            (after - baseline) / (double)c
                            );
                        n = 0;
                    }
                    if (t >= 200000)
                    {
                        break;
                    }
                    if (keys.Count < 50)
                    {
                        AddItems(keys, 100);
                    }
                }
                Console.WriteLine("total items {0} after {1} iterations with {2} left in queue", d.Count, t, keys.Count);
                _log.Debug(capture.Substring(0, Math.Min(10, capture.Length)));
            }
        }
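
TestUtil.GetRandomString and TestUtil.GetRandomBytes are also not shown. A minimal stand-in consistent with how the examples use them (payload sizes are guesses, not the original values):

        internal static class TestUtil
        {
            // hypothetical stand-in, not the original helper class
            public static string GetRandomString(Random r)
            {
                return Convert.ToBase64String(GetRandomBytes(r));
            }

            public static byte[] GetRandomBytes(Random r)
            {
                var bytes = new byte[r.Next(64, 512)];
                r.NextBytes(bytes);
                return bytes;
            }
        }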