Example n. 1
0
        // Applies 200 random put/delete operations against both the hash and a
        // reference dictionary, merges once, then verifies the hash matches the
        // dictionary entry-for-entry.
        public void Read_write_delete_consistency_with_merge_before_read()
        {
            var random = new Random(1234);

            _hash = new FirkinHash<string>(_path, 10 * 1024);
            var expected = new Dictionary<string, byte[]>();

            for (var step = 0; step < 200; step++)
            {
                var key = "k" + random.Next(100);
                var shouldDelete = random.Next(4) == 3;
                if (shouldDelete)
                {
                    expected.Remove(key);
                    _hash.Delete(key);
                }
                else
                {
                    var bytes = TestUtil.GetRandomBytes(random);
                    expected[key] = bytes;
                    _hash.Put(key, bytes.ToStream(), bytes.Length);
                }
                // interleave a read of a random key between mutations
                _hash.Get("k" + random.Next(100));
            }
            _hash.Merge();
            Assert.AreEqual(expected.Count, _hash.Count);
            foreach (var pair in expected)
            {
                Assert.AreEqual(0, pair.Value.Compare(_hash.Get(pair.Key).ReadBytes()));
            }
        }
Example n. 2
0
        // Runs ten rounds of 1000 random put/delete operations; after each
        // round, every other entry encountered while enumerating the store is
        // removed and the store is merged. The final contents must match a
        // reference dictionary exactly.
        public void Read_write_delete_consistency_with_multiple_merges()
        {
            var random = new Random(1234);

            _hash = new FirkinHash<string>(_path, 10 * 2048);
            var expected = new Dictionary<string, byte[]>();

            for (var round = 0; round < 10; round++)
            {
                for (var step = 0; step < 1000; step++)
                {
                    var key = "k" + random.Next(100);
                    if (random.Next(4) == 3)
                    {
                        expected.Remove(key);
                        _hash.Delete(key);
                    }
                    else
                    {
                        var bytes = TestUtil.GetRandomBytes(random);
                        expected[key] = bytes;
                        _hash.Put(key, bytes.ToStream(), bytes.Length);
                    }
                    // interleave a read of a random key between mutations
                    _hash.Get("k" + random.Next(100));
                }
                // drop every second entry seen while enumerating the store
                var keep = true;
                foreach (var entry in _hash)
                {
                    keep = !keep;
                    if (!keep)
                    {
                        expected.Remove(entry.Key);
                        _hash.Delete(entry.Key);
                    }
                }
                _hash.Merge();
            }
            Assert.AreEqual(expected.Count, _hash.Count);
            foreach (var pair in expected)
            {
                Assert.AreEqual(0, pair.Value.Compare(_hash.Get(pair.Key).ReadBytes()));
            }
        }
Example n. 3
0
        // A record that has been written and then deleted must no longer be
        // retrievable: Get on the deleted key returns null.
        public void Can_delete_record()
        {
            CreateHash();
            const string key = "foo";
            var stream = "bar".ToStream();

            _hash.Put(key, stream, stream.Length);
            _hash.Delete(key);
            Assert.IsNull(_hash.Get(key));
        }
Example n. 4
0
 // Verifies read/write/delete consistency when a Merge() occurs in the
 // middle of the workload instead of after it.
 //
 // Fix: the second loop's delete condition was "r.Next(5) == 5", which can
 // never be true because Next(5) returns 0..4 — the delete branch was dead
 // code. It now uses "== 4" so deletes occur at the intended 1-in-5 rate.
 public void Read_write_delete_consistency_with_merge_in_middle()
 {
     var r = new Random(1234);
     _hash = new FirkinHash<string>(_path, 10 * 1024);
     var dictionary = new Dictionary<string, byte[]>();
     // phase 1: 500 random mutations, ~1-in-4 are deletes
     for(var i = 0; i < 500; i++) {
         var k = "k" + r.Next(100);
         if(r.Next(4) == 3) {
             dictionary.Remove(k);
             _hash.Delete(k);
         } else {
             var v = TestUtil.GetRandomBytes(r);
             dictionary[k] = v;
             _hash.Put(k, v.ToStream(), v.Length);
         }
         // interleave a read of a random key between mutations
         _hash.Get("k" + r.Next(100));
     }
     _hash.Merge();
     // phase 2: 500 more mutations after the merge, ~1-in-5 are deletes
     for(var i = 0; i < 500; i++) {
         var k = "k" + r.Next(100);
         if(r.Next(5) == 4) {
             dictionary.Remove(k);
             _hash.Delete(k);
         } else {
             var v = TestUtil.GetRandomBytes(r);
             dictionary[k] = v;
             _hash.Put(k, v.ToStream(), v.Length);
         }
         _hash.Get("k" + r.Next(100));
     }
     Assert.AreEqual(dictionary.Count, _hash.Count);
     foreach(var pair in dictionary) {
         Assert.AreEqual(0, pair.Value.Compare(_hash.Get(pair.Key).ReadBytes()));
     }
 }
Example n. 5
0
        // Stress test: 10 worker threads concurrently read, write and delete
        // random keys while the main thread periodically merges; at the end
        // the hash must match the reference dictionary entry-for-entry.
        //
        // Fixes applied:
        //  - The TimeoutException format string used "{2}" twice, so
        //    modified.Count was passed but never displayed; the last
        //    placeholder is now "{3}".
        //  - A raw file name was passed to DebugFormat as the format string,
        //    which throws FormatException if the name contains '{'; it is now
        //    passed as an argument.
        //  - List<Exception> is not thread-safe; "faults" is now accessed
        //    under a lock from both the workers and the main thread.
        //  - mergeCounter is reset via Interlocked.Exchange to pair with the
        //    Interlocked.Increment done by the workers.
        public void Concurrent_read_write_delete_consistency_with_multiple_merges()
        {
            // NOTE(review): r is shared across threads without a lock;
            // System.Random is not thread-safe — consider synchronizing.
            var r    = new Random(1234);
            var id   = 0;
            var keys = new Queue <string>();

            AddKeys(keys, 200, ref id);
            var mergeCounter = 0;
            var merges       = 0;

            _hash = new FirkinHash <string>(_path, 100 * 2048);
            var dictionary    = new Dictionary <string, byte[]>();
            var modified      = new HashSet <string>();
            var workers       = new List <Thread>();
            var faults        = new List <Exception>();
            var iterations    = 0;
            var maxIterations = 10000;

            for (var i = 0; i < 10; i++)
            {
                var workerId = i;
                var worker   = new Thread(() => {
                    try {
                        _log.DebugFormat("worker {0} started", workerId);
                        while (iterations < maxIterations)
                        {
                            var j = Interlocked.Increment(ref iterations);
                            if (j % 100 == 0)
                            {
                                _log.DebugFormat("iteration {0}", j);
                            }
                            Interlocked.Increment(ref mergeCounter);
                            string k;
                            lock (keys) {
                                if (keys.Count < 10)
                                {
                                    AddKeys(keys, 100, ref id);
                                }
                                k = keys.Dequeue();
                            }
                            var entry = _hash.Get(k);
                            var v     = TestUtil.GetRandomBytes(r);
                            if (entry != null)
                            {
                                // each existing key is mutated by at most one worker
                                lock (keys) {
                                    if (modified.Contains(k))
                                    {
                                        continue;
                                    }
                                    modified.Add(k);
                                }
                                // drain the entry's stream; the value is not asserted here
                                var v2 = entry.ReadBytes();
                                if (r.Next(4) == 3)
                                {
                                    lock (dictionary) {
                                        dictionary.Remove(k);
                                    }
                                    _hash.Delete(k);
                                }
                                else
                                {
                                    lock (dictionary) {
                                        dictionary[k] = v;
                                    }
                                    _hash.Put(k, v.ToStream(), v.Length);
                                }
                            }
                            else
                            {
                                lock (dictionary) {
                                    dictionary[k] = v;
                                }
                                _hash.Put(k, v.ToStream(), v.Length);
                            }
                            lock (keys) {
                                // sometimes requeue untouched keys for another pass
                                if (!modified.Contains(k) && r.Next(3) == 1)
                                {
                                    keys.Enqueue(k);
                                }
                            }
                            Thread.Sleep(10);
                        }
                        _log.DebugFormat("worker {0} finished", workerId);
                    } catch (Exception e) {
                        lock (faults) {
                            faults.Add(e);
                        }
                    }
                })
                {
                    IsBackground = true
                };
                worker.Start();
                workers.Add(worker);
            }
            var start = DateTime.UtcNow;

            while (iterations < maxIterations)
            {
                lock (faults) {
                    if (faults.Any())
                    {
                        throw faults.First();
                    }
                }
                if (DateTime.UtcNow > start.AddMinutes(5))
                {
                    throw new TimeoutException(string.Format("didn't finish, merges: {0}, items: {1}, queue: {2}, existing modified: {3}", merges, _hash.Count, keys.Count, modified.Count));
                }
                if (mergeCounter >= 2000)
                {
                    merges++;
                    Interlocked.Exchange(ref mergeCounter, 0);
                    _hash.Merge();
                    _log.DebugFormat("merge {0} completed", merges);
                }
            }
            foreach (var worker in workers)
            {
                worker.Join();
            }
            var files = 0;

            foreach (var file in Directory.GetFiles(_path))
            {
                // pass the name as an argument, never as the format string
                _log.DebugFormat("{0}", Path.GetFileName(file));
                if (Path.GetExtension(file) == ".data")
                {
                    files++;
                }
            }
            _log.DebugFormat("merges: {0}, items {1}, existing modified: {2}, files: {3}", merges, _hash.Count, modified.Count, files);
            Assert.AreEqual(dictionary.Count, _hash.Count);
            foreach (var pair in dictionary)
            {
                Assert.AreEqual(0, pair.Value.Compare(_hash.Get(pair.Key).ReadBytes()));
            }
        }
Example n. 6
0
 // Ten rounds of 1000 randomized put/delete operations; after each round,
 // every other entry seen while enumerating the store is removed and the
 // store is merged. Final state must match the reference dictionary.
 public void Read_write_delete_consistency_with_multiple_merges()
 {
     var rnd = new Random(1234);
     _hash = new FirkinHash<string>(_path, 10 * 2048);
     var reference = new Dictionary<string, byte[]>();
     for(var round = 0; round < 10; round++) {
         for(var step = 0; step < 1000; step++) {
             var key = "k" + rnd.Next(100);
             var remove = rnd.Next(4) == 3;
             if(remove) {
                 reference.Remove(key);
                 _hash.Delete(key);
             } else {
                 var bytes = TestUtil.GetRandomBytes(rnd);
                 reference[key] = bytes;
                 _hash.Put(key, bytes.ToStream(), bytes.Length);
             }
             // interleave a read of a random key between mutations
             _hash.Get("k" + rnd.Next(100));
         }
         // drop every second entry seen during enumeration
         var drop = false;
         foreach(var entry in _hash) {
             drop = !drop;
             if(drop) {
                 reference.Remove(entry.Key);
                 _hash.Delete(entry.Key);
             }
         }
         _hash.Merge();
     }
     Assert.AreEqual(reference.Count, _hash.Count);
     foreach(var pair in reference) {
         Assert.AreEqual(0, pair.Value.Compare(_hash.Get(pair.Key).ReadBytes()));
     }
 }
Example n. 7
0
 // Stress test: 10 worker threads concurrently read, write and delete random
 // keys while the main thread periodically merges; at the end the hash must
 // match the reference dictionary entry-for-entry.
 //
 // Fixes applied:
 //  - The TimeoutException format string used "{2}" twice, so modified.Count
 //    was passed but never displayed; the last placeholder is now "{3}".
 //  - A raw file name was passed to DebugFormat as the format string, which
 //    throws FormatException if the name contains '{'; it is now an argument.
 //  - List<Exception> is not thread-safe; "faults" is now accessed under a
 //    lock from both the workers and the main thread.
 //  - mergeCounter is reset via Interlocked.Exchange to pair with the
 //    Interlocked.Increment done by the workers.
 public void Concurrent_read_write_delete_consistency_with_multiple_merges()
 {
     // NOTE(review): r is shared across threads without a lock;
     // System.Random is not thread-safe — consider synchronizing.
     var r = new Random(1234);
     var id = 0;
     var keys = new Queue<string>();
     AddKeys(keys, 200, ref id);
     var mergeCounter = 0;
     var merges = 0;
     _hash = new FirkinHash<string>(_path, 100 * 2048);
     var dictionary = new Dictionary<string, byte[]>();
     var modified = new HashSet<string>();
     var workers = new List<Thread>();
     var faults = new List<Exception>();
     var iterations = 0;
     var maxIterations = 10000;
     for(var i = 0; i < 10; i++) {
         var workerId = i;
         var worker = new Thread(() => {
             try {
                 _log.DebugFormat("worker {0} started", workerId);
                 while(iterations < maxIterations) {
                     var j = Interlocked.Increment(ref iterations);
                     if(j % 100 == 0) {
                         _log.DebugFormat("iteration {0}", j);
                     }
                     Interlocked.Increment(ref mergeCounter);
                     string k;
                     lock(keys) {
                         if(keys.Count < 10) {
                             AddKeys(keys, 100, ref id);
                         }
                         k = keys.Dequeue();
                     }
                     var entry = _hash.Get(k);
                     var v = TestUtil.GetRandomBytes(r);
                     if(entry != null) {
                         // each existing key is mutated by at most one worker
                         lock(keys) {
                             if(modified.Contains(k)) {
                                 continue;
                             }
                             modified.Add(k);
                         }
                         // drain the entry's stream; the value is not asserted here
                         var v2 = entry.ReadBytes();
                         if(r.Next(4) == 3) {
                             lock(dictionary) {
                                 dictionary.Remove(k);
                             }
                             _hash.Delete(k);
                         } else {
                             lock(dictionary) {
                                 dictionary[k] = v;
                             }
                             _hash.Put(k, v.ToStream(), v.Length);
                         }
                     } else {
                         lock(dictionary) {
                             dictionary[k] = v;
                         }
                         _hash.Put(k, v.ToStream(), v.Length);
                     }
                     lock(keys) {
                         // sometimes requeue untouched keys for another pass
                         if(!modified.Contains(k) && r.Next(3) == 1) {
                             keys.Enqueue(k);
                         }
                     }
                     Thread.Sleep(10);
                 }
                 _log.DebugFormat("worker {0} finished", workerId);
             } catch(Exception e) {
                 lock(faults) {
                     faults.Add(e);
                 }
             }
         }) { IsBackground = true };
         worker.Start();
         workers.Add(worker);
     }
     var start = DateTime.UtcNow;
     while(iterations < maxIterations) {
         lock(faults) {
             if(faults.Any()) {
                 throw faults.First();
             }
         }
         if(DateTime.UtcNow > start.AddMinutes(5)) {
             throw new TimeoutException(string.Format("didn't finish, merges: {0}, items: {1}, queue: {2}, existing modified: {3}", merges, _hash.Count, keys.Count, modified.Count));
         }
         if(mergeCounter >= 2000) {
             merges++;
             Interlocked.Exchange(ref mergeCounter, 0);
             _hash.Merge();
             _log.DebugFormat("merge {0} completed", merges);
         }
     }
     foreach(var worker in workers) {
         worker.Join();
     }
     var files = 0;
     foreach(var file in Directory.GetFiles(_path)) {
         // pass the name as an argument, never as the format string
         _log.DebugFormat("{0}", Path.GetFileName(file));
         if(Path.GetExtension(file) == ".data") {
             files++;
         }
     }
     _log.DebugFormat("merges: {0}, items {1}, existing modified: {2}, files: {3}", merges, _hash.Count, modified.Count, files);
     Assert.AreEqual(dictionary.Count, _hash.Count);
     foreach(var pair in dictionary) {
         Assert.AreEqual(0, pair.Value.Compare(_hash.Get(pair.Key).ReadBytes()));
     }
 }