コード例 #1
0
        public void Read_write_delete_consistency_with_merge_before_read()
        {
            // Interleave random puts, deletes and reads against the store and a
            // shadow dictionary, merge once, then verify the store matches the
            // shadow exactly (same count, byte-identical values).
            var rnd = new Random(1234);
            var expected = new Dictionary <string, byte[]>();

            _hash = new FirkinHash <string>(_path, 10 * 1024);
            for (var iteration = 0; iteration < 200; iteration++)
            {
                // Only 100 distinct keys for 200 operations, so collisions
                // (overwrites and re-deletes) are exercised on purpose.
                var key = "k" + rnd.Next(100);
                var deleteThisRound = rnd.Next(4) == 3;
                if (deleteThisRound)
                {
                    expected.Remove(key);
                    _hash.Delete(key);
                }
                else
                {
                    var payload = TestUtil.GetRandomBytes(rnd);
                    expected[key] = payload;
                    _hash.Put(key, payload.ToStream(), payload.Length);
                }
                // Interleaved read of a random (possibly absent) key.
                _hash.Get("k" + rnd.Next(100));
            }
            _hash.Merge();
            Assert.AreEqual(expected.Count, _hash.Count);
            foreach (var pair in expected)
            {
                Assert.AreEqual(0, pair.Value.Compare(_hash.Get(pair.Key).ReadBytes()));
            }
        }
コード例 #2
0
ファイル: TFirkinHash.cs プロジェクト: nxtplace/Firkin
 public void Active_rolls_over_at_size_barrier()
 {
     // With a 30-byte size barrier each put should roll the active file over;
     // after three puts the directory is expected to hold four files
     // (presumably three rolled files plus the active one — confirm against
     // FirkinHash's file layout).
     _hash = new FirkinHash<string>(_path, 30);
     var stream = "bar".ToStream();
     _hash.Put("foo1", stream, stream.Length);
     stream.Position = 0;
     _hash.Put("foo2", stream, stream.Length);
     stream.Position = 0;
     _hash.Put("foo3", stream, stream.Length);
     stream.Position = 0;
     // Directory.GetFiles returns a string[]; use Length instead of LINQ Count().
     Assert.AreEqual(4, Directory.GetFiles(_path).Length);
 }
コード例 #3
0
        public void Active_rolls_over_at_size_barrier()
        {
            // With a 30-byte size barrier each put should roll the active file
            // over; after three puts the directory is expected to hold four
            // files (presumably three rolled files plus the active one —
            // confirm against FirkinHash's file layout).
            _hash = new FirkinHash <string>(_path, 30);
            var stream = "bar".ToStream();

            _hash.Put("foo1", stream, stream.Length);
            stream.Position = 0;
            _hash.Put("foo2", stream, stream.Length);
            stream.Position = 0;
            _hash.Put("foo3", stream, stream.Length);
            stream.Position = 0;
            // Directory.GetFiles returns a string[]; use Length instead of LINQ Count().
            Assert.AreEqual(4, Directory.GetFiles(_path).Length);
        }
コード例 #4
0
ファイル: TFirkinHash.cs プロジェクト: nxtplace/Firkin
 public void Can_access_keys_across_files_after_hash_reload()
 {
     // Write three entries (the small 30-byte barrier spreads them across
     // files), dispose and reopen the hash, then confirm every key still
     // resolves to its value after the reload.
     _hash = new FirkinHash<string>(_path, 30);
     foreach(var n in new[] { 1, 2, 3 }) {
         var payload = ("bar" + n).ToStream();
         _hash.Put("foo" + n, payload, payload.Length);
     }
     _hash.Dispose();
     _hash = new FirkinHash<string>(_path, 30);
     Assert.AreEqual("bar3", _hash.Get("foo3").To<string>());
     Assert.AreEqual("bar1", _hash.Get("foo1").To<string>());
     Assert.AreEqual("bar2", _hash.Get("foo2").To<string>());
 }
コード例 #5
0
        public void Can_access_keys_across_files_after_hash_reload()
        {
            // Three puts under a tiny size barrier force the data into multiple
            // files; a dispose/reload must leave every key readable.
            _hash = new FirkinHash <string>(_path, 30);
            for (var n = 1; n <= 3; n++)
            {
                var payload = ("bar" + n).ToStream();
                _hash.Put("foo" + n, payload, payload.Length);
            }
            _hash.Dispose();
            _hash = new FirkinHash <string>(_path, 30);
            Assert.AreEqual("bar3", _hash.Get("foo3").To <string>());
            Assert.AreEqual("bar1", _hash.Get("foo1").To <string>());
            Assert.AreEqual("bar2", _hash.Get("foo2").To <string>());
        }
コード例 #6
0
ファイル: TFirkinHash.cs プロジェクト: samus/Firkin
 public void Can_call_merge_and_enumerate_data()
 {
     // Five puts with an overwrite of foo1, then a merge; enumerating the
     // hash (twice, to prove re-enumeration works) yields the live values.
     _hash = new FirkinHash<string>(_path, 60);
     foreach(var write in new[] {
         new[] { "foo1", "bar1" },
         new[] { "foo2", "bar2" },
         new[] { "foo3", "bar3" },
         new[] { "foo4", "bar4" },
         new[] { "foo1", "bar1x" }
     }) {
         var payload = write[1].ToStream();
         _hash.Put(write[0], payload, payload.Length);
     }
     _hash.Merge();
     var expected = new[] { "bar1x", "bar2", "bar3", "bar4" };
     Assert.AreEqual(expected, _hash.OrderBy(item => item.Key).Select(item => item.Value.To<string>()).ToArray());
     Assert.AreEqual(expected, _hash.OrderBy(item => item.Key).Select(item => item.Value.To<string>()).ToArray());
 }
コード例 #7
0
        public void Can_call_merge_and_enumerate_data()
        {
            // Write five entries (foo1 overwritten), merge, then enumerate the
            // store twice ordered by key; both passes must see the live data.
            _hash = new FirkinHash <string>(_path, 60);
            var writes = new[]
            {
                new KeyValuePair <string, string>("foo1", "bar1"),
                new KeyValuePair <string, string>("foo2", "bar2"),
                new KeyValuePair <string, string>("foo3", "bar3"),
                new KeyValuePair <string, string>("foo4", "bar4"),
                new KeyValuePair <string, string>("foo1", "bar1x"),
            };

            foreach (var write in writes)
            {
                var payload = write.Value.ToStream();
                _hash.Put(write.Key, payload, payload.Length);
            }
            _hash.Merge();
            var expected = new[] { "bar1x", "bar2", "bar3", "bar4" };

            Assert.AreEqual(expected, (from item in _hash orderby item.Key select item.Value.To <string>()).ToArray());
            Assert.AreEqual(expected, (from item in _hash orderby item.Key select item.Value.To <string>()).ToArray());
        }
コード例 #8
0
        public void Read_write_delete_consistency_with_multiple_merges()
        {
            // Ten rounds of randomized put/delete/get traffic against the store
            // and a shadow dictionary, thinning keys between rounds and merging
            // each round; afterwards the store must match the shadow exactly.
            var r = new Random(1234);

            _hash = new FirkinHash <string>(_path, 10 * 2048);
            var dictionary = new Dictionary <string, byte[]>();

            for (var j = 0; j < 10; j++)
            {
                for (var i = 0; i < 1000; i++)
                {
                    // Only 100 distinct keys for 1000 operations: collisions
                    // (overwrites and re-deletes) are exercised on purpose.
                    var k = "k" + r.Next(100);
                    if (r.Next(4) == 3)
                    {
                        dictionary.Remove(k);
                        _hash.Delete(k);
                    }
                    else
                    {
                        var v = TestUtil.GetRandomBytes(r);
                        dictionary[k] = v;
                        _hash.Put(k, v.ToStream(), v.Length);
                    }
                    // Interleaved read of a random (possibly absent) key.
                    _hash.Get("k" + r.Next(100));
                }
                // Thin out the store: delete every other enumerated entry
                // (the 1st, 3rd, ... — skip starts true and toggles first).
                // NOTE(review): this deletes from _hash while enumerating it;
                // relies on FirkinHash tolerating mutation during enumeration.
                var skip = true;
                foreach (var k in _hash)
                {
                    skip = !skip;
                    if (skip)
                    {
                        continue;
                    }
                    dictionary.Remove(k.Key);
                    _hash.Delete(k.Key);
                }
                _hash.Merge();
            }
            Assert.AreEqual(dictionary.Count, _hash.Count);
            foreach (var pair in dictionary)
            {
                Assert.AreEqual(0, pair.Value.Compare(_hash.Get(pair.Key).ReadBytes()));
            }
        }
コード例 #9
0
ファイル: TStackoverflow.cs プロジェクト: kouweizhong/Firkin
        public void Read_write_users_with_hash_reload()
        {
            // End-to-end timing test: write all User entity streams to a fresh
            // FirkinHash, reload it from disk, read everything back, and verify
            // each stored stream is byte-identical to its source.
            var users = GetDataSource <User>().ToDictionary(k => k.Id, v => GetEntityStream(v));

            if (!users.Any())
            {
                // No data source available in this environment; nothing to test.
                return;
            }
            var path = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString());
            var hash = new FirkinHash <int>(path);

            try {
                var elapsed = Diagnostics.Time(() => {
                    foreach (var user in users)
                    {
                        hash.Put(user.Key, user.Value, user.Value.Length);
                    }
                });
                Console.WriteLine("Wrote {0} users to firkin @ {1:0,0} users/second)", users.Count, users.Count / elapsed.TotalSeconds);
                // Dispose and reopen so reads come from the persisted files.
                hash.Dispose();
                _log.DebugFormat("re-loading hash");
                hash = new FirkinHash <int>(path);
                var comp = new List <Stream[]>();
                elapsed = Diagnostics.Time(() => {
                    // Read in ascending size order; copy each result into memory
                    // so the verification below is independent of the hash.
                    foreach (var user in users.OrderBy(x => x.Value.Length))
                    {
                        var stream = hash.Get(user.Key);
                        comp.Add(new[] { new MemoryStream(stream.ReadBytes(stream.Length)), user.Value });
                    }
                });
                Console.WriteLine("Queried {0} users from firkin @ {1:0,0} users/second)", users.Count, users.Count / elapsed.TotalSeconds);
                foreach (var pair in comp)
                {
                    // Rewind both streams before comparing full contents.
                    pair[0].Position = 0;
                    pair[1].Position = 0;
                    Assert.AreEqual(pair[0].ReadBytes(pair[0].Length), pair[1].ReadBytes(pair[1].Length));
                }
            } finally {
                // Always release the store and remove the temp directory.
                hash.Dispose();
                Directory.Delete(path, true);
            }
        }
コード例 #10
0
        public void Can_call_merge_and_retrieve_data()
        {
            // Five puts (foo1 written twice) followed by a merge: the count
            // collapses to four and each key returns its latest value.
            _hash = new FirkinHash <string>(_path, 60);
            var writes = new[]
            {
                new KeyValuePair <string, string>("foo1", "bar1"),
                new KeyValuePair <string, string>("foo2", "bar2"),
                new KeyValuePair <string, string>("foo3", "bar3"),
                new KeyValuePair <string, string>("foo4", "bar4"),
                new KeyValuePair <string, string>("foo1", "bar1x"),
            };

            foreach (var write in writes)
            {
                var payload = write.Value.ToStream();
                _hash.Put(write.Key, payload, payload.Length);
            }
            _hash.Merge();
            Assert.AreEqual(4, _hash.Count);
            Assert.AreEqual("bar3", _hash.Get("foo3").To <string>());
            Assert.AreEqual("bar1x", _hash.Get("foo1").To <string>());
            Assert.AreEqual("bar2", _hash.Get("foo2").To <string>());
            Assert.AreEqual("bar4", _hash.Get("foo4").To <string>());
        }
コード例 #11
0
ファイル: TFirkinHash.cs プロジェクト: nxtplace/Firkin
 public void Can_call_merge_and_reload_hash_then_retrieve_data()
 {
     // Write five entries (foo1 overwritten), merge, dispose and reopen the
     // store, then confirm every key returns its latest value.
     _hash = new FirkinHash<string>(_path, 30);
     foreach(var write in new[] {
         new KeyValuePair<string, string>("foo1", "bar1"),
         new KeyValuePair<string, string>("foo2", "bar2"),
         new KeyValuePair<string, string>("foo3", "bar3"),
         new KeyValuePair<string, string>("foo4", "bar4"),
         new KeyValuePair<string, string>("foo1", "bar1x")
     }) {
         var payload = write.Value.ToStream();
         _hash.Put(write.Key, payload, payload.Length);
     }
     _hash.Merge();
     _hash.Dispose();
     _log.DebugFormat("re-loading hash");
     _hash = new FirkinHash<string>(_path, 30);
     Assert.AreEqual("bar3", _hash.Get("foo3").To<string>());
     Assert.AreEqual("bar1x", _hash.Get("foo1").To<string>());
     Assert.AreEqual("bar2", _hash.Get("foo2").To<string>());
     Assert.AreEqual("bar4", _hash.Get("foo4").To<string>());
 }
コード例 #12
0
ファイル: TStackoverflow.cs プロジェクト: kouweizhong/Firkin
        public void Iterate_over_users_with_Firkin()
        {
            // Timing test for enumeration: write all User entity streams to a
            // fresh FirkinHash, iterate the whole store, and verify every
            // enumerated value is byte-identical to its source stream.
            var users = GetDataSource <User>().ToDictionary(k => k.Id, v => GetEntityStream(v));

            if (!users.Any())
            {
                // No data source available in this environment; nothing to test.
                return;
            }
            var path = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString());
            var hash = new FirkinHash <int>(path);

            try {
                var elapsed = Diagnostics.Time(() => {
                    foreach (var user in users)
                    {
                        hash.Put(user.Key, user.Value, user.Value.Length);
                    }
                });
                Console.WriteLine("Wrote {0} users to firkin @ {1:0,0} users/second)", users.Count, users.Count / elapsed.TotalSeconds);
                // Copy each enumerated value into memory so verification below
                // is independent of the hash's own streams.
                var comp = new List <KeyValuePair <int, Stream> >();
                elapsed = Diagnostics.Time(() => {
                    foreach (var pair in hash)
                    {
                        comp.Add(new KeyValuePair <int, Stream>(pair.Key, new MemoryStream(pair.Value.ReadBytes(pair.Value.Length))));
                    }
                });
                Console.WriteLine("Queried {0} users from firkin @ {1:0,0} users/second)", users.Count, users.Count / elapsed.TotalSeconds);
                foreach (var pair in comp)
                {
                    // Rewind the source stream before comparing full contents.
                    var userStream = users[pair.Key];
                    userStream.Position = 0;
                    Assert.AreEqual(userStream.ReadBytes(userStream.Length), pair.Value.ReadBytes(pair.Value.Length));
                }
            } finally {
                // Always release the store and remove the temp directory.
                hash.Dispose();
                Directory.Delete(path, true);
            }
        }
コード例 #13
0
        public void Can_call_merge_and_reload_hash_then_retrieve_data()
        {
            // Persist five entries (foo1 is overwritten), merge, dispose and
            // reopen the store, then verify each key's latest value survives.
            _hash = new FirkinHash <string>(_path, 30);
            var keys   = new[] { "foo1", "foo2", "foo3", "foo4", "foo1" };
            var values = new[] { "bar1", "bar2", "bar3", "bar4", "bar1x" };

            for (var n = 0; n < keys.Length; n++)
            {
                var payload = values[n].ToStream();
                _hash.Put(keys[n], payload, payload.Length);
            }
            _hash.Merge();
            _hash.Dispose();
            _log.DebugFormat("re-loading hash");
            _hash = new FirkinHash <string>(_path, 30);
            Assert.AreEqual("bar3", _hash.Get("foo3").To <string>());
            Assert.AreEqual("bar1x", _hash.Get("foo1").To <string>());
            Assert.AreEqual("bar2", _hash.Get("foo2").To <string>());
            Assert.AreEqual("bar4", _hash.Get("foo4").To <string>());
        }
コード例 #14
0
ファイル: TFirkinHash.cs プロジェクト: samus/Firkin
 public void Concurrent_read_write_delete_consistency_with_multiple_merges()
 {
     // Ten background workers hammer the hash with random put/delete/get
     // traffic while the main thread periodically merges; at the end the
     // store must agree with the lock-protected shadow dictionary.
     // NOTE(review): 'r' (Random) is shared across workers without a lock;
     // Random is not thread-safe — confirm whether corruption matters here.
     var r = new Random(1234);
     var id = 0;
     var keys = new Queue<string>();
     AddKeys(keys, 200, ref id);
     var mergeCounter = 0;
     var merges = 0;
     _hash = new FirkinHash<string>(_path, 100 * 2048);
     var dictionary = new Dictionary<string, byte[]>();
     var modified = new HashSet<string>();
     var workers = new List<Thread>();
     var faults = new List<Exception>();
     var iterations = 0;
     var maxIterations = 10000;
     for(var i = 0; i < 10; i++) {
         var workerId = i;
         var worker = new Thread(() => {
             try {
                 _log.DebugFormat("worker {0} started", workerId);
                 while(iterations < maxIterations) {
                     var j = Interlocked.Increment(ref iterations);
                     if(j % 100 == 0) {
                         _log.DebugFormat("iteration {0}", j);
                     }
                     Interlocked.Increment(ref mergeCounter);
                     string k;
                     lock(keys) {
                         if(keys.Count < 10) {
                             AddKeys(keys, 100, ref id);
                         }
                         k = keys.Dequeue();
                     }
                     var entry = _hash.Get(k);
                     var v = TestUtil.GetRandomBytes(r);
                     if(entry != null) {
                         lock(keys) {
                             if(modified.Contains(k)) {
                                 continue;
                             }
                             modified.Add(k);
                         }
                         // Consume the stream to exercise reads that race
                         // with concurrent writes/merges.
                         var v2 = entry.ReadBytes();
                         if(r.Next(4) == 3) {
                             lock(dictionary) {
                                 dictionary.Remove(k);
                             }
                             _hash.Delete(k);
                         } else {
                             lock(dictionary) {
                                 dictionary[k] = v;
                             }
                             _hash.Put(k, v.ToStream(), v.Length);
                         }
                     } else {
                         lock(dictionary) {
                             dictionary[k] = v;
                         }
                         _hash.Put(k, v.ToStream(), v.Length);
                     }
                     lock(keys) {
                         if(!modified.Contains(k) && r.Next(3) == 1) {
                             keys.Enqueue(k);
                         }
                     }
                     Thread.Sleep(10);
                 }
                 _log.DebugFormat("worker {0} finished", workerId);
             } catch(Exception e) {
                 // List<T> is not thread-safe: serialize fault recording so
                 // concurrent worker failures cannot corrupt the list.
                 lock(faults) {
                     faults.Add(e);
                 }
             }
         }) { IsBackground = true };
         worker.Start();
         workers.Add(worker);
     }
     var start = DateTime.UtcNow;
     while(iterations < maxIterations) {
         // Read under the same lock workers use to record faults.
         lock(faults) {
             if(faults.Any()) {
                 throw faults.First();
             }
         }
         if(DateTime.UtcNow > start.AddMinutes(5)) {
             // Fixed: the last placeholder was a duplicated {2}, so
             // modified.Count was never actually reported.
             throw new TimeoutException(string.Format("didn't finish, merges: {0}, items: {1}, queue: {2}, existing modified: {3}", merges, _hash.Count, keys.Count, modified.Count));
         }
         if(mergeCounter >= 2000) {
             merges++;
             // Reset atomically: workers increment this counter concurrently.
             Interlocked.Exchange(ref mergeCounter, 0);
             _hash.Merge();
             _log.DebugFormat("merge {0} completed", merges);
         }
     }
     foreach(var worker in workers) {
         worker.Join();
     }
     var files = 0;
     foreach(var file in Directory.GetFiles(_path)) {
         // Pass the file name as an argument, not as the format string:
         // a name containing '{' would throw a FormatException.
         _log.DebugFormat("{0}", Path.GetFileName(file));
         if(Path.GetExtension(file) == ".data") {
             files++;
         }
     }
     _log.DebugFormat("merges: {0}, items {1}, existing modified: {2}, files: {3}", merges, _hash.Count, modified.Count, files);
     Assert.AreEqual(dictionary.Count, _hash.Count);
     foreach(var pair in dictionary) {
         Assert.AreEqual(0, pair.Value.Compare(_hash.Get(pair.Key).ReadBytes()));
     }
 }
コード例 #15
0
ファイル: TFirkinHash.cs プロジェクト: nxtplace/Firkin
 public void Read_write_delete_consistency_with_merge_in_middle()
 {
     // Random put/delete/get traffic, a merge halfway through, then more
     // traffic; the store must still match the shadow dictionary at the end.
     var r = new Random(1234);
     _hash = new FirkinHash<string>(_path, 10 * 1024);
     var dictionary = new Dictionary<string, byte[]>();
     for(var i = 0; i < 500; i++) {
         var k = "k" + r.Next(100);
         if(r.Next(4) == 3) {
             dictionary.Remove(k);
             _hash.Delete(k);
         } else {
             var v = TestUtil.GetRandomBytes(r);
             dictionary[k] = v;
             _hash.Put(k, v.ToStream(), v.Length);
         }
         // Interleaved read of a random (possibly absent) key.
         _hash.Get("k" + r.Next(100));
     }
     _hash.Merge();
     for(var i = 0; i < 500; i++) {
         var k = "k" + r.Next(100);
         // Fixed: r.Next(5) returns 0..4, so the original "== 5" comparison
         // was always false and the post-merge delete branch was dead code.
         if(r.Next(5) == 4) {
             dictionary.Remove(k);
             _hash.Delete(k);
         } else {
             var v = TestUtil.GetRandomBytes(r);
             dictionary[k] = v;
             _hash.Put(k, v.ToStream(), v.Length);
         }
         _hash.Get("k" + r.Next(100));
     }
     Assert.AreEqual(dictionary.Count, _hash.Count);
     foreach(var pair in dictionary) {
         Assert.AreEqual(0, pair.Value.Compare(_hash.Get(pair.Key).ReadBytes()));
     }
 }
コード例 #16
0
ファイル: TFirkinHash.cs プロジェクト: nxtplace/Firkin
 public void CreateHash()
 {
     // Fixture setup: open a fresh FirkinHash over the per-test path.
     _hash = new FirkinHash<string>(_path);
 }
コード例 #17
0
ファイル: TFirkinHash.cs プロジェクト: nxtplace/Firkin
 public void Can_call_merge_and_retrieve_data()
 {
     // After five puts (foo1 twice) and a merge, only four live entries
     // remain and each key resolves to its most recent value.
     _hash = new FirkinHash<string>(_path, 60);
     foreach(var pair in new[] {
         new[] { "foo1", "bar1" },
         new[] { "foo2", "bar2" },
         new[] { "foo3", "bar3" },
         new[] { "foo4", "bar4" },
         new[] { "foo1", "bar1x" }
     }) {
         var payload = pair[1].ToStream();
         _hash.Put(pair[0], payload, payload.Length);
     }
     _hash.Merge();
     Assert.AreEqual(4, _hash.Count);
     Assert.AreEqual("bar3", _hash.Get("foo3").To<string>());
     Assert.AreEqual("bar1x", _hash.Get("foo1").To<string>());
     Assert.AreEqual("bar2", _hash.Get("foo2").To<string>());
     Assert.AreEqual("bar4", _hash.Get("foo4").To<string>());
 }
コード例 #18
0
        public void Concurrent_read_write_delete_consistency_with_multiple_merges()
        {
            // Ten background workers hammer the hash with random put/delete/get
            // traffic while the main thread periodically merges; at the end the
            // store must agree with the lock-protected shadow dictionary.
            // NOTE(review): 'r' (Random) is shared across workers without a
            // lock; Random is not thread-safe — confirm whether that matters.
            var r    = new Random(1234);
            var id   = 0;
            var keys = new Queue <string>();

            AddKeys(keys, 200, ref id);
            var mergeCounter = 0;
            var merges       = 0;

            _hash = new FirkinHash <string>(_path, 100 * 2048);
            var dictionary    = new Dictionary <string, byte[]>();
            var modified      = new HashSet <string>();
            var workers       = new List <Thread>();
            var faults        = new List <Exception>();
            var iterations    = 0;
            var maxIterations = 10000;

            for (var i = 0; i < 10; i++)
            {
                var workerId = i;
                var worker   = new Thread(() => {
                    try {
                        _log.DebugFormat("worker {0} started", workerId);
                        while (iterations < maxIterations)
                        {
                            var j = Interlocked.Increment(ref iterations);
                            if (j % 100 == 0)
                            {
                                _log.DebugFormat("iteration {0}", j);
                            }
                            Interlocked.Increment(ref mergeCounter);
                            string k;
                            lock (keys) {
                                if (keys.Count < 10)
                                {
                                    AddKeys(keys, 100, ref id);
                                }
                                k = keys.Dequeue();
                            }
                            var entry = _hash.Get(k);
                            var v     = TestUtil.GetRandomBytes(r);
                            if (entry != null)
                            {
                                lock (keys) {
                                    if (modified.Contains(k))
                                    {
                                        continue;
                                    }
                                    modified.Add(k);
                                }
                                // Consume the stream to exercise reads racing
                                // with concurrent writes/merges.
                                var v2 = entry.ReadBytes();
                                if (r.Next(4) == 3)
                                {
                                    lock (dictionary) {
                                        dictionary.Remove(k);
                                    }
                                    _hash.Delete(k);
                                }
                                else
                                {
                                    lock (dictionary) {
                                        dictionary[k] = v;
                                    }
                                    _hash.Put(k, v.ToStream(), v.Length);
                                }
                            }
                            else
                            {
                                lock (dictionary) {
                                    dictionary[k] = v;
                                }
                                _hash.Put(k, v.ToStream(), v.Length);
                            }
                            lock (keys) {
                                if (!modified.Contains(k) && r.Next(3) == 1)
                                {
                                    keys.Enqueue(k);
                                }
                            }
                            Thread.Sleep(10);
                        }
                        _log.DebugFormat("worker {0} finished", workerId);
                    } catch (Exception e) {
                        // List<T> is not thread-safe: serialize fault recording
                        // so concurrent failures cannot corrupt the list.
                        lock (faults) {
                            faults.Add(e);
                        }
                    }
                })
                {
                    IsBackground = true
                };
                worker.Start();
                workers.Add(worker);
            }
            var start = DateTime.UtcNow;

            while (iterations < maxIterations)
            {
                // Read under the same lock workers use to record faults.
                lock (faults) {
                    if (faults.Any())
                    {
                        throw faults.First();
                    }
                }
                if (DateTime.UtcNow > start.AddMinutes(5))
                {
                    // Fixed: the last placeholder was a duplicated {2}, so
                    // modified.Count was never actually reported.
                    throw new TimeoutException(string.Format("didn't finish, merges: {0}, items: {1}, queue: {2}, existing modified: {3}", merges, _hash.Count, keys.Count, modified.Count));
                }
                if (mergeCounter >= 2000)
                {
                    merges++;
                    // Reset atomically: workers increment this concurrently.
                    Interlocked.Exchange(ref mergeCounter, 0);
                    _hash.Merge();
                    _log.DebugFormat("merge {0} completed", merges);
                }
            }
            foreach (var worker in workers)
            {
                worker.Join();
            }
            var files = 0;

            foreach (var file in Directory.GetFiles(_path))
            {
                // Pass the file name as an argument, not as the format string:
                // a name containing '{' would throw a FormatException.
                _log.DebugFormat("{0}", Path.GetFileName(file));
                if (Path.GetExtension(file) == ".data")
                {
                    files++;
                }
            }
            _log.DebugFormat("merges: {0}, items {1}, existing modified: {2}, files: {3}", merges, _hash.Count, modified.Count, files);
            Assert.AreEqual(dictionary.Count, _hash.Count);
            foreach (var pair in dictionary)
            {
                Assert.AreEqual(0, pair.Value.Compare(_hash.Get(pair.Key).ReadBytes()));
            }
        }
コード例 #19
0
ファイル: TFirkinHash.cs プロジェクト: samus/Firkin
 public void Read_write_delete_consistency_with_multiple_merges()
 {
     // Ten rounds of randomized put/delete/get traffic against the store and
     // a shadow dictionary, thinning keys between rounds and merging each
     // round; afterwards the store must match the shadow exactly.
     var r = new Random(1234);
     _hash = new FirkinHash<string>(_path, 10 * 2048);
     var dictionary = new Dictionary<string, byte[]>();
     for(var j = 0; j < 10; j++) {
         for(var i = 0; i < 1000; i++) {
             // Only 100 distinct keys for 1000 operations: collisions
             // (overwrites and re-deletes) are exercised on purpose.
             var k = "k" + r.Next(100);
             if(r.Next(4) == 3) {
                 dictionary.Remove(k);
                 _hash.Delete(k);
             } else {
                 var v = TestUtil.GetRandomBytes(r);
                 dictionary[k] = v;
                 _hash.Put(k, v.ToStream(), v.Length);
             }
             // Interleaved read of a random (possibly absent) key.
             _hash.Get("k" + r.Next(100));
         }
         // Thin out the store: delete every other enumerated entry (the 1st,
         // 3rd, ... — skip starts true and toggles first).
         // NOTE(review): this deletes from _hash while enumerating it; relies
         // on FirkinHash tolerating mutation during enumeration.
         var skip = true;
         foreach(var k in _hash) {
             skip = !skip;
             if(skip) {
                 continue;
             }
             dictionary.Remove(k.Key);
             _hash.Delete(k.Key);
         }
         _hash.Merge();
     }
     Assert.AreEqual(dictionary.Count, _hash.Count);
     foreach(var pair in dictionary) {
         Assert.AreEqual(0, pair.Value.Compare(_hash.Get(pair.Key).ReadBytes()));
     }
 }
コード例 #20
0
 public void CreateHash()
 {
     // Fixture setup: open a fresh FirkinHash over the per-test path.
     _hash = new FirkinHash <string>(_path);
 }