/// <summary>
/// Verifies the observable contract of <c>TransactionLogOptions&lt;int, string&gt;</c>:
/// constructor arguments are exposed unchanged, defaults are as documented,
/// each writable property round-trips, and Clone produces a distinct instance.
/// </summary>
public void TestTransactionLogOptions()
{
    using (TempFile tempFile = new TempFile())
    {
        // The options object only needs the path, not an existing file.
        tempFile.Delete();

        var options = new TransactionLogOptions<int, string>(
            tempFile.TempPath, PrimitiveSerializer.Int32, PrimitiveSerializer.String);

        // Constructor arguments are exposed verbatim.
        Assert.AreEqual(tempFile.TempPath, options.FileName);
        Assert.IsTrue(ReferenceEquals(options.KeySerializer, PrimitiveSerializer.Int32));
        Assert.IsTrue(ReferenceEquals(options.ValueSerializer, PrimitiveSerializer.String));

        // FileOptions: defaults to WriteThrough; additional flags can be OR-ed in.
        Assert.AreEqual(FileOptions.WriteThrough, options.FileOptions);
        Assert.AreEqual(
            FileOptions.WriteThrough | FileOptions.Asynchronous,
            options.FileOptions |= FileOptions.Asynchronous);

        // ReadOnly: defaults to false, settable to true.
        Assert.AreEqual(false, options.ReadOnly);
        Assert.AreEqual(true, options.ReadOnly = true);

        // FileBuffer: defaults to 8 bytes, settable to a larger buffer.
        Assert.AreEqual(8, options.FileBuffer);
        Assert.AreEqual(0x40000, options.FileBuffer = 0x40000);

        // Clone yields a separate instance, not the same reference.
        Assert.IsFalse(ReferenceEquals(options, options.Clone()));

        // The configured options can actually open an (empty) transaction log.
        using (TransactionLog<int, string> txLog = new TransactionLog<int, string>(options))
        {
            Assert.AreEqual(0, txLog.Size);
        }
    }
}
/// <summary>
/// Exercises TransactionLogOptions&lt;int, string&gt;: construction, default
/// property values, property setters, Clone semantics, and that a fresh
/// TransactionLog built from the options reports zero size.
/// </summary>
public void TestTransactionLogOptions()
{
    using (TempFile temp = new TempFile())
    {
        temp.Delete(); // only the path is needed; the file itself must not pre-exist

        var opt = new TransactionLogOptions<int, string>(
            temp.TempPath,
            PrimitiveSerializer.Int32,
            PrimitiveSerializer.String);

        // FileName echoes the constructor path.
        Assert.AreEqual(temp.TempPath, opt.FileName);

        // Key/Value serializers are held by reference, not copied.
        Assert.IsTrue(ReferenceEquals(opt.KeySerializer, PrimitiveSerializer.Int32));
        Assert.IsTrue(ReferenceEquals(opt.ValueSerializer, PrimitiveSerializer.String));

        // FileOptions default and OR-assignment round-trip.
        Assert.AreEqual(FileOptions.WriteThrough, opt.FileOptions);
        Assert.AreEqual(FileOptions.WriteThrough | FileOptions.Asynchronous,
                        opt.FileOptions |= FileOptions.Asynchronous);

        // ReadOnly default and round-trip.
        Assert.AreEqual(false, opt.ReadOnly);
        Assert.AreEqual(true, opt.ReadOnly = true);

        // FileBuffer default and round-trip.
        Assert.AreEqual(8, opt.FileBuffer);
        Assert.AreEqual(0x40000, opt.FileBuffer = 0x40000);

        // Clone returns a new instance.
        Assert.IsFalse(ReferenceEquals(opt, opt.Clone()));

        // A log opened on a fresh file starts empty.
        using (TransactionLog<int, string> log = new TransactionLog<int, string>(opt))
        {
            Assert.AreEqual(0, log.Size);
        }
    }
}
/// <summary>
/// Benchmark: logs 1,000,000 transactions of 20 values (100 bytes each) and
/// prints the total bytes written and elapsed wall-clock time.
/// </summary>
/// <remarks>
/// Historical results recorded by the original author:
///   Write 2,147,483,776 bytes in: 00:02:09.7934237 (in chunks of 128 bytes)
///   Write 4,295,032,832 bytes in: 00:00:18.4990581 (in chunks of 65536 bytes)
///   Logged 2,398,000,000 bytes in: 00:00:36.7621027
/// </remarks>
public void TestBenchmarkWriteSpeed()
{
    // Fix: the original used a hard-coded @"C:\Temp\LogTest\" directory, which
    // fails on machines where that directory does not exist and is not portable.
    // The system temp directory is always present and writable.
    string newpath = Path.Combine(Path.GetTempPath(), Guid.NewGuid() + ".tmp");

    using (TempFile tmp = TempFile.Attach(newpath))
    {
        // Large buffer + sequential-scan hint: this is a pure append workload.
        var options = new TransactionLogOptions<Guid, byte[]>(
            tmp.TempPath, PrimitiveSerializer.Guid, PrimitiveSerializer.Bytes)
        {
            FileBuffer = ushort.MaxValue,
            FileOptions = FileOptions.None | FileOptions.SequentialScan,
        };

        // Pre-generate the keys so key creation is excluded from the timing.
        Guid[] ids = new Guid[1000000];
        for (int i = 0; i < ids.Length; i++)
        {
            ids[i] = Guid.NewGuid();
        }

        byte[] bytes = new byte[100];
        new Random().NextBytes(bytes);

        DateTime start = DateTime.UtcNow;
        using (var log = new TransactionLog<Guid, byte[]>(options))
        {
            foreach (Guid id in ids)
            {
                var token = log.BeginTransaction();
                for (int i = 0; i < 20; i++)
                {
                    log.AddValue(ref token, id, bytes);
                }
                log.CommitTransaction(ref token);
            }
        }

        Console.WriteLine("Logged {0:n0} bytes in: {1}", tmp.Length, DateTime.UtcNow - start);
    }
}
/// <summary>
/// Initializes the on-disk main B+Tree (with optional transaction journal),
/// the in-memory transitory tree used to stage uncommitted index operations,
/// and the numeric bounds restored from existing tree content.
/// </summary>
/// <exception cref="IndexException">
/// Thrown when the main tree cannot be constructed or numeric bounds cannot
/// be calculated from the existing tree.
/// </exception>
public virtual void Initialize()
{
    #region Main Tree Initialization
    var treeOptions = new BPlusTree<AttributeValue, long>.OptionsV2(
        AttributeValueSerializer.Global, new PrimitiveSerializer());

    if (_configuration.JournalEnabled)
    {
        // Journaled mode: write-through transaction log alongside the tree file.
        var transactionLogOptions = new TransactionLogOptions<AttributeValue, long>(
            _path + ".tlog", new AttributeValueSerializer(), new PrimitiveSerializer());
        transactionLogOptions.FileOptions = FileOptions.WriteThrough;
        transactionLogOptions.FileBuffer = 4096;

        treeOptions.TransactionLog = new TransactionLog<AttributeValue, long>(transactionLogOptions);
        treeOptions.TransactionLogFileName = transactionLogOptions.FileName;
        treeOptions.StoragePerformance = StoragePerformance.LogFileNoCache;
    }
    else
    {
        treeOptions.StoragePerformance = StoragePerformance.LogFileInCache;
    }

    // Cache policy from configuration; anything unrecognized (or absent) => Recent.
    // ToLowerInvariant: config keywords must not be subject to culture-specific
    // casing rules (e.g. the Turkish 'I' problem with ToLower()).
    if (_configuration.CachePolicy != null)
    {
        switch (_configuration.CachePolicy.ToLowerInvariant())
        {
            case "all":
                treeOptions.CachePolicy = CachePolicy.All;
                break;
            case "none":
                treeOptions.CachePolicy = CachePolicy.None;
                break;
            default:
                treeOptions.CachePolicy = CachePolicy.Recent;
                break;
        }
    }
    else
    {
        treeOptions.CachePolicy = CachePolicy.Recent;
    }

    treeOptions.FileName = _path;
    treeOptions.StorageType = StorageType.Disk;
    treeOptions.CreateFile = CreatePolicy.IfNeeded;
    treeOptions.BTreeOrder = 64;
    // External locking is handled above this layer, so in-tree locking is disabled.
    treeOptions.LockingFactory = new IgnoreLockFactory();
    treeOptions.CallLevelLock = new IgnoreLocking();
    // BUG FIX: the original unconditionally reset StoragePerformance to
    // StoragePerformance.Default here, silently discarding the journal-aware
    // LogFileNoCache / LogFileInCache choice made above (making JournalEnabled
    // partially ineffective). That override has been removed.

    try
    {
        _tree = new BPlusTree<AttributeValue, long>(treeOptions);
        _tree.EnableCount();
        if (LoggerManager.Instance.IndexLogger != null && LoggerManager.Instance.IndexLogger.IsInfoEnabled)
        {
            LoggerManager.Instance.IndexLogger.Info("BPlusIndex",
                "Index (s) " + _indexKey.ToString() + " defined");
        }
    }
    catch (Exception ex)
    {
        // BUG FIX: previously the rethrow lived inside the logger null-check, so
        // with no logger configured the failure was silently swallowed and the
        // index appeared initialized with a null _tree. Log when possible; throw always.
        if (LoggerManager.Instance.IndexLogger != null)
        {
            LoggerManager.Instance.IndexLogger.Error("BPlusIndex",
                "Error: " + ErrorCodes.Indexes.TREE_INITIALIZATION_FAILURE +
                " - Failed to initialize Index for attribute(s) " +
                _indexKey.ToString() + Environment.NewLine + ex.ToString());
        }
        throw new IndexException(ErrorCodes.Indexes.TREE_INITIALIZATION_FAILURE);
    }
    #endregion

    #region Transitory Tree Initialization
    // In-memory staging tree for not-yet-committed index operations.
    var transitoryTreeOptions = new BPlusTree<AttributeValue, IndexOp<long>>.Options(
        AttributeValueSerializer.Global, IndexOpSerializer<long>.Global);
    transitoryTreeOptions.StorageType = StorageType.Memory;
    transitoryTreeOptions.LockingFactory = new LockFactory<ReaderWriterLocking>();
    transitoryTreeOptions.CallLevelLock = new ReaderWriterLocking();

    _transitionTree = new BPlusTree<AttributeValue, IndexOp<long>>(transitoryTreeOptions);
    opsToCommit = new ConcurrentDictionary<long, IList<IndexOp<long>>>();
    appliedOps = new ClusteredList<long>();
    #endregion

    _bounds = new BoundingBox(1);
    try
    {
        RestoreBoundsFromTree();
    }
    catch (Exception ex)
    {
        // BUG FIX: same swallow pattern as above — always rethrow, log when possible.
        if (LoggerManager.Instance.IndexLogger != null)
        {
            LoggerManager.Instance.IndexLogger.Error("BPlusIndex",
                "Error: " + ErrorCodes.Indexes.NUMERIC_BOUNDS_CALCULATION_FAILURE +
                " - Failed to calculate numeric bounds of Index for attribute(s) " +
                _indexKey.ToString() + Environment.NewLine + ex);
        }
        throw new IndexException(ErrorCodes.Indexes.NUMERIC_BOUNDS_CALCULATION_FAILURE);
    }
}
/// <summary>
/// Benchmark: writes 1,000,000 committed transactions (20 values of 100 bytes
/// per transaction) through TransactionLog and reports bytes/elapsed time.
/// </summary>
/// <remarks>
/// Historical results recorded by the original author:
///   Write 2,147,483,776 bytes in: 00:02:09.7934237 (in chunks of 128 bytes)
///   Write 4,295,032,832 bytes in: 00:00:18.4990581 (in chunks of 65536 bytes)
///   Logged 2,398,000,000 bytes in: 00:00:36.7621027
/// </remarks>
public void TestBenchmarkWriteSpeed()
{
    // Fix: replaced the hard-coded @"C:\Temp\LogTest\" directory (which must
    // pre-exist and is Windows-machine specific) with the system temp directory.
    string newpath = Path.Combine(Path.GetTempPath(), Guid.NewGuid() + ".tmp");

    using (TempFile tmp = TempFile.Attach(newpath))
    {
        var options = new TransactionLogOptions<Guid, byte[]>(
            tmp.TempPath, PrimitiveSerializer.Guid, PrimitiveSerializer.Bytes)
        {
            // Large buffer + sequential hint: append-only benchmark workload.
            FileBuffer = ushort.MaxValue,
            FileOptions = FileOptions.None | FileOptions.SequentialScan,
        };

        // Generate keys up front so GUID creation is not part of the timing.
        Guid[] ids = new Guid[1000000];
        for (int i = 0; i < ids.Length; i++)
        {
            ids[i] = Guid.NewGuid();
        }

        byte[] bytes = new byte[100];
        new Random().NextBytes(bytes);

        DateTime start = DateTime.UtcNow;
        using (var log = new TransactionLog<Guid, byte[]>(options))
        {
            foreach (Guid id in ids)
            {
                var token = log.BeginTransaction();
                for (int i = 0; i < 20; i++)
                {
                    log.AddValue(ref token, id, bytes);
                }
                log.CommitTransaction(ref token);
            }
        }

        Console.WriteLine("Logged {0:n0} bytes in: {1}", tmp.Length, DateTime.UtcNow - start);
    }
}