public void TestSmall()
{
    // Writes ten million sequentially timestamped points into an in-memory
    // archive, then scans them back and prints how many were read.
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();

    using (SortedTreeFile archive = SortedTreeFile.CreateInMemory())
    using (SortedTreeTable<HistorianKey, HistorianValue> table = archive.OpenOrCreateTable<HistorianKey, HistorianValue>(EncodingDefinition.FixedSizeCombinedEncoding))
    {
        // Populate the table inside a single edit transaction.
        using (SortedTreeTableEditor<HistorianKey, HistorianValue> editor = table.BeginEdit())
        {
            for (int index = 0; index < 10000000; index++)
            {
                key.Timestamp = (ulong)index;
                editor.AddPoint(key, value);
            }
            editor.Commit();
        }

        // Scan the committed data from a read snapshot and tally the points.
        using (SortedTreeTableReadSnapshot<HistorianKey, HistorianValue> snapshot = table.BeginRead())
        using (SortedTreeScannerBase<HistorianKey, HistorianValue> scanner = snapshot.GetTreeScanner())
        {
            int pointsRead = 0;
            scanner.SeekToStart();
            while (scanner.Read(key, value))
            {
                pointsRead++;
            }
            System.Console.WriteLine(pointsRead.ToString());
        }
    }
}
public void TestBulkRolloverFile()
{
    // Measures bulk write throughput: streams 20 million sequential points
    // into a 4 KB-block in-memory archive and prints the total point count
    // followed by the write rate in millions of points per second.
    // FIX: the rate previously used a bare magic number 20 that silently
    // duplicated the 20,000,000 point count; derive it from one constant.
    const int PointCount = 20000000;

    // Reset global I/O counters so any later inspection reflects this run only.
    Stats.LookupKeys = 0;
    DiskIoSession.ReadCount = 0;
    DiskIoSession.WriteCount = 0;
    Stats.ChecksumCount = 0;
    DiskIoSession.Lookups = 0;
    DiskIoSession.CachedLookups = 0;

    long cnt;
    Stopwatch sw = new Stopwatch();
    sw.Start();

    using (SortedTreeTable<HistorianKey, HistorianValue> af = SortedTreeFile.CreateInMemory(blockSize: 4096).OpenOrCreateTable<HistorianKey, HistorianValue>(HistorianFileEncodingDefinition.TypeGuid))
    {
        using (SortedTreeTableEditor<HistorianKey, HistorianValue> edit = af.BeginEdit())
        {
            edit.AddPoints(new PointStreamSequentialPoints(1, PointCount));
            edit.Commit();
        }
        sw.Stop();

        cnt = af.Count();
        System.Console.WriteLine(cnt);
    }

    // PointCount / 1000000 == 20 (integer division), matching the original output.
    System.Console.WriteLine((float)(PointCount / 1000000 / sw.Elapsed.TotalSeconds));
}
public void EnduranceTest()
{
    // Repeatedly commits small batches of points and verifies after every
    // commit that the table's FirstKey/LastKey bounds track the data.
    // FIX: Assert.AreEqual takes (expected, actual) — the original passed
    // the actual value first, which produces misleading failure messages;
    // typed ulong literals also avoid int-vs-ulong comparison surprises.
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();

    using (SortedTreeTable<HistorianKey, HistorianValue> target = SortedTreeFile.CreateInMemory().OpenOrCreateTable<HistorianKey, HistorianValue>(EncodingDefinition.FixedSizeCombinedEncoding))
    {
        // NOTE: x is advanced inside the inner loop as well, so each outer
        // iteration writes 10 points with strictly increasing timestamps.
        for (uint x = 0; x < 100; x++)
        {
            using (SortedTreeTableEditor<HistorianKey, HistorianValue> fileEditor = target.BeginEdit())
            {
                for (int y = 0; y < 10; y++)
                {
                    key.Timestamp = x;
                    key.PointID = x;
                    value.Value1 = x;
                    value.Value3 = x;
                    fileEditor.AddPoint(key, value);
                    x++;
                }
                fileEditor.Commit();
            }

            Assert.AreEqual(0UL, target.FirstKey.Timestamp);
            Assert.AreEqual((ulong)(x - 1), target.LastKey.Timestamp);
        }
    }
}
public void TestCompressed()
{
    // Writes 10 million AMI points with the fixed-size encoding, then prints
    // the archive size in megabytes and the write rate in millions of
    // points per second.
    int pointTotal = 10000000;
    Stopwatch timer = new Stopwatch();

    using (SortedTreeFile archive = SortedTreeFile.CreateInMemory())
    using (SortedTreeTable<AmiKey, AmiValue> table = archive.OpenOrCreateTable<AmiKey, AmiValue>(EncodingDefinition.FixedSizeCombinedEncoding))
    using (SortedTreeTableEditor<AmiKey, AmiValue> editor = table.BeginEdit())
    {
        timer.Start();
        AmiKey key = new AmiKey();
        AmiValue value = new AmiValue();
        for (int i = 0; i < pointTotal; i++)
        {
            key.Timestamp = (uint)i;
            editor.AddPoint(key, value);
        }
        editor.Commit();
        timer.Stop();

        Console.WriteLine(archive.ArchiveSize / 1024.0 / 1024.0);
        Console.WriteLine(pointTotal / timer.Elapsed.TotalSeconds / 1000000);
    }
}
SortedTreeTable<HistorianKey, HistorianValue> CreateTable()
{
    // Builds an empty in-memory historian table; the caller owns the
    // returned table and is responsible for disposing it.
    return SortedTreeFile
        .CreateInMemory()
        .OpenOrCreateTable<HistorianKey, HistorianValue>(EncodingDefinition.FixedSizeCombinedEncoding);
}
private TestResults TestRandom(int pageSize, uint count)
{
    // Inserts `count` points with pseudo-random timestamps (fixed seed for
    // repeatability), committing in batches of 5000, then reports the
    // global disk I/O statistics gathered for the given block size.
    Random rng = new Random(1);
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();

    // Reset global counters so the results reflect only this run.
    DiskIoSession.ReadCount = 0;
    DiskIoSession.WriteCount = 0;
    Stats.ChecksumCount = 0;
    DiskIoSession.Lookups = 0;
    DiskIoSession.CachedLookups = 0;

    Stopwatch timer = new Stopwatch();
    timer.Start();

    using (SortedTreeFile file = SortedTreeFile.CreateInMemory(blockSize: pageSize))
    using (SortedTreeTable<HistorianKey, HistorianValue> table = file.OpenOrCreateTable<HistorianKey, HistorianValue>(EncodingDefinition.FixedSizeCombinedEncoding))
    {
        uint batchCount = count / 5000;
        for (uint batch = 0; batch < batchCount; batch++)
        {
            uint batchEnd = batch * 5000 + 5000;
            using (var editor = table.BeginEdit())
            {
                for (ulong x = batch * 5000; x < batchEnd; x++)
                {
                    key.Timestamp = (uint)rng.Next();
                    key.PointID = 2 * x;
                    value.Value3 = 3 * x;
                    value.Value1 = 4 * x;
                    editor.AddPoint(key, value);
                }
                editor.Commit();
            }
        }
    }

    timer.Stop();

    return new TestResults()
    {
        PageSize = pageSize,
        Rate = (float)(count / timer.Elapsed.TotalSeconds / 1000000),
        ReadCount = DiskIoSession.ReadCount,
        WriteCount = DiskIoSession.WriteCount,
        ChecksumCount = Stats.ChecksumCount,
        Lookups = DiskIoSession.Lookups,
        CachedLookups = DiskIoSession.CachedLookups
    };
}
public void AddPointTest()
{
    // Smoke test: a single default key/value pair can be added and committed.
    using (SortedTreeTable<HistorianKey, HistorianValue> target = SortedTreeFile.CreateInMemory().OpenOrCreateTable<HistorianKey, HistorianValue>(EncodingDefinition.FixedSizeCombinedEncoding))
    using (SortedTreeTableEditor<HistorianKey, HistorianValue> editor = target.BeginEdit())
    {
        editor.AddPoint(new HistorianKey(), new HistorianValue());
        editor.Commit();
    }
}
public void CreateSnapshotTest()
{
    // Verifies snapshot isolation: a snapshot acquired before Commit() must
    // see no data, while one acquired after Commit() sees both points.
    // FIX: removed four unused locals (date, pointId, value1, value2) that
    // shadowed the literal values already assigned to key/value.
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();
    key.Timestamp = 1;
    key.PointID = 2;
    value.Value1 = 3;
    value.Value2 = 4;

    using (SortedTreeTable<HistorianKey, HistorianValue> target = SortedTreeFile.CreateInMemory().OpenOrCreateTable<HistorianKey, HistorianValue>(EncodingDefinition.FixedSizeCombinedEncoding))
    {
        SortedTreeTableSnapshotInfo<HistorianKey, HistorianValue> snap1;
        using (SortedTreeTableEditor<HistorianKey, HistorianValue> fileEditor = target.BeginEdit())
        {
            fileEditor.AddPoint(key, value);
            key.Timestamp++;
            fileEditor.AddPoint(key, value);

            // Acquired before Commit(): must not observe the pending edit.
            snap1 = target.AcquireReadSnapshot();
            fileEditor.Commit();
        }

        // Acquired after Commit(): must observe both committed points.
        SortedTreeTableSnapshotInfo<HistorianKey, HistorianValue> snap2 = target.AcquireReadSnapshot();

        using (SortedTreeTableReadSnapshot<HistorianKey, HistorianValue> instance = snap1.CreateReadSnapshot())
        {
            SortedTreeScannerBase<HistorianKey, HistorianValue> scanner = instance.GetTreeScanner();
            scanner.SeekToStart();
            Assert.AreEqual(false, scanner.Read(key, value));
        }

        using (SortedTreeTableReadSnapshot<HistorianKey, HistorianValue> instance = snap2.CreateReadSnapshot())
        {
            SortedTreeScannerBase<HistorianKey, HistorianValue> scanner = instance.GetTreeScanner();
            scanner.SeekToStart();
            Assert.AreEqual(true, scanner.Read(key, value));
            Assert.AreEqual(1uL, key.Timestamp);
            Assert.AreEqual(2uL, key.PointID);
            Assert.AreEqual(3uL, value.Value1);
            Assert.AreEqual(4uL, value.Value2);
        }

        Assert.AreEqual(1uL, target.FirstKey.Timestamp);
        Assert.AreEqual(2uL, target.LastKey.Timestamp);
    }
}
/// <summary>
/// Creates a new <see cref="SortedTreeTable{TKey,TValue}"/> based on the settings passed to this class.
/// Once created, it is up to the caller to make sure that this class is properly disposed of.
/// </summary>
/// <param name="estimatedSize">The estimated size of the file. -1 to ignore this feature and write to the first available directory.</param>
/// <returns>The newly created table; the caller assumes ownership of its disposal.</returns>
public SortedTreeTable<TKey, TValue> CreateArchiveFile(long estimatedSize = -1)
{
    // FIX: doc typo ("up to he caller") corrected and the duplicated
    // OpenOrCreateTable call collapsed into a single shared exit point.
    using (m_lock.EnterReadLock())
    {
        SortedTreeFile af;
        if (m_settings.IsMemoryArchive)
        {
            af = SortedTreeFile.CreateInMemory(blockSize: 4096, flags: m_settings.Flags.ToArray());
        }
        else
        {
            // Pick a directory that has enough free space for the estimate.
            string fileName = CreateArchiveName(GetPathWithEnoughSpace(estimatedSize));
            af = SortedTreeFile.CreateFile(fileName, blockSize: 4096, flags: m_settings.Flags.ToArray());
        }
        return af.OpenOrCreateTable<TKey, TValue>(m_settings.EncodingMethod);
    }
}
public void BenchmarkWriteSpeed()
{
    // Measures raw insert throughput: ten million points written inside a
    // single edit session that is deliberately rolled back (nothing is
    // committed), so only AddPoint speed is measured.
    // FIX: the table is now disposed via `using` (consistent with the other
    // methods in this file) and the dead commented-out stats code removed.
    DebugStopwatch sw = new DebugStopwatch();
    double time;
    double count = 0;

    using (SortedTreeFile file = SortedTreeFile.CreateInMemory())
    using (var table = file.OpenOrCreateTable<HistorianKey, HistorianValue>(HistorianFileEncodingDefinition.TypeGuid))
    {
        HistorianKey key = new HistorianKey();
        HistorianValue value = new HistorianValue();
        time = sw.TimeEvent(() =>
        {
            count = 0;
            using (var edit = table.BeginEdit())
            {
                for (uint x = 0; x < 10000000; x++)
                {
                    key.PointID = x;
                    edit.AddPoint(key, value);
                    count++;
                }
                // Roll back: the benchmark only cares about write speed.
                edit.Rollback();
            }
        });
    }

    Console.WriteLine($"{count / 1000000 / time} Million PPS");
}
public void TestRollover()
{
    // Writes one million points to a source archive, then streams them
    // through a tree scanner into a second archive whose encoding is built
    // from the same key/value method.
    int count = 1000000;
    Stopwatch sw = new Stopwatch();

    using (SortedTreeFile sourceFile = SortedTreeFile.CreateInMemory())
    using (SortedTreeTable<AmiKey, AmiKey> sourceTable = sourceFile.OpenOrCreateTable<AmiKey, AmiKey>(EncodingDefinition.FixedSizeCombinedEncoding))
    {
        using (SortedTreeTableEditor<AmiKey, AmiKey> editor = sourceTable.BeginEdit())
        {
            sw.Start();
            AmiKey key = new AmiKey();
            AmiKey value = new AmiKey();
            for (int i = 0; i < count; i++)
            {
                key.Timestamp = (uint)i;
                editor.AddPoint(key, value);
            }
            editor.Commit();
            sw.Stop();

            Console.WriteLine(sourceFile.ArchiveSize / 1024.0 / 1024.0);
            Console.WriteLine(count / sw.Elapsed.TotalSeconds / 1000000);
        }

        EncodingDefinition rolloverEncoding = new EncodingDefinition(
            EncodingDefinition.FixedSizeCombinedEncoding.KeyValueEncodingMethod,
            EncodingDefinition.FixedSizeCombinedEncoding.KeyValueEncodingMethod);

        using (SortedTreeFile destinationFile = SortedTreeFile.CreateInMemory())
        using (SortedTreeTable<AmiKey, AmiKey> destinationTable = destinationFile.OpenOrCreateTable<AmiKey, AmiKey>(rolloverEncoding))
        using (SortedTreeTableEditor<AmiKey, AmiKey> editor = destinationTable.BeginEdit())
        {
            using (SortedTreeTableReadSnapshot<AmiKey, AmiKey> snapshot = sourceTable.BeginRead())
            using (SortedTreeScannerBase<AmiKey, AmiKey> scanner = snapshot.GetTreeScanner())
            {
                scanner.SeekToStart();
                // NOTE: the editor is disposed without Commit(), so the
                // copied points are rolled back; this exercises only the
                // scan-and-stream path, matching the original test.
                editor.AddPoints(scanner);
            }
        }

        Console.WriteLine(count);
    }
}
private TestResults Test(int pageSize)
{
    // Writes one million sequential points (100 batches of 10,000) at the
    // requested block size, counts them back, and returns the timing plus
    // the global I/O counters captured during the run.
    Stats.LookupKeys = 0;
    DiskIoSession.ReadCount = 0;
    DiskIoSession.WriteCount = 0;
    Stats.ChecksumCount = 0;
    DiskIoSession.Lookups = 0;
    DiskIoSession.CachedLookups = 0;

    long pointCount;
    Stopwatch writeTimer = new Stopwatch();
    Stopwatch readTimer = new Stopwatch();
    writeTimer.Start();

    using (SortedTreeTable<HistorianKey, HistorianValue> table = SortedTreeFile.CreateInMemory(blockSize: pageSize).OpenOrCreateTable<HistorianKey, HistorianValue>(EncodingDefinition.FixedSizeCombinedEncoding))
    {
        using (SortedTreeTableEditor<HistorianKey, HistorianValue> editor = table.BeginEdit())
        {
            for (int batch = 0; batch < 100; batch++)
            {
                editor.AddPoints(new PointStreamSequential(batch * 10000, 10000));
            }
            editor.Commit();
        }
        writeTimer.Stop();

        readTimer.Start();
        pointCount = table.Count();
        readTimer.Stop();
    }

    return new TestResults()
    {
        Count = pointCount,
        PageSize = pageSize,
        RateWrite = (float)(1 / writeTimer.Elapsed.TotalSeconds),
        RateRead = (float)(1 / readTimer.Elapsed.TotalSeconds),
        ReadCount = DiskIoSession.ReadCount,
        WriteCount = DiskIoSession.WriteCount,
        ChecksumCount = Stats.ChecksumCount,
        Lookups = DiskIoSession.Lookups,
        CachedLookups = DiskIoSession.CachedLookups
    };
}
public void TestFixed()
{
    // Writes 10 million fixed-size points, prints archive size and write
    // rate, then scans them back, decrementing the counter per point read.
    // A complete round trip therefore prints 0 at the end.
    int remaining = 10000000;
    Stopwatch timer = new Stopwatch();

    using (var archive = SortedTreeFile.CreateInMemory())
    using (var table = archive.OpenOrCreateTable<AmiKey, AmiKey>(EncodingDefinition.FixedSizeCombinedEncoding))
    {
        var key = new AmiKey();
        var value = new AmiKey();

        using (var editor = table.BeginEdit())
        {
            timer.Start();
            for (int i = 0; i < remaining; i++)
            {
                key.Timestamp = (uint)i;
                editor.AddPoint(key, value);
            }
            editor.Commit();
            timer.Stop();

            Console.WriteLine(archive.ArchiveSize / 1024.0 / 1024.0);
            Console.WriteLine(remaining / timer.Elapsed.TotalSeconds / 1000000);
        }

        // Each point read back cancels one that was written.
        using (var snapshot = table.BeginRead())
        using (var scanner = snapshot.GetTreeScanner())
        {
            scanner.SeekToStart();
            while (scanner.Read(key, value))
            {
                remaining--;
            }
        }

        Console.WriteLine(remaining);
    }
}
SortedTreeTable<HistorianKey, HistorianValue> CreateTable()
{
    // Builds an in-memory table preloaded with 1000 randomly keyed points.
    // Each call advances the shared seed so successive tables differ;
    // the caller owns the returned table.
    var random = new Random(seed++);
    var key = new HistorianKey();
    var value = new HistorianValue();

    var file = SortedTreeFile.CreateInMemory();
    var table = file.OpenOrCreateTable<HistorianKey, HistorianValue>(EncodingDefinition.FixedSizeCombinedEncoding);

    using (var editor = table.BeginEdit())
    {
        for (int i = 0; i < 1000; i++)
        {
            key.Timestamp = (ulong)random.Next();
            key.PointID = (ulong)random.Next();
            key.EntryNumber = (ulong)random.Next();
            editor.AddPoint(key, value);
        }
        editor.Commit();
    }

    return table;
}
public void PartitionFileConstructorTest()
{
    // Verifies an in-memory table can be created and disposed without error;
    // construction and disposal are the entire test.
    using (SortedTreeTable<HistorianKey, HistorianValue> target = SortedTreeFile.CreateInMemory().OpenOrCreateTable<HistorianKey, HistorianValue>(EncodingDefinition.FixedSizeCombinedEncoding))
    {
    }
}
/// <summary>
/// Appends this data to this stage. Also queues up for deletion if necessary.
/// </summary>
/// <param name="args">arguments handed to this class from either the
/// PrestageWriter or another StageWriter of a previous generation</param>
/// <remarks>
/// This method must be called in a single threaded manner.
/// </remarks>
public void AppendData(PrebufferRolloverArgs<TKey, TValue> args)
{
    // Fast-path rejection checks done outside the lock; they are re-checked
    // under the lock below because state can change in between.
    if (m_stopped)
    {
        Log.Publish(MessageLevel.Info, "No new points can be added. Point queue has been stopped. Data in rollover will be lost");
        return;
    }
    if (m_disposed)
    {
        Log.Publish(MessageLevel.Info, "First stage writer has been disposed. Data in rollover will be lost");
        return;
    }

    // Materialize the incoming stream into a fresh in-memory archive table
    // before taking the lock, so the expensive copy runs unsynchronized.
    SortedTreeFile file = SortedTreeFile.CreateInMemory(4096);
    SortedTreeTable<TKey, TValue> table = file.OpenOrCreateTable<TKey, TValue>(m_settings.EncodingMethod);
    using (SortedTreeTableEditor<TKey, TValue> edit = table.BeginEdit())
    {
        edit.AddPoints(args.Stream);
        edit.Commit();
    }

    bool shouldWait = false;
    //If there is data to write then write it to the current archive.
    lock (m_syncRoot)
    {
        // Re-check stop/dispose under the lock; dispose the freshly built
        // table since it will never be registered with the archive list.
        if (m_stopped)
        {
            Log.Publish(MessageLevel.Info, "No new points can be added. Point queue has been stopped. Data in rollover will be lost");
            table.Dispose();
            return;
        }
        if (m_disposed)
        {
            Log.Publish(MessageLevel.Info, "First stage writer has been disposed. Data in rollover will be lost");
            table.Dispose();
            return;
        }

        // Publish the new table to the archive list and track it as a
        // generation-1 pending table.
        using (ArchiveListEditor<TKey, TValue> edit = m_list.AcquireEditLock())
        {
            edit.Add(table);
        }
        m_pendingTables1.Add(table);

        // Generation-1 consolidation: once 10 tables accumulate, union-merge
        // them into one table, publish it, and retire the 10 originals.
        if (m_pendingTables1.Count == 10)
        {
            using (UnionTreeStream<TKey, TValue> reader = new UnionTreeStream<TKey, TValue>(m_pendingTables1.Select(x => new ArchiveTreeStreamWrapper<TKey, TValue>(x)), true))
            {
                SortedTreeFile file1 = SortedTreeFile.CreateInMemory(4096);
                SortedTreeTable<TKey, TValue> table1 = file1.OpenOrCreateTable<TKey, TValue>(m_settings.EncodingMethod);
                using (SortedTreeTableEditor<TKey, TValue> edit = table1.BeginEdit())
                {
                    edit.AddPoints(reader);
                    edit.Commit();
                }
                using (ArchiveListEditor<TKey, TValue> edit = m_list.AcquireEditLock())
                {
                    //Add the newly created file.
                    edit.Add(table1);
                    foreach (SortedTreeTable<TKey, TValue> table2 in m_pendingTables1)
                    {
                        edit.TryRemoveAndDelete(table2.ArchiveId);
                    }
                }
                m_pendingTables2.Add(table1);
                m_pendingTables1.Clear();
            }
        }

        // Generation-2 consolidation: identical pattern one level up; the
        // merged result graduates into m_pendingTables3.
        if (m_pendingTables2.Count == 10)
        {
            using (UnionTreeStream<TKey, TValue> reader = new UnionTreeStream<TKey, TValue>(m_pendingTables2.Select(x => new ArchiveTreeStreamWrapper<TKey, TValue>(x)), true))
            {
                SortedTreeFile file1 = SortedTreeFile.CreateInMemory(4096);
                SortedTreeTable<TKey, TValue> table1 = file1.OpenOrCreateTable<TKey, TValue>(m_settings.EncodingMethod);
                using (SortedTreeTableEditor<TKey, TValue> edit = table1.BeginEdit())
                {
                    edit.AddPoints(reader);
                    edit.Commit();
                }
                using (ArchiveListEditor<TKey, TValue> edit = m_list.AcquireEditLock())
                {
                    //Add the newly created file.
                    edit.Add(table1);
                    foreach (SortedTreeTable<TKey, TValue> table2 in m_pendingTables2)
                    {
                        edit.TryRemoveAndDelete(table2.ArchiveId);
                    }
                }
                m_pendingTables3.Add(table1);
                m_pendingTables2.Clear();
            }
        }

        m_lastCommitedSequenceNumber.Value = args.TransactionId;

        // Combined size of generation 1+2 pending tables in megabytes
        // (right-shift by 20 divides by 1,048,576).
        long currentSizeMb = (m_pendingTables1.Sum(x => x.BaseFile.ArchiveSize) + m_pendingTables2.Sum(x => x.BaseFile.ArchiveSize)) >> 20;
        if (currentSizeMb > m_settings.MaximumAllowedMb)
        {
            // Hard limit reached: start a rollover and block the caller
            // (below, outside the lock) until it completes.
            shouldWait = true;
            m_rolloverTask.Start();
            m_rolloverComplete.Reset();
        }
        else if (currentSizeMb > m_settings.RolloverSizeMb)
        {
            // Soft limit: kick off a rollover now but do not block.
            m_rolloverTask.Start();
        }
        else
        {
            // Under both limits: schedule the rollover on its normal interval.
            m_rolloverTask.Start(m_settings.RolloverInterval);
        }
    }

    // Raise the commit notification outside the lock.
    if (SequenceNumberCommitted != null)
    {
        SequenceNumberCommitted(args.TransactionId);
    }

    if (shouldWait)
    {
        Log.Publish(MessageLevel.NA, MessageFlags.PerformanceIssue, "Queue is full", "Rollover task is taking a long time. A long pause on the inputs is about to occur.");
        m_rolloverComplete.WaitOne();
    }
}