public void CouldWriteToStorage() {
    var repo = new SeriesStorage(SeriesStorage.GetDefaultConnectionString("../StorageTests.db"));
    var test = new SortedMap<DateTime, double>();
    for (int i = 0; i < 10; i++) {
        test.Add(DateTime.UtcNow.Date.AddSeconds(i), i);
    }
    test.Complete();

    foreach (var kvp in test.Map(x => (decimal)x)) {
        Console.WriteLine($"{kvp.Key} - {kvp.Key.Kind} - {kvp.Value}");
    }

    var storageSeries = repo.GetPersistentOrderedMap<DateTime, decimal>("test_series_CouldWriteToStorage");
    var test2 = storageSeries.ToSortedMap();
    foreach (var kvp in test2) {
        Console.WriteLine($"{kvp.Key} - {kvp.Key.Kind} - {kvp.Value}");
    }

    storageSeries.Append(test.Map(x => (decimal)x), AppendOption.RequireEqualOverlap);
    storageSeries.Flush();
}
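// A minimal companion check (a sketch, not part of the original test): re-open the same storage
// file and confirm that the appended decimal series reads back. It only uses calls that already
// appear above (SeriesStorage, GetPersistentOrderedMap, Last, enumeration); the method name is hypothetical.
public void CouldReadBackAfterWriteSketch() {
    var repo = new SeriesStorage(SeriesStorage.GetDefaultConnectionString("../StorageTests.db"));
    var readBack = repo.GetPersistentOrderedMap<DateTime, decimal>("test_series_CouldWriteToStorage");
    // The last value appended by CouldWriteToStorage was 9 mapped to decimal,
    // so the tail of the stored series should reflect that.
    Console.WriteLine($"Last stored: {readBack.Last.Key} - {readBack.Last.Value}");
    foreach (var kvp in readBack) {
        Console.WriteLine($"{kvp.Key} - {kvp.Value}");
    }
}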
private static void Main2(string[] args) {
    // from Ractor.Persistence
    var store = new SeriesStorage(SeriesStorage.GetDefaultConnectionString("TAQSample.db"));

    var aapl = store.GetPersistentOrderedMap<DateTime, TaqTrade>("aapl")
        .Map(t => t.TradePrice / 10000.0);
    Console.WriteLine("Count: " + aapl.Count());
    Console.WriteLine("Open: " + aapl.First.Value);
    Console.WriteLine("High: " + aapl.Values.Max());
    Console.WriteLine("Low: " + aapl.Values.Min());
    Console.WriteLine("Close: " + aapl.Last.Value);
    Console.WriteLine("Average price: " + aapl.Values.Average());
    Console.WriteLine("Sum of prices: " + aapl.Values.Sum());
    // https://uk.finance.yahoo.com/q/hp?s=AAPL&b=5&a=07&c=2015&e=5&d=07&f=2015&g=d

    var msft = store.GetPersistentOrderedMap<DateTime, TaqTrade>("msft")
        .Map(t => t.TradePrice / 10000.0);
    var spread = (aapl.Repeat() / msft.Repeat() - 1.0).ToSortedMap();

    Console.ReadLine();
}
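// Hedged usage sketch (not in the original sample): the spread series built above is never printed.
// Assuming SortedMap<DateTime, double> exposes First/Last the same way the aapl series does, a helper
// like this could be called with the local `spread` before Console.ReadLine(); the method name is hypothetical.
private static void PrintSpreadSummary(SortedMap<DateTime, double> spread) {
    Console.WriteLine("Spread points: " + spread.Count());
    Console.WriteLine("First spread: " + spread.First.Value); // relative AAPL/MSFT spread at the first repeated tick
    Console.WriteLine("Last spread: " + spread.Last.Value);
}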
public void CouldAddValuesByKey() {
    var repo = new SeriesStorage(SeriesStorage.GetDefaultConnectionString("../StorageTests.db"));
    var series = repo.GetPersistentOrderedMap<DateTime, decimal>("test_series_CouldAddValuesByKey");
    var test2 = series.Map(x => (double)x);

    series.RemoveAll();
    //series.RemoveMany(DateTime.Today.AddHours(-6), Lookup.GE);

    for (int i = 0; i < 10; i++) {
        series.Add(DateTime.Today.AddMinutes(i), i);
    }
    series.Flush();

    for (int i = 10; i < 100; i++) {
        series[DateTime.Today.AddMinutes(i)] = i;
    }
    series.Flush();

    series[DateTime.Today.AddMinutes(100)] = 100;
    Console.WriteLine(test2.Last.Key + " " + test2.Last.Value);
    series[DateTime.Today.AddMinutes(1000)] = 1000;
}
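// Cleanup sketch (an assumption, mirroring the commented-out RemoveMany call above): remove every
// entry at or after a cutoff key and persist the change. The method name and cutoff are hypothetical.
public void CouldRemoveTailSketch() {
    var repo = new SeriesStorage(SeriesStorage.GetDefaultConnectionString("../StorageTests.db"));
    var series = repo.GetPersistentOrderedMap<DateTime, decimal>("test_series_CouldAddValuesByKey");
    // Lookup.GE selects the cutoff key itself and everything greater.
    series.RemoveMany(DateTime.Today.AddMinutes(100), Lookup.GE);
    series.Flush();
}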
static unsafe void Main(string[] args) {
    GC.Collect(3, GCCollectionMode.Forced, true);

    var store = new SeriesStorage(SeriesStorage.GetDefaultConnectionString("TAQSample2.db"));
    var date = new DateTime(2015, 8, 5);

    var tsize = Marshal.SizeOf(typeof(TaqTrade));
    Console.WriteLine(tsize);

    // `path` points to the zipped TAQ daily trades file and is defined elsewhere in this sample.
    var zip = ZipFile.OpenRead(path);
    var stream = zip.Entries.Single().Open();
    var seriesDictionary = new Dictionary<string, IPersistentOrderedMap<DateTime, TaqTrade>>();

    using (BufferedStream bs = new BufferedStream(stream, 2 * 1024 * 1024))
    using (var reader = new StreamReader(bs, Encoding.ASCII)) {
        byte[] compressedBuffer = null;
        var byteBuffer = new byte[106];
        int len;

        // read and display the header line
        var line = reader.ReadLine();
        len = bs.ReadLineIntoBuffer(byteBuffer);
        Console.WriteLine(line);
        Console.WriteLine("Press enter to continue");
        Console.ReadLine();

        var sw = new Stopwatch();
        sw.Start();
        var c = 0;
        while ((len = bs.ReadLineIntoBuffer(byteBuffer)) != 0) // && c < 100
        {
            var fb = new FixedBuffer(byteBuffer, 0, len);
            var trade = new TaqTrade(date, fb);
            var symbol = trade.Symbol.ToLowerInvariant().Trim();

            IPersistentOrderedMap<DateTime, TaqTrade> series;
            if (!seriesDictionary.TryGetValue(symbol, out series)) {
                series = store.GetPersistentOrderedMap<DateTime, TaqTrade>(symbol);
                seriesDictionary[symbol] = series;
            }
            series[trade.Time] = trade;

            c++;
            if (c % 100000 == 0) {
                Console.WriteLine($"Read so far: {c}");
                foreach (var s in seriesDictionary) {
                    s.Value.Flush();
                }
            }
        }
        sw.Stop();

        foreach (var series in seriesDictionary) {
            series.Value.Flush();
        }
        Console.WriteLine($"Lines read: {c} in msecs: {sw.ElapsedMilliseconds}");
    }

    Console.WriteLine("Finished");
    GC.Collect(3, GCCollectionMode.Forced, true);
    Console.WriteLine($"Total memory: {GC.GetTotalMemory(true)}");
    Console.ReadLine();
}
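// Sketch (an assumption, not the library's implementation): ReadLineIntoBuffer above is assumed to
// behave roughly like the helper below, reading one '\n'-terminated line from the stream into the
// caller-supplied byte buffer and returning the number of bytes read (0 at end of stream).
// Requires System.IO; the class name is hypothetical.
internal static class StreamReadExtensionsSketch {
    public static int ReadLineIntoBuffer(this Stream stream, byte[] buffer) {
        var len = 0;
        int b;
        while ((b = stream.ReadByte()) >= 0) {
            if (b == '\n') return len;   // line terminator: return the bytes read so far
            if (b == '\r') continue;     // skip CR in CRLF line endings
            buffer[len++] = (byte)b;     // append the byte to the caller-supplied buffer
        }
        return len;                      // last line without a trailing newline, or 0 at EOF
    }
}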
public void CouldCRUDSeriesStorage() {
    var storage = new SeriesStorage("Filename=../benchmark.db"); // SeriesStorage.Default;
    var timeseries = storage.GetPersistentOrderedMap<DateTime, double>("test_timeseries");
    Console.WriteLine(storage.Connection.DataSource);

    var start = DateTime.UtcNow;
    Console.WriteLine($"Started at: {start}");
    if (!timeseries.IsEmpty) {
        // Remove all values
        timeseries.RemoveMany(timeseries.First.Key, Lookup.GE);
    }

    var sw = new Stopwatch();
    var count = 10000000L;
    Console.WriteLine($"Count: {count}");
    var date = DateTime.UtcNow.Date;
    var rng = new Random();

    sw.Start();
    for (long i = 0; i < count; i++) {
        timeseries.Add(date, Math.Round(i + rng.NextDouble(), 2));
        date = date.AddTicks(rng.Next(1, 100));
        if (i % 1000000 == 0) {
            var msec = (DateTime.UtcNow - start).TotalMilliseconds;
            var mops = i * 0.001 / msec;
            Console.WriteLine($"Wrote: {i} - {Math.Round((i * 1.0) / (count * 1.0), 4) * 100.0}% in {msec / 1000} sec, Mops: {mops}");
        }
    }
    timeseries.Flush();
    Console.WriteLine($"Wrote: {count} - 100%");
    Console.WriteLine($"Finished at: {DateTime.UtcNow}");
    sw.Stop();
    Console.WriteLine($"Writes, Mops: {count * 0.001 / sw.ElapsedMilliseconds}");

    sw.Restart();
    var sum = 0.0;
    var storage2 = new SeriesStorage("Filename=../benchmark.db"); // $"Filename={Path.Combine(Bootstrap.Bootstrapper.Instance.DataFolder, "default.db")}");
    var timeseries2 = storage2.GetPersistentOrderedMap<DateTime, double>("test_timeseries");
    foreach (var kvp in timeseries2) {
        sum += kvp.Value;
    }
    Assert.IsTrue(sum > 0);
    sw.Stop();
    Console.WriteLine($"Reads, Mops: {count * 0.001 / sw.ElapsedMilliseconds}");

    var _connection = new SqliteConnection("Filename=../benchmark.db"); //$"Filename={Path.Combine(Bootstrap.Bootstrapper.Instance.DataFolder, "default.db")}");
    var sqlCount = _connection.ExecuteScalar<long>(
        $"SELECT sum(count) FROM {storage.ChunkTableName} where id = (SELECT id from {storage.IdTableName} where TextId = 'test_timeseries'); ");
    Console.WriteLine($"Count in SQLite: {sqlCount}");
    Assert.AreEqual(count, sqlCount);

    var sqlSize = _connection.ExecuteScalar<long>(
        $"SELECT sum(length(ChunkValue)) FROM {storage.ChunkTableName} where id = (SELECT id from {storage.IdTableName} where TextId = 'test_timeseries'); ");
    Console.WriteLine($"Memory size: {count * 16L}; SQLite net blob size: {sqlSize}; comp ratio: {Math.Round(count * 16.0 / sqlSize * 1.0, 2)}");
}