// Verifies that SMA can be consumed from a live (still-mutating) series: a background
// producer appends values with delays while the foreground blocks on async moves.
// NOTE(review): this method appears verbatim twice in this file — consider deduplicating.
public void CouldCalculateSMAInRealTime()
{
    var sm = new SortedMap<int, double>();
    // Producer: 20 values immediately, then 80 more with ~1ms pauses, then complete.
    Task.Run(async () =>
    {
        for (int i = 0; i < 20; i++)
        {
            sm.Add(i, i);
        }
        await Task.Delay(100);
        for (int i = 20; i < 100; i++)
        {
            await Task.Delay(1); // 15 msec
            sm.Add(i, i);
        }
        sm.Complete();
    });
    // Consumer: blocking on .Result here is deliberate for the test; the loop ends
    // only after Complete() makes the async MoveNext return false.
    var sma = sm.SMA(10, true);
    var c = sma.GetCursor();
    while (c.MoveNext(CancellationToken.None).Result)
    {
        Console.WriteLine("Key: {0}, value: {1}", c.CurrentKey, c.CurrentValue);
    }
}
// A slow producer adds `count` items and completes the map while a concurrent
// consumer drains a zipped projection (x*x + x*x) through an async cursor;
// the consumer must observe exactly `count` elements.
public void CouldConsumeMapAsync()
{
    var count = 10;
    var sm = new SortedMap<int, int>();
    var zipMap = sm.Map(x => x * x).Zip(sm.Map(x => x * x), (l, r) => l + r);
    var t1 = Task.Run(async () =>
    {
        for (int i = 0; i < count; i++)
        {
            await Task.Delay(250);
            var _ = await sm.TryAdd(i, i);
        }
        await sm.Complete();
    });
    var cnt = 0;
    var t2 = Task.Run(async () =>
    {
        var c = zipMap.GetAsyncCursor();
        while (await c.MoveNextAsync())
        {
            cnt++;
            Console.WriteLine($"{c.CurrentKey} - {c.CurrentValue}");
        }
    });
    Task.WhenAll(t1, t2).Wait();
    Assert.AreEqual(count, cnt);
}
// Benchmarks Add throughput into a subscribed observer and checks completion
// semantics: IsCompleted must be false before Complete() and true after.
public void CouldCompleteObserverBenchmark()
{
    var count = 10000000;
    var map = new SortedMap<int, int>(count);
    var subscriber = new SumValuesObserver(true);
    map.Subscribe(subscriber);
    // NOTE(review): expectedSum is a 32-bit int and wraps for 10M increments
    // (sum ~5e13); the final AreEqual presumably passes only because
    // subscriber.Sum wraps identically — confirm Sum's type before changing this to long.
    var expectedSum = 0;
    var sw = new Stopwatch();
    sw.Start();
    for (int i = 0; i < count; i++)
    {
        map.Add(i, i);
        expectedSum += i;
    }
    Assert.IsFalse(subscriber.IsCompleted);
    map.Complete();
    Assert.IsTrue(subscriber.Completed.Result);
    sw.Stop();
    Console.WriteLine($"Elapsed: {sw.ElapsedMilliseconds}");
    Console.WriteLine($"MOps: {(count * 0.001) / sw.ElapsedMilliseconds}");
    Assert.AreEqual(expectedSum, subscriber.Sum);
    Assert.IsTrue(subscriber.IsCompleted);
}
// ZipN with a (key, values[]) selector over three completed series; enumerates the
// zipped result 10 rounds under the benchmark harness and prints the running sum.
public async Task CouldUseZipNSelector()
{
    var sm1 = new SortedMap<int, double>();
    var sm2 = new SortedMap<int, double>();
    var sm3 = new SortedMap<int, double>();
    double expected = 0;
    sm1.Add(0, 0);
    sm2.Add(0, 0);
    sm3.Add(0, 0);
    var count = 10_000_000;
    // Keys start at 2, so key 1 exists in no series; key 0 contributes zeros.
    for (int i = 2; i < count; i++)
    {
        expected += i + 2 * i + 3 * i;
        sm1.Add(i, i);
        sm2.Add(i, 2 * i);
        sm3.Add(i, 3 * i);
    }
    await sm1.Complete();
    await sm2.Complete();
    await sm3.Complete();
    //var result = new[] {sm1, sm2, sm3}.Zip((int k, Span<double> sp) => { return sp[0] + sp[1] + sp[2]; });
    var result = new[] { sm1, sm2, sm3 }.Zip((key, values) => { return (values[0] + values[1] + values[2]); });
    // var resultX = result + 1;
    var sum = 0.0;
    var c = 0L;
    for (int r = 0; r < 10; r++)
    {
        using (Benchmark.Run("ZipN", count))
        {
#if NETCOREAPP3_0
            // await
#endif
            foreach (var item in result)
            {
                sum += item.Value;
                c++;
            }
        }
    }
    Benchmark.Dump();
    Console.WriteLine(sum);
    Console.WriteLine("Total " + c.ToString("N"));
}
// Verifies that SMA can be consumed from a live (still-mutating) series.
// NOTE(review): byte-for-byte duplicate of an earlier method in this file — dedupe.
public void CouldCalculateSMAInRealTime()
{
    var sm = new SortedMap<int, double>();
    // Producer: 20 values immediately, then 80 more with ~1ms pauses, then complete.
    Task.Run(async () =>
    {
        for (int i = 0; i < 20; i++)
        {
            sm.Add(i, i);
        }
        await Task.Delay(100);
        for (int i = 20; i < 100; i++)
        {
            await Task.Delay(1); // 15 msec
            sm.Add(i, i);
        }
        sm.Complete();
    });
    // Consumer blocks on the async cursor until Complete() ends the stream.
    var sma = sm.SMA(10, true);
    var c = sma.GetCursor();
    while (c.MoveNext(CancellationToken.None).Result)
    {
        Console.WriteLine("Key: {0}, value: {1}", c.CurrentKey, c.CurrentValue);
    }
}
// BatchMapValuesCursor without a batch function: walk 500 steps forward over a
// completed series mapped with v => v + 1, then walk all the way back with
// MovePrevious, asserting the mapped value at every position.
public void CouldMovePreviousWithoutBatching()
{
    var source = new SortedMap<DateTime, double>();
    var total = 1000;
    for (var idx = 0; idx < total; idx++)
    {
        source.Add(DateTime.UtcNow.Date.AddSeconds(idx), idx);
    }
    source.Complete();

    var cursor = new BatchMapValuesCursor<DateTime, double, double>(source.GetCursor, v => v + 1.0);

    // Forward through the first half, checking each mapped value.
    var position = 0;
    while (position < 500 && cursor.MoveNext())
    {
        Assert.AreEqual(position + 1.0, cursor.CurrentValue);
        position++;
    }
    position--;

    // Backward to the start; every step must still produce value + 1.
    while (cursor.MovePrevious())
    {
        position--;
        Assert.AreEqual(position + 1.0, cursor.CurrentValue);
    }
    Assert.AreEqual(0, position);
}
// Round-trips a small series through SeriesStorage: builds a completed test map,
// prints its decimal projection, reads back the persisted series, then appends
// with RequireEqualOverlap and flushes.
public void CouldWriteToStorage()
{
    var repo = new SeriesStorage(SeriesStorage.GetDefaultConnectionString("../StorageTests.db"));
    var test = new SortedMap<DateTime, double>();
    for (int i = 0; i < 10; i++)
    {
        test.Add(DateTime.UtcNow.Date.AddSeconds(i), i);
    }
    test.Complete();
    foreach (var kvp in test.Map(x => (decimal)x))
    {
        Console.WriteLine($"{kvp.Key} - {kvp.Key.Kind} - {kvp.Value}");
    }
    var storageSeries = repo.GetPersistentOrderedMap<DateTime, decimal>("test_series_CouldWriteToStorage");
    var test2 = storageSeries.ToSortedMap();
    foreach (var kvp in test2)
    {
        Console.WriteLine($"{kvp.Key} - {kvp.Key.Kind} - {kvp.Value}");
    }
    storageSeries.Append(test.Map(x => (decimal)x), AppendOption.RequireEqualOverlap);
    storageSeries.Flush();
}
// BatchMapValuesCursor with a batch function (IncrementMap): move 500 steps
// forward, then walk backward via MoveAt(key, EQ) lookups until the key before
// the first element, asserting the mapped value each time.
public void CouldMoveAtWithBatching()
{
    var source = new SortedMap<DateTime, double>();
    var total = 1000;
    for (var idx = 0; idx < total; idx++)
    {
        source.Add(DateTime.UtcNow.Date.AddSeconds(idx), idx);
    }
    source.Complete();

    var cursor = new BatchMapValuesCursor<DateTime, double, double>(source.GetCursor, v => v + 1.0, this.IncrementMap);

    var position = 0;
    while (position < 500 && cursor.MoveNext())
    {
        Assert.AreEqual(position + 1.0, cursor.CurrentValue);
        position++;
    }
    position--;

    // MoveAt with exact-match lookup; fails once position reaches -1.
    while (cursor.MoveAt(DateTime.UtcNow.Date.AddSeconds(position), Lookup.EQ))
    {
        Assert.AreEqual(position + 1.0, cursor.CurrentValue);
        position--;
    }
    Assert.AreEqual(-1, position);
}
// Async MoveNext across writes that arrive after the cursor is created: a background
// consumer expects exactly two successful moves then a false after Complete().
// The TaskCompletionSource gates the writer until the cursor exists.
public void CouldMoveNextAsyncWhenChangingOrder_NoSemaphore()
{
    var cts = new CancellationTokenSource();
    var ct = cts.Token;
    var sm = new SortedMap<int, int>();
    sm.IsSynchronized = true;
    var tcs = new TaskCompletionSource<bool>();
    var sumTask = Task.Run(async () =>
    {
        var c = sm.GetCursor();
        tcs.SetResult(true);
        Assert.IsTrue(await c.MoveNext(ct));
        // here we change order
        Assert.IsTrue(await c.MoveNext(ct));
        Assert.IsFalse(await c.MoveNext(ct));
    });
    tcs.Task.Wait(ct);
    sm.Add(1, 1);
    Thread.Sleep(100);
    //sm.Add(0, 0); // will through OOO
    sm.Add(2, 2);
    //sm.Add(3, 3);
    sm.Complete();
    sumTask.Wait(ct);
}
/// <summary>
/// Get history of offsets with keys as zoned time. Used to convert from zoned to UTC time.
/// </summary>
/// <param name="tzFrom">Time-zone id or alias resolvable via the Normalizer map.</param>
/// <param name="standardOffsetOnly">When true, record standard offsets and ignore DST wall offsets.</param>
/// <returns>Completed map of local interval starts to offset ticks.</returns>
public static SortedMap<DateTime, long> GetOffsetsFromZoned(string tzFrom, bool standardOffsetOnly = false)
{
    // Resolve aliases; fall back to the raw input when no alias is registered.
    string tz;
    if (!Normalizer.TryGetValue(tzFrom.ToLowerInvariant(), out tz))
    {
        tz = tzFrom;
    }

    var offsets = new SortedMap<DateTime, long>();
    if (tz.ToLowerInvariant() == "utc")
    {
        // UTC never changes offset: a single zero entry valid from the beginning of time.
        offsets.Set(new DateTime(0L, DateTimeKind.Unspecified), 0);
    }
    else
    {
        var zone = DateTimeZoneProviders.Tzdb[tz];
        // https://en.wikipedia.org/wiki/International_Meridian_Conference
        var since = Instant.FromDateTimeUtc(new DateTime(1884, 10, 22, 12, 0, 0, DateTimeKind.Utc));
        foreach (var interval in zone.GetZoneIntervals(since, Instant.MaxValue))
        {
            var localStart = interval.IsoLocalStart.ToDateTimeUnspecified();
            var offset = standardOffsetOnly ? interval.StandardOffset : interval.WallOffset;
            offsets.TryAddLast(localStart, offset.Ticks);
        }
    }
    offsets.Complete();
    return offsets;
}
// Async MoveNext smoke test: a background writer appends 10 items and completes
// while the foreground async cursor drains them; the sum must be positive
// (0+1+...+9 = 45 when all items are observed).
public async Task MNATest()
{
    var sm = new SortedMap<int, int>();
    var count = 1_0;
    var sum = 0;
    using (Benchmark.Run("MNA"))
    {
        var _ = Task.Run(() =>
        {
            for (int i = 0; i < count; i++)
            {
                sm.TryAddLast(i, i);
            }
            sm.Complete();
        });
        var c = sm.GetCursor();
        while (await c.MoveNextAsync())
        {
            sum += c.CurrentValue;
        }
    }
    Assert.IsTrue(sum > 0);
    Benchmark.Dump();
}
// Runs the random-check zip correctness test over ten deterministic seeds:
// each round builds two completed maps with random key gaps (1..10) and
// delegates verification to ContinuousZipIsCorrectByRandomCheck.
public void ContinuousZipIsCorrectByRandomCheckBenchmark()
{
    for (int seed = 0; seed < 10; seed++)
    {
        var left = new SortedMap<int, int>();
        var right = new SortedMap<int, int>();
        var rand = new Random(seed);
        var leftKey = 0;
        var rightKey = 0;
        for (int n = 0; n < 100000; n = n + 1)
        {
            leftKey = leftKey + rand.Next(1, 11);
            left.Add(leftKey, leftKey);
            rightKey = rightKey + rand.Next(1, 11);
            right.Add(rightKey, rightKey);
        }
        left.Complete();
        right.Complete();
        ContinuousZipIsCorrectByRandomCheck(left, right, seed);
    }
    Benchmark.Dump();
}
// BatchMapValuesCursor without a batch function over a completed series:
// the first 500 moves use the sync MoveNext, the remainder use the async
// MoveNext (blocking on .Result), which drains and returns false because the
// series is already complete.
public void CouldMoveNextWithoutBatching()
{
    var source = new SortedMap<DateTime, double>();
    var total = 1000;
    for (var idx = 0; idx < total; idx++)
    {
        source.Add(DateTime.UtcNow.Date.AddSeconds(idx), idx);
    }
    source.Complete();

    var cursor = new BatchMapValuesCursor<DateTime, double, double>(source.GetCursor, v => v + 1.0);

    var seen = 0;
    while (seen < 500 && cursor.MoveNext())
    {
        Assert.AreEqual(seen + 1.0, cursor.CurrentValue);
        seen++;
    }
    // Setting IsMutable to false allows us to skip this check: c < 1000 &&
    while (cursor.MoveNext(CancellationToken.None).Result)
    {
        Assert.AreEqual(seen + 1.0, cursor.CurrentValue);
        seen++;
    }
    Assert.AreEqual(total, seen);
}
// Clones a ZipLag cursor and checks the clone tracks the original through
// MoveNext/MovePrevious; then verifies TryGetValue at every key, and finally
// re-runs the lagged sum asynchronously against a map populated after the
// cursor was created.
// NOTE(review): this method appears twice in this file — consider deduplicating.
public void CouldCloneZipLagSeries()
{
    var count = 1000;
    var sm = new SortedMap<int, double>();
    for (int i = 0; i < count; i++)
    {
        sm.Add(i, i);
    }
    // slow implementation
    var sw = new Stopwatch();
    sw.Start();
    var zipLag = sm.ZipLag(1, (cur, prev) => cur + prev); //.ToSortedMap();
    var zc = zipLag.GetCursor();
    zc.MoveNext();
    var zc2 = zc.Clone();
    Assert.AreEqual(zc.CurrentKey, zc2.CurrentKey);
    zc.MoveNext();
    zc2.MoveNext();
    Assert.AreEqual(zc.CurrentKey, zc2.CurrentKey);
    zc.MovePrevious();
    zc2.MovePrevious();
    Assert.AreEqual(zc.CurrentKey, zc2.CurrentKey);
    for (int i = 1; i < count; i++)
    {
        var expected = i + i - 1; // current + previous value at key i
        double actual;
        var ok = zc.TryGetValue(i, out actual);
        Assert.AreEqual(expected, actual);
    }
    var sm2 = new SortedMap<int, double>();
    var zc3 = sm2.ZipLag(1, (cur, prev) => cur + prev).GetCursor();
    var t = Task.Run(async () =>
    {
        var c = 1; // first key is missing because we cannot create state at it
        while (await zc3.MoveNext(CancellationToken.None))
        {
            var expected = c + c - 1;
            Assert.AreEqual(expected, zc3.CurrentValue);
            c++;
        }
    });
    for (int i = 0; i < count; i++)
    {
        sm2.Add(i, i);
    }
    sm2.Complete(); // without it MoveNextAsync will wait forever
    t.Wait();
}
// Range (After) over a VariantSeries: checks the comparer survives wrapping and
// ranging, sums the range synchronously, then continues consuming asynchronously
// while a writer appends 50 more values and completes the map.
// FIX: was `public async void` — exceptions in an async void test are unobservable
// and the runner cannot await completion; `async Task` is the supported form.
public async Task RangeOnVariantSeriesWorks()
{
    var sm = new SortedMap<DateTime, int>();
    for (int i = 0; i < 100; i++)
    {
        sm.Add(DateTime.Today.AddSeconds(i), i);
    }
    var vs = new VariantSeries<DateTime, int>(sm);
    Assert.IsTrue(!vs.Comparer.Equals(KeyComparer<Variant>.Default));
    var rs = vs.After(Variant.Create(DateTime.Today.AddSeconds(50)));
    Assert.IsTrue(!rs.Comparer.Equals(KeyComparer<Variant>.Default));
    var expected = 0;
    for (int i = 50; i < 100; i++)
    {
        expected += i;
    }
    var sum = 0;
    foreach (var variantKvp in rs)
    {
        sum += variantKvp.Value.Get<int>();
    }
    Assert.AreEqual(expected, sum);
    // Writer appends 100..149 with small delays, then completes so the async loop ends.
    var t = Task.Run(async () =>
    {
        try
        {
            for (int i = 100; i < 150; i++)
            {
                sm.Add(DateTime.Today.AddSeconds(i), i);
                await Task.Delay(1);
            }
            sm.Complete();
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);
        }
    });
    // Drain the range cursor asynchronously; sum keeps accumulating (unasserted).
    var c = rs.GetCursor();
    while (await c.MoveNext(CancellationToken.None))
    {
        sum += c.CurrentValue.Get<int>();
    }
}
// NOTE(review): a near-identical copy of this benchmark exists later in this file.
public void MoveNextAsyncBenchmark()
{
    // this benchmark shows that simple async enumeration gives 13+ mops,
    // this means than we should use parallel enumeration on joins.
    // the idea is that a chain of calculations could be somewhat heavy, e.g. rp(ma(log(zipLag(c p -> c/p))))
    // but they are optimized for single thread: when movenext is called on the outer cursor,
    // the whole chain enumerates synchronously.
    // During joins, we must make these evaluations parallel. The net overhead of tasks over existing data is visible
    // but not too big, while on real-time stream there is no alternative at all.
    // Join algos should be paralell and task-based by default
    var count = 10000000;
    var sw = new Stopwatch();
    var sm = new SortedMap<DateTime, double>();
    for (int i = 0; i < count; i++)
    {
        sm.Add(DateTime.UtcNow.Date.AddSeconds(i), i);
    }
    sm.Complete();
    sw.Start();
    double sum = 0.0;
    var c = sm.GetCursor();
    // Drain the completed series with async moves; Wait() is safe here because
    // the series is complete, so every move resolves without waiting on writers.
    Task.Run(async () =>
    {
        while (await c.MoveNext(CancellationToken.None))
        {
            sum += c.CurrentValue;
        }
    }).Wait();
    sw.Stop();
    double expectedSum = 0.0;
    for (int i = 0; i < count; i++)
    {
        expectedSum += i;
    }
    Assert.AreEqual(expectedSum, sum);
    Console.WriteLine("Elapsed msec: {0}", sw.ElapsedMilliseconds);
    Console.WriteLine("Ops: {0}", Math.Round(0.000001 * count * 1000.0 / (sw.ElapsedMilliseconds * 1.0), 2));
}
// Benchmarks BatchMapValuesCursor with the MathProviderSample batch path over
// 1000 rounds of a 10k-element completed series; prints throughput and the sum.
public void CouldAddWitDefaultMathProvider()
{
    var sm = new SortedMap<DateTime, double>(4000);
    var count = 10000;
    for (int i = 0; i < count; i++)
    {
        sm.Add(DateTime.UtcNow.Date.AddSeconds(i), i);
    }
    sm.Complete();
    var sw = new Stopwatch();
    sw.Start();
    var sum = 0.0;
    for (int rounds = 0; rounds < 1000; rounds++)
    {
        var bmvc = new BatchMapValuesCursor<DateTime, double, double>(sm.GetCursor, (v) =>
        {
            //Thread.SpinWait(50);
            //var fakeSum = 0;
            //for (int i = 0; i < 100; i++) {
            //    fakeSum += i;
            //}
            //fakeSum = 0;
            return (v + 3.1415926);
        }, MathProviderSample);
        //
        //var bmvc = new MapCursor<DateTime, double, double>(sm.GetCursor, (k,v) => Math.Log(v)) as ICursor<DateTime, double>;
        //
        while (bmvc.MoveNext())
        {
            sum += bmvc.CurrentValue;
        }
    }
    sw.Stop();
    Console.WriteLine("Elapsed msec: {0}", sw.ElapsedMilliseconds);
    Console.WriteLine("Ops: {0}", Math.Round(0.000001 * count * 1000 * 1000.0 / (sw.ElapsedMilliseconds * 1.0), 2));
    Console.WriteLine(sum);
    var c = 0; // NOTE(review): unused except by the commented-out verification loop below
    //foreach (var kvp in sm2) {
    //    Assert.AreEqual(c + 1, kvp.Value);
    //    c++;
    //}
}
// Builds a chained calculation graph (SMA -> deviation -> leverage -> signal ->
// rounded position multiple) over a live series and consumes it asynchronously
// while data is still arriving.
// NOTE(review): this method appears verbatim twice in this file — consider deduplicating.
public void CouldCalculateComplexGraph()
{
    // TODO! need real complex data to test properly
    var sm = new SortedMap<DateTime, double>();
    var dataTask = Task.Run(async () =>
    {
        for (int i = 0; i < 1000; i++)
        {
            sm.Add(DateTime.Today.AddSeconds(i), i + 10000);
            await Task.Delay(25);
        }
        sm.Complete();
    });
    Thread.Sleep(50);
    var closeSeries = sm;
    var baseLeverage = 1.0;
    var sma = closeSeries.SMA(20, true);
    var deviation = sma / closeSeries - 1.0;
    var leverage = (baseLeverage * (-(5.0 * (deviation.Map(x => Math.Abs(x)))) + 1.0));
    var smaSignal = deviation.Map(x => (double)(Math.Sign(x)));
    var smaPositionMultiple = ((smaSignal * leverage).Map(x => 0.25 * (Math.Round(x / 0.25))));
    var smaPositionMultipleMap = smaPositionMultiple.ToSortedMap();
    var traderTask = Task.Run(async () =>
    {
        var positionCursor = smaPositionMultiple.GetCursor();
        while (await positionCursor.MoveNext(CancellationToken.None)) //
        {
            await Task.Delay(15);
            Console.WriteLine("Time: {0}, position: {1}", positionCursor.CurrentKey, positionCursor.CurrentValue);
        }
    });
    dataTask.Wait();
    traderTask.Wait();
}
// Builds a chained calculation graph over a live series and consumes it
// asynchronously while data is still arriving.
// NOTE(review): byte-for-byte duplicate of an earlier method in this file — dedupe.
public void CouldCalculateComplexGraph()
{
    // TODO! need real complex data to test properly
    var sm = new SortedMap<DateTime, double>();
    var dataTask = Task.Run(async () =>
    {
        for (int i = 0; i < 1000; i++)
        {
            sm.Add(DateTime.Today.AddSeconds(i), i + 10000);
            await Task.Delay(25);
        }
        sm.Complete();
    });
    Thread.Sleep(50);
    var closeSeries = sm;
    var baseLeverage = 1.0;
    var sma = closeSeries.SMA(20, true);
    var deviation = sma / closeSeries - 1.0;
    var leverage = (baseLeverage * (-(5.0 * (deviation.Map(x => Math.Abs(x)))) + 1.0));
    var smaSignal = deviation.Map(x => (double)(Math.Sign(x)));
    var smaPositionMultiple = ((smaSignal * leverage).Map(x => 0.25 * (Math.Round(x / 0.25))));
    var smaPositionMultipleMap = smaPositionMultiple.ToSortedMap();
    var traderTask = Task.Run(async () =>
    {
        var positionCursor = smaPositionMultiple.GetCursor();
        while (await positionCursor.MoveNext(CancellationToken.None)) //
        {
            await Task.Delay(15);
            Console.WriteLine("Time: {0}, position: {1}", positionCursor.CurrentKey, positionCursor.CurrentValue);
        }
    });
    dataTask.Wait();
    traderTask.Wait();
}
// Discrete-zip correctness over random data: builds two completed maps with
// random key gaps (seed 9, 1000 elements each) and delegates verification to
// the parameterized DiscreteZipIsCorrectByRandomCheck overload.
public void DiscreteZipIsCorrectByRandomCheck()
{
    var left = new SortedMap<int, int>();
    var right = new SortedMap<int, int>();
    var rand = new Random(9);
    var leftKey = 0;
    var rightKey = 0;
    for (int n = 0; n < 1000; n = n + 1)
    {
        leftKey = leftKey + rand.Next(1, 11);
        left.Add(leftKey, leftKey);
        rightKey = rightKey + rand.Next(1, 11);
        right.Add(rightKey, rightKey);
    }
    left.Complete();
    right.Complete();
    DiscreteZipIsCorrectByRandomCheck(left, right, 9);
}
// BatchMapValuesCursor with a batch function (MultiplyMap) consumed across a
// live boundary: 5 sync moves, then blocking async moves while a background
// writer appends the second thousand values and completes.
public void CouldMoveNextAsyncWithBatching()
{
    var sm = new SortedMap<DateTime, double>();
    var count = 1000;
    for (int i = 0; i < count; i++)
    {
        sm.Add(DateTime.UtcNow.Date.AddSeconds(i), i);
    }
    Task.Run(() =>
    {
        Thread.Sleep(1000);
        for (int i = count; i < count * 2; i++)
        {
            sm.Add(DateTime.UtcNow.Date.AddSeconds(i), i);
            //Thread.Sleep(50);
        }
        sm.Complete(); // stop mutating
        //Console.WriteLine("Set immutable");
    });
    var bmvc = new BatchMapValuesCursor<DateTime, double, double>(sm.GetCursor, (v) => v * 10.0, this.MultiplyMap);
    var c = 0;
    while (c < 5 && bmvc.MoveNext())
    {
        Assert.AreEqual(c * 10.0, bmvc.CurrentValue);
        c++;
    }
    while (bmvc.MoveNext(CancellationToken.None).Result)
    {
        Assert.AreEqual(c * 10.0, bmvc.CurrentValue);
        Console.WriteLine("Value: " + bmvc.CurrentValue);
        c++;
    }
    Assert.AreEqual(2 * count, c);
}
// Async enumerator over a SortedMap while the second million items are still
// being added; the enumerator must see all 2M items once Complete() is awaited.
public async Task CouldEnumerateSMUsingCursor()
{
    var map = new SortedMap<int, int>();
    var count = 1_000_000;
    for (int i = 0; i < count; i++)
    {
        await map.TryAdd(i, i);
    }
#pragma warning disable HAA0401 // Possible allocation of reference type enumerator
    var ae = map.GetAsyncEnumerator();
#pragma warning restore HAA0401 // Possible allocation of reference type enumerator
    var t = Task.Run(async () =>
    {
        using (Benchmark.Run("SCM.AsyncEnumerator", count))
        {
            var cnt = 0;
            while (await ae.MoveNextAsync())
            {
                cnt++;
            }
            await ae.DisposeAsync();
            Assert.AreEqual(count * 2, cnt);
        }
        Benchmark.Dump();
    });
    for (int i = count; i < count * 2; i++)
    {
        await map.TryAdd(i, i);
    }
    await map.Complete();
    t.Wait();
}
// Stress test: a writer task appends 5M items in rounds while a reader drains an
// AsyncCursor; a third watchdog task periodically prints the cursor key and calls
// TryComplete(false). Counters use Interlocked because reader/monitor race.
public async Task CouldReadDataStreamWhileWritingFromManyThreads()
{
    var map = new SortedMap<int, int>();
    var count = 1_000_000;
    var rounds = 5;
    var writeTask = Task.Run(async () =>
    {
        using (Benchmark.Run("Write", count * rounds, true))
        {
            for (int j = 0; j < rounds; j++)
            {
                var t1 = Task.Run(async () =>
                {
                    try
                    {
                        for (int i = j * count; i < (j + 1) * count; i++)
                        {
                            await map.TryAddLast(i, i);
                            Thread.SpinWait(10);
                        }
                    }
                    catch (Exception e)
                    {
                        Console.WriteLine(e);
                        throw;
                    }
                });
                await t1;
            }
        }
    });
    AsyncCursor<int, int, SortedMapCursor<int, int>> cursor = null;
    var cnt = 0L;
    var readTask = Task.Run(async () =>
    {
        for (int r = 0; r < 1; r++)
        {
            using (cursor = new AsyncCursor<int, int, SortedMapCursor<int, int>>(map.GetCursor()))
            {
                using (Benchmark.Run("Read", count * rounds, true))
                {
                    try
                    {
                        while (await cursor.MoveNextAsync())
                        {
                            Interlocked.Increment(ref cnt);
                        }
                    }
                    catch (Exception e)
                    {
                        Console.WriteLine(e);
                        throw;
                    }
                    // Left from coreclr 19161 tests, TODO remove when everything works OK
                    // here is a strong reference to cursor with side effects of printing to console
                    // Console.WriteLine("Last value: " + cursor.Current.Key);
                    // another strong reference after while loop, we dereference it's value and return from task
                    // lastKey1 = cursor.CurrentKey;
                }
            }
        }
    });
    // NOTE(review): `monitor` is a captured non-volatile bool and is only set to
    // false after map.Dispose() below — the watchdog may touch a disposed cursor.
    var monitor = true;
    var t = Task.Run(async () =>
    {
        try
        {
            while (monitor)
            {
                await Task.Delay(1000);
                Console.WriteLine($"Key {cursor.CurrentKey}");
                cursor.TryComplete(false);
            }
        }
        catch (Exception e)
        {
            Console.WriteLine(e);
            throw;
        }
    });
    await writeTask;
    await map.Complete();
    map.NotifyUpdate(true);
    Console.WriteLine("Read after map complete:" + Interlocked.Read(ref cnt));
    await readTask;
    Console.WriteLine("Read after finish:" + Interlocked.Read(ref cnt));
    // Console.WriteLine("Last key: " + lastKey);
    Benchmark.Dump();
    map.Dispose();
    monitor = false;
}
// Async enumerator over a map (named SCM, constructed as SortedMap) across a live
// boundary: first million added up front, second million added while the
// enumerator runs; keys must match the running count exactly.
public async Task CouldEnumerateSCMUsingCursor()
{
    Settings.SCMDefaultChunkLength = Settings.SCMDefaultChunkLength * 4;
    var scm = new SortedMap<int, int>();
    var count = 1_000_000; // Settings.SCMDefaultChunkLength - 1;
    for (int i = 0; i < count; i++)
    {
        await scm.TryAdd(i, i);
    }
    Console.WriteLine("Added first half");
#pragma warning disable HAA0401 // Possible allocation of reference type enumerator
    var ae = scm.GetAsyncEnumerator();
#pragma warning restore HAA0401 // Possible allocation of reference type enumerator
    var t = Task.Run(async () =>
    {
        using (Benchmark.Run("SCM.AsyncEnumerator", count))
        {
            try
            {
                var cnt = 0;
                while (await ae.MoveNextAsync())
                {
                    // Keys were added as 0..2M-1, so each key must equal its ordinal.
                    if (cnt != ae.Current.Key)
                    {
                        ThrowHelper.ThrowInvalidOperationException();
                    }
                    cnt++;
                }
                await ae.DisposeAsync();
            }
            catch (Exception ex)
            {
                Console.WriteLine("EXCEPTION: " + ex.ToString());
                throw;
            }
            // Assert.AreEqual(scm.Count, cnt);
        }
        Benchmark.Dump();
    });
    // Thread.Sleep(1000);
    for (int i = count; i < count * 2; i++)
    {
        await scm.TryAdd(i, i);
        //Thread.SpinWait(50);
    }
    // Thread.Sleep(2000);
    await scm.Complete();
    t.Wait();
}
// NOTE(review): near-duplicate of the earlier MoveNextAsyncBenchmark in this file
// (differs only by a commented-out IsSynchronized line) — consider deduplicating.
public void MoveNextAsyncBenchmark()
{
    // this benchmark shows that simple async enumeration gives 13+ mops,
    // this means than we should use parallel enumeration on joins.
    // the idea is that a chain of calculations could be somewhat heavy, e.g. rp(ma(log(zipLag(c p -> c/p))))
    // but they are optimized for single thread: when movenext is called on the outer cursor,
    // the whole chain enumerates synchronously.
    // During joins, we must make these evaluations parallel. The net overhead of tasks over existing data is visible
    // but not too big, while on real-time stream there is no alternative at all.
    // Join algos should be paralell and task-based by default
    var count = 10000000;
    var sw = new Stopwatch();
    var sm = new SortedMap<DateTime, double>();
    //sm.IsSynchronized = true;
    for (int i = 0; i < count; i++)
    {
        sm.Add(DateTime.UtcNow.Date.AddSeconds(i), i);
    }
    sm.Complete();
    sw.Start();
    double sum = 0.0;
    var c = sm.GetCursor();
    Task.Run(async () =>
    {
        while (await c.MoveNext(CancellationToken.None))
        {
            sum += c.CurrentValue;
        }
    }).Wait();
    sw.Stop();
    double expectedSum = 0.0;
    for (int i = 0; i < count; i++)
    {
        expectedSum += i;
    }
    Assert.AreEqual(expectedSum, sum);
    Console.WriteLine("Elapsed msec: {0}", sw.ElapsedMilliseconds);
    Console.WriteLine("Ops: {0}", Math.Round(0.000001 * count * 1000.0 / (sw.ElapsedMilliseconds * 1.0), 2));
}
// Consumes a (formerly cached — the .Cache() call is commented out) series while
// a delayed writer adds three values; the consumer first drains synchronously
// (expecting nothing yet), then continues with async moves until a stop key.
public void CouldUseCacheExtensionMethod()
{
    var count = 3;
    var sw = new Stopwatch();
    sw.Start();
    var sm = new SortedMap<DateTime, double>();
    sm.IsSynchronized = true;
    var addTask = Task.Run(async () =>
    {
        await Task.Delay(50);
        try
        {
            for (int i = 0; i < count; i++)
            {
                sm.Add(DateTime.UtcNow.Date.AddSeconds(i), i);
                //await Task.Delay(1);
            }
            sm.Complete();
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);
        }
    });
    var cached = sm; //.Cache();
    double sum = 0.0;
    var sumTask = Task.Run(async () =>
    {
        try
        {
            var c = cached.GetCursor();
            // Sync drain first: the writer is still inside its initial delay,
            // so nothing should be visible yet.
            while (c.MoveNext())
            {
                sum += c.CurrentValue;
            }
            Assert.AreEqual(0, sum);
            var stop = DateTime.UtcNow.Date.AddSeconds(count - 1);
            //await Task.Delay(50);
            while (await c.MoveNext(CancellationToken.None))
            //while (c.MoveNext())
            {
                sum += c.CurrentValue;
                Console.WriteLine($"Current: {c.CurrentKey} - {c.CurrentValue}");
                if (c.CurrentKey == stop)
                {
                    break;
                }
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message + ":\n" + ex.ToString());
        }
    });
    sumTask.Wait();
    addTask.Wait();
    sw.Stop();
    double expectedSum = 0.0;
    for (int i = 0; i < count; i++)
    {
        expectedSum += i;
    }
    Assert.AreEqual(expectedSum, sum);
    Console.WriteLine("Elapsed msec: {0}", sw.ElapsedMilliseconds - 50);
    Console.WriteLine("Ops: {0}", Math.Round(0.000001 * count * 1000.0 / (sw.ElapsedMilliseconds * 1.0), 2));
    // NOTE(review): empty try with finally — the finally is effectively an
    // unconditional block; the try/finally wrapper adds nothing here.
    try { }
    finally
    {
        foreach (var kvp in sm)
        {
            Console.WriteLine($"SM: {kvp.Key} - {kvp.Value}");
        }
    }
    Thread.Sleep(1000);
}
// Verifies continuous (Repeat) zip against a manually-computed expected map built
// with TryFind(LE) over the union of keys; then checks that sync enumeration,
// a cloned cursor's async enumeration, and the expected map all agree.
public void ContinuousZipIsCorrectByRandomCheck()
{
    var sw = new Stopwatch();
    var sm1 = new SortedMap<int, int>();
    var sm2 = new SortedMap<int, int>();
    var rng = new System.Random(31415926); //31415926
    var prev1 = 0;
    var prev2 = 0;
    for (int i = 0; i < 100000; i = i + 1)
    {
        prev1 = prev1 + rng.Next(1, 11);
        sm1.Add(prev1, prev1);
        prev2 = prev2 + rng.Next(1, 11);
        sm2.Add(prev2, prev2);
    }
    sm1.Complete();
    sm2.Complete();
    //Console.WriteLine("First map:");
    //foreach (var kvp in sm1)
    //{
    //    Console.WriteLine(kvp.Key);
    //}
    //Console.WriteLine("Second map:");
    //foreach (var kvp in sm2) {
    //    Console.WriteLine(kvp.Key);
    //}
    var series = new[] { sm1.Repeat(), sm2.Repeat(), };
    sw.Start();
    // Manual reference implementation: for every key in either map, repeat-zip
    // is defined only where both maps have a LE match; value is the sum of matches.
    var allKeys = sm1.keys.Union(sm2.keys).OrderBy(x => x).ToArray();
    int[] expectedKeys = new int[allKeys.Length];
    int[] expectedValues = new int[allKeys.Length];
    var size = 0;
    for (int i = 0; i < allKeys.Length; i++)
    {
        var val = 0;
        KeyValuePair<int, int> temp;
        var hasFirst = sm1.TryFind(allKeys[i], Lookup.LE, out temp);
        if (hasFirst)
        {
            val += temp.Value;
            var hasSecond = sm2.TryFind(allKeys[i], Lookup.LE, out temp);
            if (hasSecond)
            {
                val += temp.Value;
                expectedKeys[size] = allKeys[i];
                expectedValues[size] = val;
                size++;
            }
        }
    }
    var expectedMap = SortedMap<int, int>.OfSortedKeysAndValues(expectedKeys, expectedValues, size);
    sw.Stop();
    //Console.WriteLine("Expected map:");
    //foreach (var kvp in expectedMap) {
    //    Console.WriteLine(kvp.Key + " ; " + kvp.Value);
    //}
    Console.WriteLine("Manual join, elapsed msec: {0}", sw.ElapsedMilliseconds);
    SortedMap<int, int> sum = new SortedMap<int, int>();
    for (int round = 0; round < 1; round++)
    {
        sw.Restart();
        var ser = series.Zip((k, varr) => varr.Sum());
        var cur = ser.GetCursor();
        while (cur.MoveNext())
        {
            sum.AddLast(cur.CurrentKey, cur.CurrentValue);
        }
        sw.Stop();
        Console.WriteLine("Zip join, elapsed msec: {0}", sw.ElapsedMilliseconds);
        //Console.WriteLine("StateCreation: {0}", RepeatCursor<int, int>.StateCreation);
        //Console.WriteLine("StateHit: {0}", RepeatCursor<int, int>.StateHit);
        //Console.WriteLine("StateMiss: {0}", RepeatCursor<int, int>.StateMiss);
    }
    //Console.WriteLine("Sync zip map:");
    //foreach (var kvp in sum) {
    //    Console.WriteLine(kvp.Key + " ; " + kvp.Value);
    //}
    Assert.AreEqual(expectedMap.Count, sum.Count, "Results of sync and expected must be equal");
    foreach (var kvp in expectedMap)
    {
        Assert.AreEqual(kvp.Value, sum[kvp.Key]);
    }
    for (int round = 0; round < 1; round++)
    {
        sw.Restart();
        var ser = series.Zip((k, varr) => varr.Sum());
        var cur = ser.GetCursor();
        var cur2 = cur.Clone();
        var sum2 = new SortedMap<int, int>();
        Task.Run(async () =>
        {
            while (await cur2.MoveNext(CancellationToken.None))
            {
                sum2.Add(cur2.CurrentKey, cur2.CurrentValue);
            }
        }).Wait();
        sw.Stop();
        Console.WriteLine("Async Zip join, elapsed msec: {0}", sw.ElapsedMilliseconds);
        Assert.AreEqual(sum.Count, sum2.Count, "Results of sync and async moves must be equal");
        foreach (var kvp in expectedMap)
        {
            Assert.AreEqual(kvp.Value, sum2[kvp.Key]);
        }
    }
    Console.WriteLine("");
}
// Clones a ZipLag cursor and checks the clone tracks the original; then verifies
// TryGetValue per key, and re-runs the lagged sum asynchronously against a map
// populated after cursor creation.
// NOTE(review): byte-for-byte duplicate of an earlier method in this file — dedupe.
public void CouldCloneZipLagSeries()
{
    var count = 1000;
    var sm = new SortedMap<int, double>();
    for (int i = 0; i < count; i++)
    {
        sm.Add(i, i);
    }
    // slow implementation
    var sw = new Stopwatch();
    sw.Start();
    var zipLag = sm.ZipLag(1, (cur, prev) => cur + prev); //.ToSortedMap();
    var zc = zipLag.GetCursor();
    zc.MoveNext();
    var zc2 = zc.Clone();
    Assert.AreEqual(zc.CurrentKey, zc2.CurrentKey);
    zc.MoveNext();
    zc2.MoveNext();
    Assert.AreEqual(zc.CurrentKey, zc2.CurrentKey);
    zc.MovePrevious();
    zc2.MovePrevious();
    Assert.AreEqual(zc.CurrentKey, zc2.CurrentKey);
    for (int i = 1; i < count; i++)
    {
        var expected = i + i - 1; // current + previous value at key i
        double actual;
        var ok = zc.TryGetValue(i, out actual);
        Assert.AreEqual(expected, actual);
    }
    var sm2 = new SortedMap<int, double>();
    var zc3 = sm2.ZipLag(1, (cur, prev) => cur + prev).GetCursor();
    var t = Task.Run(async () =>
    {
        var c = 1; // first key is missing because we cannot create state at it
        while (await zc3.MoveNext(CancellationToken.None))
        {
            var expected = c + c - 1;
            Assert.AreEqual(expected, zc3.CurrentValue);
            c++;
        }
    });
    for (int i = 0; i < count; i++)
    {
        sm2.Add(i, i);
    }
    sm2.Complete(); // without it MoveNextAsync will wait forever
    t.Wait();
}
// The README/logo example: (upper.Repeat() + lower) — checks indexer values,
// MoveAt semantics, forward/backward sync moves, a blocking async move that is
// unblocked by a new value, and a final false once both sources are Complete().
public void ZipNFromLogoAndReadmeRepeatCouldMoveCursorCorrectly()
{
    var upper = new SortedMap<int, int> { { 2, 2 }, { 4, 4 } };
    var lower = new SortedMap<int, int> { { 1, 10 }, { 3, 30 }, { 5, 50 } };
    var sum = (upper.Repeat() + lower);
    var cursor = sum.GetCursor();
    Assert.AreEqual(32, sum[3]);
    Assert.AreEqual(54, sum[5]);
    // Key 1 precedes any upper value, so the repeat-zip is undefined there.
    Assert.IsFalse(cursor.MoveAt(1, Lookup.EQ));
    Assert.IsTrue(cursor.MoveAt(1, Lookup.GE));
    Assert.AreEqual(3, cursor.CurrentKey);
    Assert.AreEqual(32, cursor.CurrentValue);
    // move forward
    Assert.IsTrue(cursor.MoveNext());
    Assert.AreEqual(5, cursor.CurrentKey);
    Assert.AreEqual(54, cursor.CurrentValue);
    // finished
    Assert.IsFalse(cursor.MoveNext());
    //// move back
    Assert.IsTrue(cursor.MovePrevious());
    Assert.AreEqual(3, cursor.CurrentKey);
    Assert.AreEqual(32, cursor.CurrentValue);
    // async moves
    Assert.IsTrue(cursor.MoveNext(CancellationToken.None).Result);
    Assert.AreEqual(5, cursor.CurrentKey);
    Assert.AreEqual(54, cursor.CurrentValue);
    var moved = false;
    var t = Task.Run(async () => { moved = await cursor.MoveNext(CancellationToken.None); });
    // add new value
    lower.Add(6, 60);
    t.Wait();
    Assert.IsTrue(moved);
    Assert.AreEqual(6, cursor.CurrentKey);
    Assert.AreEqual(4 + 60, cursor.CurrentValue);
    // when all sources are marked as immutable/complete, MNA must return false
    var t2 = Task.Run(async () => { moved = await cursor.MoveNext(CancellationToken.None); });
    upper.Complete();
    lower.Complete();
    t2.Wait();
    Assert.IsFalse(moved);
}
// Zips two Repeat()-wrapped live series while writers append a second batch;
// asserts keys remain strictly increasing across the async portion and both
// sources reach 2*count elements.
public void CouldZipManyContinuousInRealTime()
{
    //Assert.Inconclusive();
    //Trace.TraceWarning("volkswagening: this test hangs when started together with ZipN tests");
    //return;
    var sm1 = new SortedMap<DateTime, double>();
    var sm2 = new SortedMap<DateTime, double>();
    var count = 100000;
    for (int i = 0; i < count; i++)
    {
        sm1.Add(DateTime.UtcNow.Date.AddSeconds(i), i);
        sm2.Add(DateTime.UtcNow.Date.AddSeconds(i), i * 3);
    }
    var t1 = Task.Run(() =>
    {
        try
        {
            Thread.Sleep(1000);
            for (int i = count; i < count * 2; i++)
            {
                sm1.Add(DateTime.UtcNow.Date.AddSeconds(i), i);
            }
        }
        finally
        {
            // Complete in finally so the consumer always terminates, even on writer failure.
            sm1.Complete();
            Console.WriteLine("sm1.Complete()");
        }
    });
    var t2 = Task.Run(() =>
    {
        try
        {
            Thread.Sleep(950);
            for (int i = count; i < count * 2; i++)
            {
                sm2.Add(DateTime.UtcNow.Date.AddSeconds(i), i * 3);
            }
        }
        finally
        {
            sm2.Complete();
            Console.WriteLine("sm2.Complete()");
        }
    });
    // this test measures isolated performance of ZipN, without ToSortedMap
    var sw = new Stopwatch();
    var series = new[] { sm1.Repeat(), sm2.Repeat() };
    sw.Start();
    var totalSum = 0.0;
    var sumCursor = series.Zip((k, varr) => varr.Sum()).GetCursor();
    var c = 0;
    while (c < 5 && sumCursor.MoveNext())
    {
        //Assert.AreEqual(c * 4.0, sumCursor.CurrentValue);
        totalSum += sumCursor.CurrentValue;
        c++;
    }
    var t3 = Task.Run(async () =>
    {
        var previous = sumCursor.CurrentKey;
        while (await sumCursor.MoveNext(CancellationToken.None))
        {
            //Assert.AreEqual(c * 4.0, sumCursor.CurrentValue);
            //Console.WriteLine("Value: " + sumCursor.CurrentValue);
            totalSum += sumCursor.CurrentValue;
            c++;
            Assert.IsTrue(sumCursor.CurrentKey > previous, "Next key is less than previous");
            previous = sumCursor.CurrentKey;
        }
    });
    Task.WaitAll(t1, t2, t3);
    sw.Stop();
    Console.WriteLine("Elapsed msec: {0}", sw.ElapsedMilliseconds);
    Console.WriteLine("Total sum: {0}", totalSum);
    Assert.AreEqual(count * 2, sm1.Count);
    Assert.AreEqual(count * 2, sm2.Count);
}
// Zip of two Repeat()-wrapped maps built so the expected value at every even key
// is known by construction (i*2 - 2); checks sync ToSortedMap, a cloned cursor's
// blocking async drain, and stepwise async moves.
// NOTE(review): "Constrcution" is a typo, but the method name is the public
// interface — rename only together with any test-name filters that reference it.
public void ContinuousZipIsCorrectByConstrcution()
{
    var count = 10;
    var sw = new Stopwatch();
    var sm1 = new SortedMap<int, int>();
    var sm2 = new SortedMap<int, int>();
    sm1.Add(0, 0);
    sm2.Add(0, 0);
    for (int i = 2; i < count; i = i + 2)
    {
        sm1.Add(i, i);
        sm2.Add(i + 1, i);
    }
    sm1.Complete();
    sm2.Complete();
    var series = new[] { sm1.Repeat(), sm2.Repeat(), };
    sw.Start();
    var ser = series.Zip((k, varr) => varr.Sum());
    var sum = ser.ToSortedMap();
    sw.Stop();
    Console.WriteLine("Elapsed msec: {0}", sw.ElapsedMilliseconds);
    for (int i = 2; i < count; i = i + 2)
    {
        Assert.AreEqual(i * 2 - 2, sum[i]);
    }
    var cur = ser.GetCursor();
    var cur2 = cur.Clone();
    var sum2 = new SortedMap<int, int>();
    while (cur2.MoveNext(CancellationToken.None).Result)
    {
        sum2.Add(cur2.CurrentKey, cur2.CurrentValue);
    }
    Assert.AreEqual(sum.Count, sum2.Count, "Results of sync and async moves must be equal");
    Assert.IsTrue(cur.MoveNext(CancellationToken.None).Result);
    Assert.AreEqual(0, cur.CurrentValue);
    var c = 2;
    // Every second move lands on an odd key (sm2's shifted keys), which is
    // skipped via the extra MoveNext inside the loop body.
    while (cur.MoveNext(CancellationToken.None).Result)
    {
        Assert.AreEqual(c * 2 - 2, cur.CurrentValue);
        var x = cur.MoveNext(CancellationToken.None).Result;
        c += 2;
    }
}
// Zips two raw (non-Repeat) live series: 5 sync moves, then blocking async moves
// until both writers complete their second batch; only totals are printed.
public void CouldZipManyNonContinuousInRealTime()
{
    var sm1 = new SortedMap<DateTime, double>();
    var sm2 = new SortedMap<DateTime, double>();
    var count = 100000;
    for (int i = 0; i < count; i++)
    {
        sm1.Add(DateTime.UtcNow.Date.AddSeconds(i), i);
        sm2.Add(DateTime.UtcNow.Date.AddSeconds(i), i * 3);
    }
    Task.Run(() =>
    {
        Thread.Sleep(1000);
        for (int i = count; i < count * 2; i++)
        {
            sm1.Add(DateTime.UtcNow.Date.AddSeconds(i), i);
            //Thread.Sleep(50);
        }
        sm1.Complete(); // stop mutating
        //Console.WriteLine("Set immutable");
    });
    Task.Run(() =>
    {
        Thread.Sleep(950);
        for (int i = count; i < count * 2; i++)
        {
            sm2.Add(DateTime.UtcNow.Date.AddSeconds(i), i * 3);
            //Thread.Sleep(50);
        }
        sm2.Complete(); // stop mutating
        //Console.WriteLine("Set immutable");
    });
    // this test measures isolated performance of ZipN, without ToSortedMap
    var sw = new Stopwatch();
    var series = new[] { sm1, sm2 };
    sw.Start();
    var totalSum = 0.0;
    var sumCursor = series.Zip((k, varr) => varr.Sum()).GetCursor();
    var c = 0;
    while (c < 5 && sumCursor.MoveNext())
    {
        //Assert.AreEqual(c * 4.0, sumCursor.CurrentValue);
        totalSum += sumCursor.CurrentValue;
        c++;
    }
    while (sumCursor.MoveNext(CancellationToken.None).Result)
    {
        //Assert.AreEqual(c * 4.0, sumCursor.CurrentValue);
        //Console.WriteLine("Value: " + sumCursor.CurrentValue);
        totalSum += sumCursor.CurrentValue;
        c++;
    }
    sw.Stop();
    Console.WriteLine("Elapsed msec: {0}", sw.ElapsedMilliseconds);
    Console.WriteLine("Total sum: {0}", totalSum);
}
// Zips two Repeat()-ed (continuous) series while background writers append values
// ~50ms apart, then drains asynchronously. Zipping continuous series over live data
// is inherently non-deterministic near the write frontier, so small deviations from
// the expected value only log a warning instead of failing.
public void CouldZipContinuousInRealTime()
{
    var sm1 = new SortedMap<DateTime, double>();
    var sm2 = new SortedMap<DateTime, double>();
    var count = 100;
    for (int i = 0; i < count; i++)
    {
        sm1.Add(DateTime.UtcNow.Date.AddSeconds(i), i);
        sm2.Add(DateTime.UtcNow.Date.AddSeconds(i), i * 3);
    }
    Task.Run(() =>
    {
        Thread.Sleep(1000);
        for (int i = count; i < count * 2; i++)
        {
            sm1.Add(DateTime.UtcNow.Date.AddSeconds(i), i);
            Thread.Sleep(50);
        }
        sm1.Complete(); // stop mutating
        //Console.WriteLine("Set immutable");
    });
    Task.Run(() =>
    {
        Thread.Sleep(950);
        for (int i = count; i < count * 2; i++)
        {
            sm2.Add(DateTime.UtcNow.Date.AddSeconds(i), i * 3);
            Thread.Sleep(50);
        }
        sm2.Complete(); // stop mutating
        //Console.WriteLine("Set immutable");
    });
    // this test measures isolated performance of ZipN, without ToSortedMap
    Thread.Sleep(1050); // let both writers start before reading
    var sw = new Stopwatch();
    var series = new[] { sm1.Repeat(), sm2.Repeat() };
    sw.Start();
    var totalSum = 0.0;
    var sumCursor = series.Zip((k, varr) => varr.Sum()).GetCursor();
    var c = 0;
    // The pre-filled region is stable, so exact values (i + 3i = 4i) can be asserted.
    while (c < 5 && sumCursor.MoveNext())
    {
        Assert.AreEqual(c * 4.0, sumCursor.CurrentValue);
        totalSum += sumCursor.CurrentValue;
        c++;
    }
    Task.Run(async () =>
    {
        while (await sumCursor.MoveNext(CancellationToken.None))
        {
            // Tolerate small deviations near the live frontier; anything larger
            // is a real error and must fail the assert below.
            if (Math.Abs(c * 4.0 - sumCursor.CurrentValue) <= 3.0)
            {
                // NB VolksWagening
                // TODO deal with it somehow, e.g. with recalc of the last value, and explicitly document
                Trace.TraceWarning("Zipping continuous series in real-time is inherently non-deterministic");
            }
            else
            {
                Assert.AreEqual(c * 4.0, sumCursor.CurrentValue);
            }
            Console.WriteLine("Value: " + sumCursor.CurrentValue);
            totalSum += sumCursor.CurrentValue;
            c++;
        }
        sw.Stop();
        Console.WriteLine("Elapsed msec: {0}", sw.ElapsedMilliseconds);
        Console.WriteLine("Total sum: {0}", totalSum);
    }).Wait();
    Thread.Sleep(100);
    sumCursor.Dispose();
}
// Benchmarks TryGetValueUnchecked point lookups on a completed SortedMap for three
// sizes (1k, 2k, 4k elements), each repeated mult times per Benchmark.Run round.
// Key i == 2 is skipped on insert to make the series irregular. SortedList and
// SortedChunkedMap are built for parity with earlier comparison runs but are not
// timed here.
// Fixes: the original asserted IsCompleted twice in a row both before and after
// Complete() — the duplicate, identical assertions are removed; the large blocks of
// commented-out SL/SCM benchmark code are dropped.
public void TGVSpeed()
{
    for (int size = 0; size < 3; size++)
    {
        var count = (int)(1024 * Math.Pow(2, size));
        const int mult = 1000;
        var sl = new SortedList<DateTime, int>();
        var sm = new SortedMap<DateTime, int>(count);
        var scm = new SortedChunkedMap<DateTime, int>();
        var start = DateTime.Today.ToUniversalTime();
        for (int i = 0; i < count; i++)
        {
            if (i != 2) // make irregular
            {
                sl.Add(start.AddTicks(i), i);
                sm.Add(start.AddTicks(i), i);
                scm.Add(start.AddTicks(i), i);
            }
        }
        // Complete() must flip the map from mutable/synchronized to completed/unsynchronized.
        Assert.IsFalse(sm.IsCompleted);
        Assert.IsTrue(sm.IsSynchronized);
        sm.Complete();
        Assert.IsTrue(sm.IsCompleted);
        Assert.IsFalse(sm.IsSynchronized);
        scm.Complete();
        for (int r = 0; r < 20; r++)
        {
            var sum2 = 0L;
            using (Benchmark.Run("SM", count * mult, true))
            {
                // Look up every inserted key mult times; AddTicks stays inside the
                // measured region on purpose so all variants pay the same key cost.
                for (int j = 0; j < mult; j++)
                {
                    for (int i = 0; i < count; i++)
                    {
                        if (sm.TryGetValueUnchecked(start.AddTicks(i), out var v))
                        {
                            sum2 += v;
                        }
                    }
                }
            }
            //Assert.True(sum2 > 0);
        }
        Benchmark.Dump($"Size = {Math.Pow(2, size)}k elements");
    }
}
// The logo/README example: upper.Repeat() + lower. Verifies indexer lookup,
// MoveAt semantics, forward/backward sync moves, async moves that wait for a
// newly added value, and that an async move returns false once all sources are
// marked complete.
public void ZipNFromLogoAndReadmeRepeatCouldMoveCursorCorrectly()
{
    var upper = new SortedMap<int, int> { { 2, 2 }, { 4, 4 } };
    var lower = new SortedMap<int, int> { { 1, 10 }, { 3, 30 }, { 5, 50 } };
    var sum = (upper.Repeat() + lower);
    var cursor = sum.GetCursor();
    Assert.AreEqual(32, sum[3]); // 2 (repeated from key 2) + 30
    Assert.AreEqual(54, sum[5]); // 4 (repeated from key 4) + 50
    // At key 1 upper has no value yet (its first key is 2), so the zip is undefined there.
    Assert.IsFalse(cursor.MoveAt(1, Lookup.EQ));
    Assert.IsTrue(cursor.MoveAt(1, Lookup.GE));
    Assert.AreEqual(3, cursor.CurrentKey);
    Assert.AreEqual(32, cursor.CurrentValue);
    // move forward
    Assert.IsTrue(cursor.MoveNext());
    Assert.AreEqual(5, cursor.CurrentKey);
    Assert.AreEqual(54, cursor.CurrentValue);
    // finished
    Assert.IsFalse(cursor.MoveNext());
    //// move back
    Assert.IsTrue(cursor.MovePrevious());
    Assert.AreEqual(3, cursor.CurrentKey);
    Assert.AreEqual(32, cursor.CurrentValue);
    // async moves
    Assert.IsTrue(cursor.MoveNext(CancellationToken.None).Result);
    Assert.AreEqual(5, cursor.CurrentKey);
    Assert.AreEqual(54, cursor.CurrentValue);
    var moved = false;
    // NOTE(review): no synchronization between starting this task and the Add below —
    // the async MoveNext is expected to observe the new value whether it lands before
    // or after the await begins.
    var t = Task.Run(async () =>
    {
        moved = await cursor.MoveNext(CancellationToken.None);
    });
    // add new value
    lower.Add(6, 60);
    t.Wait();
    Assert.IsTrue(moved);
    Assert.AreEqual(6, cursor.CurrentKey);
    Assert.AreEqual(4 + 60, cursor.CurrentValue);
    // when all sources are marked as immutable/complete, MNA must return false
    var t2 = Task.Run(async () =>
    {
        moved = await cursor.MoveNext(CancellationToken.None);
    });
    upper.Complete();
    lower.Complete();
    t2.Wait();
    Assert.IsFalse(moved);
}
// Measures SortedMap enumeration throughput, comparing KVP access via Current.Value
// against the direct CurrentValue property over 20 benchmark rounds. SortedList and
// SortedChunkedMap instances mirror earlier comparison variants but are not timed.
public void EnumerateScmSpeed()
{
    const int count = 10_000_000;
    var sl = new SortedList<int, int>();
    var sm = new SortedMap<int, int>();
    var scm = new SortedChunkedMap<int, int>();
    // Drop every 1000th key so the series is irregular.
    for (int key = 0; key < count; key++)
    {
        if (key % 1000 == 0)
        {
            continue;
        }
        sl.Add(key, key);
        sm.Add(key, key);
    }
    sm.Complete();

    for (int round = 0; round < 20; round++)
    {
        long viaKvp = 0L;
        using (Benchmark.Run("SM Current.Value", count))
        {
            using (var cursor = sm.GetEnumerator())
            {
                while (cursor.MoveNext())
                {
                    viaKvp += cursor.Current.Value;
                }
            }
        }

        long viaProperty = 0L;
        using (Benchmark.Run("SM CurrentValue", count))
        {
            using (var cursor = sm.GetEnumerator())
            {
                while (cursor.MoveNext())
                {
                    viaProperty += cursor.CurrentValue;
                }
            }
        }
    }
    Benchmark.Dump();
}
// A zip of two empty, completed series must produce no moves: synchronous moves
// return false, and the async MoveNext completes normally (does not hang) with
// a false result.
public void CouldNotMoveAsyncContinuousOnEmptyZip()
{
    var left = new SortedMap<int, int>();
    var right = new SortedMap<int, int>();
    left.Complete();
    right.Complete();

    var zipped = left.Repeat() + right.Repeat();
    var zipCursor = zipped.GetCursor();

    // Async moves on the empty completed sources themselves return false.
    Assert.IsFalse(left.GetCursor().MoveNext(CancellationToken.None).Result);
    Assert.IsFalse(right.GetCursor().MoveNext(CancellationToken.None).Result);

    // Sync moves on the zip find nothing.
    Assert.IsFalse(zipCursor.MoveNext());
    Assert.IsFalse(zipCursor.MoveFirst());

    // The async move must run to completion with a negative result, not block.
    var moveTask = zipCursor.MoveNext(CancellationToken.None);
    moveTask.Wait();
    Assert.AreEqual(TaskStatus.RanToCompletion, moveTask.Status);
    Assert.IsFalse(moveTask.Result);
}
// Zips ten continuous (Repeat()-ed) views over two already-completed series and
// drains the zip with async moves on a worker task. Since both sources are complete
// before reading starts, this is a pure throughput measurement of the N-ary zip
// cursor's async path.
public void CouldZipManyContinuousInRealTime3()
{
    var sm1 = new SortedMap<DateTime, double>();
    var sm2 = new SortedMap<DateTime, double>();
    var count = 100000;
    for (int i = 0; i < count; i++)
    {
        sm1.Add(DateTime.UtcNow.Date.AddSeconds(i), i);
        sm2.Add(DateTime.UtcNow.Date.AddSeconds(i), i * 3);
    }
    // NOTE(review): original comment said "will mutate after the first batch" but no
    // writer task exists in this variant — both maps are completed here and never
    // mutated again.
    sm1.Complete();
    sm2.Complete();
    // this test measures isolated performance of ZipN, without ToSortedMap
    var sw = new Stopwatch();
    // Ten alternating views of the same two maps stress the N-ary zip.
    var series = new[]
    {
        sm1.Repeat(), sm2.Repeat(), sm1.Repeat(), sm2.Repeat(), sm1.Repeat(),
        sm2.Repeat(), sm1.Repeat(), sm2.Repeat(), sm1.Repeat(), sm2.Repeat()
    };
    sw.Start();
    var totalSum = 0.0;
    var sumCursor = series.Zip((k, varr) => varr.Sum()).GetCursor();
    var c = 0;
    //while (sumCursor.MoveNext()) {
    //    //Assert.AreEqual(c * 4.0, sumCursor.CurrentValue);
    //    totalSum += sumCursor.CurrentValue;
    //    c++;
    //}
    Task.Run(async () =>
    {
        while (await sumCursor.MoveNext(CancellationToken.None))
        {
            //Assert.AreEqual(c * 4.0, sumCursor.CurrentValue);
            //Console.WriteLine("Value: " + sumCursor.CurrentValue);
            totalSum += sumCursor.CurrentValue;
            c++;
        }
        sw.Stop();
        Console.WriteLine("Elapsed msec: {0}", sw.ElapsedMilliseconds);
        Console.WriteLine("Total sum: {0}", totalSum);
    }).Wait();
}
// Stress test: three concurrent reader tasks each drain the same SortedMap with their
// own async cursor while a writer task appends `count` values and then completes the
// map. Per-move latency is recorded into HdrHistogram instances, and each reader's
// sum must equal 0 + 1 + ... + (count - 1).
public void CouldReadSortedMapNewValuesWhileTheyAreAddedUsingCursor_NoSemaphore()
{
    var cts = new CancellationTokenSource();
    var ct = CancellationToken.None; // cts.Token;
    var count = 10000000;
    var sw = new Stopwatch();
    sw.Start();
    var sm = new SortedMap<DateTime, double>();
    sm.IsSynchronized = true; // readers and the writer touch the map concurrently
    //var sm = new SortedChunkedMap<DateTime, double>();
    //sm.Add(DateTime.UtcNow.Date.AddSeconds(-2), 0);
    // Seed a few values so cursors have data before the writer starts.
    for (int i = 0; i < 5; i++)
    {
        sm.Add(DateTime.UtcNow.Date.AddSeconds(i), i);
    }
    var histogram = new LongHistogram(TimeSpan.TicksPerMillisecond * 100 * 1000, 3);

    double sum = 0;
    var cnt = 0;
    var histogram1 = new LongHistogram(TimeSpan.TicksPerMillisecond * 100 * 1000, 3);
    var sumTask = Task.Run(async () =>
    {
        // Reader 1.
        var c = sm.GetCursor();
        var startTick = sw.ElapsedTicks;
        while (await c.MoveNext(ct))
        {
            sum += c.CurrentValue;
            // Values are added in order, so the n-th observed value should be n.
            if ((int)c.CurrentValue != cnt)
            {
                //Console.WriteLine("Wrong sequence");
                //Assert.Fail($"Wrong sequence: {c.CurrentValue} != {cnt}");
                Trace.WriteLine($"Wrong sequence1: {c.CurrentValue} != {cnt}; thread {Thread.CurrentThread.ManagedThreadId}");
            }
            else
            {
                //Console.WriteLine("Async move");
            }
            cnt++;
            // Record per-move latency in nanoseconds.
            var ticks = sw.ElapsedTicks - startTick;
            var nanos = (long)(1000000000.0 * (double)ticks / Stopwatch.Frequency);
            try
            {
                histogram1.RecordValue(nanos);
            }
            catch (Exception e)
            {
                Console.WriteLine($"Nanos: {nanos}; " + e.Message);
            }
            startTick = sw.ElapsedTicks;
        }
    });

    double sum2 = 0;
    var cnt2 = 0;
    var histogram2 = new LongHistogram(TimeSpan.TicksPerMillisecond * 100 * 1000, 3);
    var sumTask2 = Task.Run(async () =>
    {
        // Reader 2 — identical to reader 1 but with its own cursor/counters/histogram.
        var c = sm.GetCursor();
        var startTick = sw.ElapsedTicks;
        while (await c.MoveNext(ct))
        {
            sum2 += c.CurrentValue;
            if ((int)c.CurrentValue != cnt2)
            {
                //Console.WriteLine("Wrong sequence");
                //Assert.Fail($"Wrong sequence: {c.CurrentValue} != {cnt}");
                Trace.WriteLine($"Wrong sequence2: {c.CurrentValue} != {cnt2}; thread {Thread.CurrentThread.ManagedThreadId}");
            }
            else
            {
                //Console.WriteLine("Async move");
            }
            cnt2++;
            var ticks = sw.ElapsedTicks - startTick;
            var nanos = (long)(1000000000.0 * (double)ticks / Stopwatch.Frequency);
            try
            {
                histogram2.RecordValue(nanos);
            }
            catch (Exception e)
            {
                Console.WriteLine($"Nanos: {nanos}; " + e.Message);
            }
            startTick = sw.ElapsedTicks;
        }
    });

    double sum3 = 0;
    var cnt3 = 0;
    var histogram3 = new LongHistogram(TimeSpan.TicksPerMillisecond * 100 * 1000, 3);
    var sumTask3 = Task.Run(async () =>
    {
        // Reader 3.
        var c = sm.GetCursor();
        var startTick = sw.ElapsedTicks;
        while (await c.MoveNext(ct))
        {
            sum3 += c.CurrentValue;
            if ((int)c.CurrentValue != cnt3)
            {
                //Console.WriteLine("Wrong sequence");
                //Assert.Fail($"Wrong sequence: {c.CurrentValue} != {cnt}");
                Trace.WriteLine($"Wrong sequence3: {c.CurrentValue} != {cnt3}; thread {Thread.CurrentThread.ManagedThreadId}");
            }
            else
            {
                //Console.WriteLine("Async move");
            }
            cnt3++;
            var ticks = sw.ElapsedTicks - startTick;
            var nanos = (long)(1000000000.0 * (double)ticks / Stopwatch.Frequency);
            try
            {
                histogram3.RecordValue(nanos);
            }
            catch (Exception e)
            {
                Console.WriteLine($"Nanos: {nanos}; " + e.Message);
            }
            startTick = sw.ElapsedTicks;
        }
    });

    Thread.Sleep(1); // give the readers a head start before the writer floods the map
    var addTask = Task.Run(() =>
    {
        //Console.WriteLine($"Adding from thread {Thread.CurrentThread.ManagedThreadId}");
        try
        {
            for (int i = 5; i < count; i++)
            {
                sm.Add(DateTime.UtcNow.Date.AddSeconds(i), i);
            }
            sm.Complete(); // unblocks readers waiting in MoveNext
        }
        catch (Exception ex)
        {
            // A failed writer would leave the readers waiting forever, so fail fast.
            Console.WriteLine(ex);
            Environment.FailFast(ex.Message, ex);
        }
    });

    // Poll-wait on each reader, tracing progress every 2s so a hang is diagnosable.
    while (!sumTask.Wait(2000))
    {
        OptimizationSettings.Verbose = true;
        Trace.WriteLine($"cnt: {cnt}");
    }
    while (!sumTask2.Wait(2000))
    {
        //OptimizationSettings.Verbose = true;
        Trace.WriteLine($"cnt2: {cnt2}");
    }
    while (!sumTask3.Wait(2000))
    {
        //OptimizationSettings.Verbose = true;
        Trace.WriteLine($"cnt3: {cnt3}");
    }
    addTask.Wait();

    // Merge the per-reader histograms and print the combined latency distribution.
    histogram.Add(histogram1);
    histogram.Add(histogram2);
    histogram.Add(histogram3);
    histogram.OutputPercentileDistribution(
        writer: Console.Out,
        percentileTicksPerHalfDistance: 3,
        outputValueUnitScalingRatio: OutputScalingFactor.None);
    sw.Stop();
    Trace.Write($"Elapsed msec: {sw.ElapsedMilliseconds}; ");
    Trace.WriteLine($"Ops: {Math.Round(0.000001 * count * 1000.0 / (sw.ElapsedMilliseconds * 1.0), 2)}");

    // Every reader must have observed every value exactly once.
    double expectedSum = 0.0;
    for (int i = 0; i < count; i++)
    {
        expectedSum += i;
    }
    if (expectedSum != sum) { Trace.WriteLine("Sum 1 is wrong"); }
    if (expectedSum != sum2) { Trace.WriteLine("Sum 2 is wrong"); }
    if (expectedSum != sum3) { Trace.WriteLine("Sum 3 is wrong"); }
    Assert.AreEqual(expectedSum, sum, "Sum 1");
    Assert.AreEqual(expectedSum, sum2, "Sum 2");
    Assert.AreEqual(expectedSum, sum3, "Sum 3");
    //sm.Dispose();
}