// Verifies the live data feed emits tick data produced by the data queue handler
// into consumed time slices. Uses a real-time provider, so ticks are generated
// whenever observed wall-clock time (Eastern) advances.
public void EmitsData()
{
    var algorithm = new AlgorithmStub(forex: new List <string> { "EURUSD" });

    // job is used to send into DataQueueHandler
    var job = new LiveNodePacket();
    // result handler is used due to dependency in SubscriptionDataReader
    var resultHandler = new BacktestingResultHandler();

    var lastTime = DateTime.MinValue;
    var timeProvider = new RealTimeProvider();
    var dataQueueHandler = new FuncDataQueueHandler(fdqh =>
    {
        var time = timeProvider.GetUtcNow().ConvertFromUtc(TimeZones.EasternStandard);
        // only emit one batch per distinct observed timestamp
        if (time == lastTime)
        {
            return(Enumerable.Empty <BaseData>());
        }
        lastTime = time;
        // nine EURUSD ticks spaced 100ms apart starting at the current time
        return(Enumerable.Range(0, 9).Select(x => new Tick(time.AddMilliseconds(x * 100), Symbols.EURUSD, 1.3m, 1.2m, 1.3m)));
    });

    var feed = new TestableLiveTradingDataFeed(dataQueueHandler, timeProvider);
    var mapFileProvider = new LocalDiskMapFileProvider();
    feed.Initialize(algorithm, job, resultHandler, mapFileProvider, new LocalDiskFactorFileProvider(mapFileProvider));

    var feedThreadStarted = new ManualResetEvent(false);
    Task.Factory.StartNew(() =>
    {
        feedThreadStarted.Set();
        feed.Run();
    });

    // wait for feed.Run to actually begin
    feedThreadStarted.WaitOne();

    var emittedData = false;
    // consume slices for up to 10 seconds; any non-empty slice satisfies the test
    ConsumeBridge(feed, TimeSpan.FromSeconds(10), true, ts =>
    {
        if (ts.Slice.Count != 0)
        {
            emittedData = true;
            Console.WriteLine("HasData: " + ts.Slice.Bars[Symbols.EURUSD].EndTime);
            Console.WriteLine();
        }
    });

    Assert.IsTrue(emittedData);
}
// Wires up a live data feed around the given algorithm and data manager, starts the
// feed loop on a background task, and returns the feed once the loop has begun.
// When no tick generator is supplied, a default one emits a tick per non-universe
// subscription each time the manual time provider advances.
private IDataFeed RunDataFeed(QCAlgorithm algorithm, out FuncDataQueueHandler dataQueueHandler, DataManager dataManager, Func <FuncDataQueueHandler, IEnumerable <BaseData> > getNextTicksFunction = null)
{
    algorithm.SetStartDate(_startDate);

    var lastTime = _manualTimeProvider.GetUtcNow();
    getNextTicksFunction = getNextTicksFunction ?? (fdqh =>
    {
        var time = _manualTimeProvider.GetUtcNow();
        // emit only when simulated time has moved forward
        if (time == lastTime)
        {
            return(Enumerable.Empty <BaseData>());
        }
        lastTime = time;
        return(fdqh.Subscriptions.Where(symbol => !algorithm.UniverseManager.ContainsKey(symbol)) // its not a universe
               .Select(symbol => new Tick(lastTime.ConvertFromUtc(TimeZones.NewYork), symbol, 1, 2)
        {
            Quantity = 1,
            // Symbol could not be in the Securities collections for the custom Universe tests. AlgorithmManager is in charge of adding them, and we are not executing that code here.
            TickType = algorithm.Securities.ContainsKey(symbol) ? algorithm.Securities[symbol].SubscriptionDataConfig.TickType : TickType.Trade
        }));
    });

    // job is used to send into DataQueueHandler
    var job = new LiveNodePacket();
    // result handler is used due to dependency in SubscriptionDataReader
    var resultHandler = new BacktestingResultHandler();

    dataQueueHandler = new FuncDataQueueHandler(getNextTicksFunction);

    var feed = new TestableLiveTradingDataFeed(dataQueueHandler, _manualTimeProvider);
    var mapFileProvider = new LocalDiskMapFileProvider();
    var fileProvider = new DefaultDataProvider();
    feed.Initialize(algorithm, job, resultHandler, mapFileProvider, new LocalDiskFactorFileProvider(mapFileProvider), fileProvider, dataManager);

    algorithm.PostInitialize();
    Thread.Sleep(150); // small handicap for the data to be pumped so TimeSlices have data of all subscriptions

    var feedThreadStarted = new ManualResetEvent(false);
    Task.Factory.StartNew(() =>
    {
        feedThreadStarted.Set();
        feed.Run();
    });

    // wait for feed.Run to actually begin
    feedThreadStarted.WaitOne();
    return(feed);
}
/// <summary>
/// Generates a benchmark stream of blank ticks: 10,000 passes over the handler's
/// current subscriptions, incrementing the shared counter once per emitted tick.
/// </summary>
private static IEnumerable <BaseData> ProduceBenchmarkTicks(FuncDataQueueHandler fdqh, Count count)
{
    const int passes = 10000;
    for (var pass = 0; pass < passes; pass++)
    {
        foreach (var subscribedSymbol in fdqh.Subscriptions)
        {
            count.Value++;
            yield return new Tick { Symbol = subscribedSymbol };
        }
    }
}
// Same scenario as the sibling EmitsData overload but initializing the feed with a
// data file provider: verifies ticks produced by the queue handler flow through the
// live feed into non-empty slices within ten seconds of real time.
public void EmitsData()
{
    var algorithm = new AlgorithmStub(forex: new List<string> {"EURUSD"});

    // job is used to send into DataQueueHandler
    var job = new LiveNodePacket();
    // result handler is used due to dependency in SubscriptionDataReader
    var resultHandler = new BacktestingResultHandler();
    var dataFileProvider = new DefaultDataFileProvider();

    var lastTime = DateTime.MinValue;
    var timeProvider = new RealTimeProvider();
    var dataQueueHandler = new FuncDataQueueHandler(fdqh =>
    {
        var time = timeProvider.GetUtcNow().ConvertFromUtc(TimeZones.EasternStandard);
        // one batch per distinct observed timestamp
        if (time == lastTime) return Enumerable.Empty<BaseData>();
        lastTime = time;
        // nine EURUSD ticks spaced 100ms apart
        return Enumerable.Range(0, 9).Select(x => new Tick(time.AddMilliseconds(x*100), Symbols.EURUSD, 1.3m, 1.2m, 1.3m));
    });

    var feed = new TestableLiveTradingDataFeed(dataQueueHandler, timeProvider);
    var mapFileProvider = new LocalDiskMapFileProvider();
    feed.Initialize(algorithm, job, resultHandler, mapFileProvider, new LocalDiskFactorFileProvider(mapFileProvider), dataFileProvider);

    var feedThreadStarted = new ManualResetEvent(false);
    Task.Factory.StartNew(() =>
    {
        feedThreadStarted.Set();
        feed.Run();
    });

    // wait for feed.Run to actually begin
    feedThreadStarted.WaitOne();

    var emittedData = false;
    // consume slices for up to 10 seconds; any non-empty slice satisfies the test
    ConsumeBridge(feed, TimeSpan.FromSeconds(10), true, ts =>
    {
        if (ts.Slice.Count != 0)
        {
            emittedData = true;
            Console.WriteLine("HasData: " + ts.Slice.Bars[Symbols.EURUSD].EndTime);
            Console.WriteLine();
        }
    });

    Assert.IsTrue(emittedData);
}
// Ensures that exiting the feed immediately after its run loop starts does not
// surface unhandled exceptions from feed.Run.
public void FastExitsDoNotThrowUnhandledExceptions()
{
    DataManager dataManager;
    // 20 tick-resolution subscriptions named "0".."19"
    var algorithm = new AlgorithmStub(out dataManager, Resolution.Tick, Enumerable.Range(0, 20).Select(x => x.ToString()).ToList());
    var getNextTicksFunction = Enumerable.Range(0, 20).Select(x => new Tick { Symbol = SymbolCache.GetSymbol(x.ToString()) }).ToList();

    // job is used to send into DataQueueHandler
    var job = new LiveNodePacket();
    // result handler is used due to dependency in SubscriptionDataReader
    var resultHandler = new BacktestingResultHandler();

    var dataQueueHandler = new FuncDataQueueHandler(handler => getNextTicksFunction);

    var feed = new TestableLiveTradingDataFeed(dataQueueHandler);
    var mapFileProvider = new LocalDiskMapFileProvider();
    var fileProvider = new DefaultDataProvider();
    feed.Initialize(algorithm, job, resultHandler, mapFileProvider, new LocalDiskFactorFileProvider(mapFileProvider), fileProvider, dataManager);

    var feedThreadStarted = new ManualResetEvent(false);
    var unhandledExceptionWasThrown = false;
    Task.Run(() =>
    {
        try
        {
            feedThreadStarted.Set();
            feed.Run();
        }
        catch (Exception ex)
        {
            // any exception escaping feed.Run fails the assertion below
            QuantConnect.Logging.Log.Error(ex.ToString());
            unhandledExceptionWasThrown = true;
        }
    });

    feedThreadStarted.WaitOne();
    // exit as soon as the run loop has started
    feed.Exit();

    // give the run loop time to unwind before asserting
    Thread.Sleep(1000);

    Assert.IsFalse(unhandledExceptionWasThrown);
}
// Builds and initializes a live data feed over the shared test algorithm
// (_algorithm), creating its DataManager and subscribing the given equity/forex
// symbols at the requested resolution. Unlike the overload taking an algorithm
// parameter, this one does NOT start the feed loop; callers run the feed themselves.
private IDataFeed RunDataFeed(out FuncDataQueueHandler dataQueueHandler, Func <FuncDataQueueHandler, IEnumerable <BaseData> > getNextTicksFunction = null, Resolution resolution = Resolution.Second, List <string> equities = null, List <string> forex = null)
{
    _algorithm.SetStartDate(_startDate);

    var lastTime = _manualTimeProvider.GetUtcNow();
    getNextTicksFunction = getNextTicksFunction ?? (fdqh =>
    {
        var time = _manualTimeProvider.GetUtcNow();
        // emit only when simulated time has moved forward
        if (time == lastTime)
        {
            return(Enumerable.Empty <BaseData>());
        }
        lastTime = time;
        return(fdqh.Subscriptions.Where(symbol => !_algorithm.UniverseManager.ContainsKey(symbol)) // its not a universe
               .Select(symbol => new Tick(lastTime.ConvertFromUtc(TimeZones.NewYork), symbol, 1, 2)
        {
            Quantity = 1,
            // Symbol could not be in the Securities collections for the custom Universe tests. AlgorithmManager is in charge of adding them, and we are not executing that code here.
            TickType = _algorithm.Securities.ContainsKey(symbol) ? _algorithm.Securities[symbol].SubscriptionDataConfig.TickType : TickType.Trade
        }));
    });

    // job is used to send into DataQueueHandler
    var job = new LiveNodePacket();
    // result handler is used due to dependency in SubscriptionDataReader
    var resultHandler = new BacktestingResultHandler();

    dataQueueHandler = new FuncDataQueueHandler(getNextTicksFunction);

    var feed = new TestableLiveTradingDataFeed(dataQueueHandler, _manualTimeProvider);
    var mapFileProvider = new LocalDiskMapFileProvider();
    var fileProvider = new DefaultDataProvider();
    var dataManager = new DataManager(feed, new UniverseSelection(feed, _algorithm), _algorithm.Settings, _algorithm.TimeKeeper);
    _algorithm.SubscriptionManager.SetDataManager(dataManager);
    _algorithm.AddSecurities(resolution, equities, forex);
    feed.Initialize(_algorithm, job, resultHandler, mapFileProvider, new LocalDiskFactorFileProvider(mapFileProvider), fileProvider, dataManager);

    _algorithm.PostInitialize();
    Thread.Sleep(150); // small handicap for the data to be pumped so TimeSlices have data of all subscriptions
    return(feed);
}
/// <summary>
/// Builds a BaseDataExchange whose data queue handler drains up to nine queued
/// points from <paramref name="dataQueue"/> per invocation.
/// </summary>
private static BaseDataExchange CreateExchange(ConcurrentQueue <BaseData> dataQueue)
{
    var dataQueueHandler = new FuncDataQueueHandler(handler =>
    {
        var batch = new List <BaseData>();
        // take at most nine items per pump, stopping early when the queue runs dry
        for (var taken = 1; taken < 10; taken++)
        {
            BaseData point;
            if (!dataQueue.TryDequeue(out point))
            {
                break;
            }
            batch.Add(point);
        }
        return batch;
    });

    var exchange = new BaseDataExchange();
    exchange.AddEnumerator(GetNextTicksEnumerator(dataQueueHandler));
    return exchange;
}
/// <summary>
/// Builds a named ("test") BaseDataExchange whose data queue handler drains up to
/// nine queued points per invocation and registers its enumerator under a synthetic
/// base-security symbol.
/// </summary>
private static BaseDataExchange CreateExchange(ConcurrentQueue <BaseData> dataQueue)
{
    var dataQueueHandler = new FuncDataQueueHandler(handler =>
    {
        var batch = new List <BaseData>();
        BaseData point;
        var taken = 0;
        while (dataQueue.TryDequeue(out point))
        {
            batch.Add(point);
            // cap each pump at nine items
            if (++taken >= 9)
            {
                break;
            }
        }
        return batch;
    });

    var exchange = new BaseDataExchange("test");
    var enumerator = GetNextTicksEnumerator(dataQueueHandler);
    var symbol = Symbol.Create("data-queue-handler-symbol", SecurityType.Base, Market.USA);
    exchange.AddEnumerator(symbol, enumerator, null, null);
    return exchange;
}
// Creates the testable live feed and synchronizer (_feed/_synchronizer), initializes
// them against the algorithm, and adds a live subscription for each requested symbol
// using the security's existing subscription config. Does not start the feed loop.
private IDataFeed RunLiveDataFeed(
    IAlgorithm algorithm,
    DateTime startDate,
    IEnumerable <Symbol> symbols,
    ITimeProvider timeProvider,
    DataManager dataManager,
    FuncDataQueueHandler funcDataQueueHandler = null)
{
    // default queue handler emits nothing
    _feed = new TestableLiveTradingDataFeed(funcDataQueueHandler ?? new FuncDataQueueHandler(x => Enumerable.Empty <BaseData>()));
    _synchronizer = new TestableSynchronizer(algorithm, dataManager, _feed, true, timeProvider);

    var mapFileProvider = new LocalDiskMapFileProvider();
    _feed.Initialize(algorithm, new LiveNodePacket(), new BacktestingResultHandler(), mapFileProvider, new LocalDiskFactorFileProvider(mapFileProvider), new DefaultDataProvider(), dataManager, _synchronizer);

    foreach (var symbol in symbols)
    {
        var config = algorithm.Securities[symbol].SubscriptionDataConfig;
        // open-ended live subscription from startDate until end of time
        var request = new SubscriptionRequest(false, null, algorithm.Securities[symbol], config, startDate, Time.EndOfTime);
        _feed.AddSubscription(request);
    }

    return(_feed);
}
// Legacy RunDataFeed overload: wires a live feed for the given algorithm with an
// optional time provider and tick generator, starts feed.Run on a background task,
// and returns once the loop has begun.
private IDataFeed RunDataFeed(IAlgorithm algorithm, out FuncDataQueueHandler dataQueueHandler, ITimeProvider timeProvider = null, Func<FuncDataQueueHandler, IEnumerable<BaseData>> getNextTicksFunction = null)
{
    // default generator: one tick per subscription stamped with wall-clock time
    // NOTE(review): uses DateTime.Now (local, not UTC) — presumably intentional for these live tests; confirm
    getNextTicksFunction = getNextTicksFunction ?? (fdqh => fdqh.Subscriptions.Select(symbol => new Tick(DateTime.Now, symbol, 1, 2){Quantity = 1}));

    // job is used to send into DataQueueHandler
    var job = new LiveNodePacket();
    // result handler is used due to dependency in SubscriptionDataReader
    var resultHandler = new ConsoleResultHandler(); // new ResultHandlerStub();

    dataQueueHandler = new FuncDataQueueHandler(getNextTicksFunction);

    var feed = new TestableLiveTradingDataFeed(dataQueueHandler, timeProvider);
    var mapFileProvider = new LocalDiskMapFileProvider();
    feed.Initialize(algorithm, job, resultHandler, mapFileProvider, new LocalDiskFactorFileProvider(mapFileProvider));

    var feedThreadStarted = new ManualResetEvent(false);
    Task.Factory.StartNew(() =>
    {
        feedThreadStarted.Set();
        feed.Run();
    });

    // wait for feed.Run to actually begin
    feedThreadStarted.WaitOne();

    return feed;
}
/// <summary>
/// Benchmark tick source: emits one blank tick per current subscription, repeated
/// for 10,000 rounds, counting every emission via the shared counter.
/// </summary>
private static IEnumerable<BaseData> ProduceBenchmarkTicks(FuncDataQueueHandler fdqh, Count count)
{
    var round = 0;
    while (round < 10000)
    {
        foreach (var subscription in fdqh.Subscriptions)
        {
            count.Value++;
            yield return new Tick { Symbol = subscription };
        }
        round++;
    }
}
/// <summary>
/// Initializes the shared _feed field with the supplied data queue handler, or with
/// a no-op handler (empty data, real-time clock) when none is given.
/// </summary>
private void CreateDataFeed( FuncDataQueueHandler funcDataQueueHandler = null)
{
    if (funcDataQueueHandler == null)
    {
        funcDataQueueHandler = new FuncDataQueueHandler(x => Enumerable.Empty <BaseData>(), new RealTimeProvider());
    }
    _feed = new TestableLiveTradingDataFeed(funcDataQueueHandler);
}
// Verifies that slow remote (Quandl) daily data merged with locally generated tick
// data does not inflate the number of emitted slices: expects exactly 14 data
// slices, each containing all four subscribed symbols.
public void RemoteDataDoesNotIncreaseNumberOfSlices()
{
    Config.Set("quandl-auth-token", "QUANDL-TOKEN");
    var startDate = new DateTime(2018, 4, 2);
    var endDate = new DateTime(2018, 4, 19);
    var algorithm = new QCAlgorithm();

    var timeProvider = new ManualTimeProvider(TimeZones.NewYork);
    timeProvider.SetCurrentTime(startDate);

    // emits one SPY and one AAPL trade tick at the current simulated NY time
    var dataQueueHandler = new FuncDataQueueHandler(fdqh =>
    {
        var time = timeProvider.GetUtcNow().ConvertFromUtc(TimeZones.NewYork);
        var tick = new Tick(time, Symbols.SPY, 1.3m, 1.2m, 1.3m)
        {
            TickType = TickType.Trade
        };
        var tick2 = new Tick(time, Symbols.AAPL, 1.3m, 1.2m, 1.3m)
        {
            TickType = TickType.Trade
        };
        return(new[] { tick, tick2 });
    }, timeProvider);

    CreateDataFeed(dataQueueHandler);
    var dataManager = new DataManagerStub(algorithm, _feed);
    algorithm.SubscriptionManager.SetDataManager(dataManager);

    // two remote custom-data subscriptions plus two local equities, all daily
    var symbols = new List <Symbol>
    {
        algorithm.AddData <Quandl>("CBOE/VXV", Resolution.Daily).Symbol,
        algorithm.AddData <QuandlVix>("CBOE/VIX", Resolution.Daily).Symbol,
        algorithm.AddEquity("SPY", Resolution.Daily).Symbol,
        algorithm.AddEquity("AAPL", Resolution.Daily).Symbol
    };
    algorithm.PostInitialize();

    var cancellationTokenSource = new CancellationTokenSource();

    var dataPointsEmitted = 0;
    var slicesEmitted = 0;

    RunLiveDataFeed(algorithm, startDate, symbols, timeProvider, dataManager);
    Thread.Sleep(5000); // Give remote sources a handicap, so the data is available in time

    // create a timer to advance time much faster than realtime and to simulate live Quandl data file updates
    var timerInterval = TimeSpan.FromMilliseconds(100);
    var timer = Ref.Create <Timer>(null);
    timer.Value = new Timer(state =>
    {
        // stop the timer to prevent reentrancy
        timer.Value.Change(Timeout.Infinite, Timeout.Infinite);

        var currentTime = timeProvider.GetUtcNow().ConvertFromUtc(TimeZones.NewYork);
        if (currentTime.Date > endDate.Date)
        {
            // past the end date: shut everything down
            _feed.Exit();
            cancellationTokenSource.Cancel();
            return;
        }

        timeProvider.Advance(TimeSpan.FromHours(3));

        // restart the timer
        timer.Value.Change(timerInterval, timerInterval);
    }, null, TimeSpan.FromSeconds(2), timerInterval);

    try
    {
        // count slices/data points until the timer cancels the stream
        foreach (var timeSlice in _synchronizer.StreamData(cancellationTokenSource.Token))
        {
            if (timeSlice.Slice.HasData)
            {
                slicesEmitted++;
                dataPointsEmitted += timeSlice.Slice.Values.Count;
                // every data slice must carry all four symbols
                Assert.IsTrue(timeSlice.Slice.Values.Any(x => x.Symbol == symbols[0]), $"Slice doesn't contain {symbols[0]}");
                Assert.IsTrue(timeSlice.Slice.Values.Any(x => x.Symbol == symbols[1]), $"Slice doesn't contain {symbols[1]}");
                Assert.IsTrue(timeSlice.Slice.Values.Any(x => x.Symbol == symbols[2]), $"Slice doesn't contain {symbols[2]}");
                Assert.IsTrue(timeSlice.Slice.Values.Any(x => x.Symbol == symbols[3]), $"Slice doesn't contain {symbols[3]}");
            }
        }
    }
    catch (Exception exception)
    {
        Log.Trace($"Error: {exception}");
    }

    timer.Value.Dispose();
    dataManager.RemoveAllSubscriptions();
    dataQueueHandler.DisposeSafely();

    Assert.AreEqual(14, slicesEmitted);
    Assert.AreEqual(14 * symbols.Count, dataPointsEmitted);
}
// Exercises coarse universe selection rotating a single symbol each selection:
// verifies selection fires once per expected coarse time and that the algorithm
// never holds more than one active security at any point.
public void CoarseUniverseRotatesActiveSecurity()
{
    var startDate = new DateTime(2014, 3, 24);
    var endDate = new DateTime(2014, 3, 29);
    var timeProvider = new ManualTimeProvider(TimeZones.NewYork);
    timeProvider.SetCurrentTime(startDate);

    // NY times at which coarse selection is expected to emit
    var coarseTimes = new List <DateTime>
    {
        new DateTime(2014, 3, 25, 5, 0, 0, 0),
        new DateTime(2014, 3, 26, 5, 0, 0, 0),
        new DateTime(2014, 3, 27, 5, 0, 0, 0),
        new DateTime(2014, 3, 28, 5, 0, 0, 0),
        new DateTime(2014, 3, 29, 5, 0, 0, 0)
    }.ToHashSet();

    var coarseSymbols = new List <Symbol> { Symbols.SPY, Symbols.AAPL, Symbols.MSFT };

    // signaled each time the coarse selection function runs
    var emitted = new AutoResetEvent(false);

    var dataQueueHandler = new FuncDataQueueHandler(fdqh => Enumerable.Empty <BaseData>(), timeProvider);

    var feed = new TestableLiveTradingDataFeed(dataQueueHandler);
    var algorithm = new AlgorithmStub(feed);
    algorithm.SetLiveMode(true);

    // transaction handler mock: no open orders
    var mock = new Mock <ITransactionHandler>();
    mock.Setup(m => m.GetOpenOrders(It.IsAny <Func <Order, bool> >())).Returns(new List <Order>());
    algorithm.Transactions.SetOrderProcessor(mock.Object);

    var synchronizer = new TestableLiveSynchronizer(timeProvider);
    synchronizer.Initialize(algorithm, algorithm.DataManager);

    var mapFileProvider = new LocalDiskMapFileProvider();
    feed.Initialize(algorithm, new LiveNodePacket(), new BacktestingResultHandler(), mapFileProvider, new LocalDiskFactorFileProvider(mapFileProvider), new DefaultDataProvider(), algorithm.DataManager, synchronizer, new DataChannelProvider());

    var symbolIndex = 0;
    var coarseUniverseSelectionCount = 0;
    algorithm.AddUniverse(
        coarse =>
    {
        Log.Trace($"Emitted at {algorithm.Time}. Coarse {coarse.First().Time} to {coarse.First().EndTime}");
        Interlocked.Increment(ref coarseUniverseSelectionCount);
        emitted.Set();

        // rotate single symbol in universe
        if (symbolIndex == coarseSymbols.Count)
        {
            symbolIndex = 0;
        }

        return(new[] { coarseSymbols[symbolIndex++] });
    });

    algorithm.PostInitialize();

    var cancellationTokenSource = new CancellationTokenSource();
    Exception exceptionThrown = null;

    // create a timer to advance time much faster than realtime
    var timerInterval = TimeSpan.FromMilliseconds(5);
    var timer = Ref.Create <Timer>(null);
    timer.Value = new Timer(state =>
    {
        try
        {
            var currentTime = timeProvider.GetUtcNow().ConvertFromUtc(TimeZones.NewYork);

            if (currentTime.Date > endDate.Date)
            {
                // past the end date: stop the feed and the stream
                feed.Exit();
                cancellationTokenSource.Cancel();
                return;
            }

            timeProvider.Advance(TimeSpan.FromHours(1));

            var time = timeProvider.GetUtcNow().ConvertFromUtc(TimeZones.NewYork);
            algorithm.SetDateTime(timeProvider.GetUtcNow());

            if (coarseTimes.Contains(time))
            {
                // lets wait for coarse to emit
                if (!emitted.WaitOne(TimeSpan.FromMilliseconds(15000)))
                {
                    throw new TimeoutException($"Timeout waiting for coarse to emit at {time}");
                }
            }
            var activeSecuritiesCount = algorithm.ActiveSecurities.Count;

            // the rotating universe should never hold more than one security
            Assert.That(activeSecuritiesCount <= 1);

            // restart the timer
            timer.Value.Change(timerInterval, Timeout.InfiniteTimeSpan);
        }
        catch (Exception exception)
        {
            Log.Error(exception);
            exceptionThrown = exception;

            feed.Exit();
            cancellationTokenSource.Cancel();
        }
    }, null, timerInterval, Timeout.InfiniteTimeSpan);

    // drain the stream until the timer cancels it
    foreach (var _ in synchronizer.StreamData(cancellationTokenSource.Token))
    {
    }

    timer.Value.DisposeSafely();
    algorithm.DataManager.RemoveAllSubscriptions();
    dataQueueHandler.DisposeSafely();
    synchronizer.DisposeSafely();
    emitted.DisposeSafely();

    if (exceptionThrown != null)
    {
        throw new Exception("Exception in timer: ", exceptionThrown);
    }

    Assert.AreEqual(coarseTimes.Count, coarseUniverseSelectionCount, message: "coarseUniverseSelectionCount");
}
// Variant of CoarseUniverseRotatesActiveSecurity in which the data queue handler
// itself emits the coarse fundamental collections at selected times; verifies that
// the number of coarse emissions and selections both match the expected times.
public void CoarseUniverseRotatesActiveSecurity()
{
    var startDate = new DateTime(2014, 3, 24);
    var endDate = new DateTime(2014, 3, 28);
    var timeProvider = new ManualTimeProvider(TimeZones.NewYork);
    timeProvider.SetCurrentTime(startDate);

    // NY times at which coarse data is pushed through the queue handler
    var coarseTimes = new List <DateTime>
    {
        new DateTime(2014, 3, 25),
        new DateTime(2014, 3, 25, 23, 0, 0),
        new DateTime(2014, 3, 27, 1, 0, 0)
    }.ToHashSet();

    var coarseSymbols = new List <Symbol> { Symbols.SPY, Symbols.AAPL, Symbols.MSFT };

    var coarseUsaSymbol = CoarseFundamental.CreateUniverseSymbol(Market.USA, false);

    var coarseDataEmittedCount = 0;
    var lastTime = DateTime.MinValue;
    var dataQueueHandler = new FuncDataQueueHandler(fdqh =>
    {
        var time = timeProvider.GetUtcNow().ConvertFromUtc(TimeZones.NewYork);
        // react only once per distinct simulated time
        if (time != lastTime)
        {
            lastTime = time;

            if (coarseTimes.Contains(time))
            {
                // emit coarse data at selected times
                var coarseData = new BaseDataCollection { Symbol = coarseUsaSymbol };
                foreach (var symbol in coarseSymbols)
                {
                    coarseData.Data.Add(
                        new CoarseFundamental
                    {
                        Symbol = symbol,
                        Time = time,
                        Market = Market.USA,
                        Value = 100
                    });
                }
                coarseDataEmittedCount++;
                return(new List <BaseData> { coarseData });
            }
        }
        return(Enumerable.Empty <BaseData>());
    });

    var feed = new TestableLiveTradingDataFeed(dataQueueHandler);
    var algorithm = new AlgorithmStub(feed);
    algorithm.SetLiveMode(true);

    // transaction handler mock: no open orders
    var mock = new Mock <ITransactionHandler>();
    mock.Setup(m => m.GetOpenOrders(It.IsAny <Func <Order, bool> >())).Returns(new List <Order>());
    algorithm.Transactions.SetOrderProcessor(mock.Object);

    var synchronizer = new TestableLiveSynchronizer(timeProvider);
    synchronizer.Initialize(algorithm, algorithm.DataManager);

    var mapFileProvider = new LocalDiskMapFileProvider();
    feed.Initialize(algorithm, new LiveNodePacket(), new BacktestingResultHandler(), mapFileProvider, new LocalDiskFactorFileProvider(mapFileProvider), new DefaultDataProvider(), algorithm.DataManager, synchronizer);

    var symbolIndex = 0;
    var coarseUniverseSelectionCount = 0;
    algorithm.AddUniverse(
        coarse =>
    {
        coarseUniverseSelectionCount++;

        // rotate single symbol in universe
        if (symbolIndex == coarseSymbols.Count)
        {
            symbolIndex = 0;
        }

        return(new[] { coarseSymbols[symbolIndex++] });
    });

    algorithm.PostInitialize();

    var cancellationTokenSource = new CancellationTokenSource();
    Exception exceptionThrown = null;

    // create a timer to advance time much faster than realtime
    var timerInterval = TimeSpan.FromMilliseconds(50);
    var timer = Ref.Create <Timer>(null);
    timer.Value = new Timer(state =>
    {
        try
        {
            // stop the timer to prevent reentrancy
            timer.Value.Change(Timeout.Infinite, Timeout.Infinite);

            var currentTime = timeProvider.GetUtcNow().ConvertFromUtc(TimeZones.NewYork);

            if (currentTime.Date > endDate.Date)
            {
                // past the end date: stop the feed and the stream
                feed.Exit();
                cancellationTokenSource.Cancel();
                return;
            }

            timeProvider.Advance(TimeSpan.FromHours(1));

            var activeSecuritiesCount = algorithm.ActiveSecurities.Count;

            // the rotating universe should never hold more than one security
            Assert.That(activeSecuritiesCount <= 1);

            // restart the timer
            timer.Value.Change(timerInterval, timerInterval);
        }
        catch (Exception exception)
        {
            Log.Error(exception);
            exceptionThrown = exception;

            feed.Exit();
            cancellationTokenSource.Cancel();
        }
    }, null, TimeSpan.FromSeconds(1), timerInterval);

    // drain the stream until the timer cancels it
    foreach (var _ in synchronizer.StreamData(cancellationTokenSource.Token))
    {
    }

    timer.Value.Dispose();

    if (exceptionThrown != null)
    {
        throw new Exception("Exception in timer: ", exceptionThrown);
    }

    Assert.AreEqual(coarseTimes.Count, coarseDataEmittedCount);
    Assert.AreEqual(coarseTimes.Count, coarseUniverseSelectionCount);
}
/// <summary>
/// Builds a BaseDataExchange fed by a handler that drains up to nine points from
/// <paramref name="dataQueue"/> on each invocation.
/// </summary>
private static BaseDataExchange CreateExchange(ConcurrentQueue<BaseData> dataQueue)
{
    var dataQueueHandler = new FuncDataQueueHandler(handler =>
    {
        var drained = new List<BaseData>();
        // up to nine dequeues per pump; stop early when the queue empties
        for (var i = 0; i < 9; i++)
        {
            BaseData point;
            if (!dataQueue.TryDequeue(out point))
            {
                break;
            }
            drained.Add(point);
        }
        return drained;
    });

    var exchange = new BaseDataExchange();
    exchange.AddEnumerator(GetNextTicksEnumerator(dataQueueHandler));
    return exchange;
}