/// <summary>
/// Builds, initializes and starts a live trading data feed for the given algorithm.
/// The created FuncDataQueueHandler is handed back through the out parameter so the
/// caller can inspect its subscriptions.
/// </summary>
private IDataFeed RunDataFeed(IAlgorithm algorithm, out FuncDataQueueHandler dataQueueHandler, ITimeProvider timeProvider = null, Func<FuncDataQueueHandler, IEnumerable<BaseData>> getNextTicksFunction = null)
{
    // default tick factory: one bid/ask tick per subscribed symbol, stamped with the wall clock
    if (getNextTicksFunction == null)
    {
        getNextTicksFunction = fdqh => fdqh.Subscriptions
            .Select(symbol => new Tick(DateTime.Now, symbol, 1, 2) { Quantity = 1 });
    }

    // job is used to send into DataQueueHandler
    var liveJob = new LiveNodePacket();
    // result handler is used due to dependency in SubscriptionDataReader
    var backtestResultHandler = new BacktestingResultHandler();

    dataQueueHandler = new FuncDataQueueHandler(getNextTicksFunction);

    var liveFeed = new TestableLiveTradingDataFeed(dataQueueHandler, timeProvider);
    var mapFiles = new LocalDiskMapFileProvider();
    liveFeed.Initialize(algorithm, liveJob, backtestResultHandler, mapFiles, new LocalDiskFactorFileProvider(mapFiles));

    // start the feed loop on a worker task and block until it has actually begun
    var runStarted = new ManualResetEvent(false);
    Task.Factory.StartNew(() =>
    {
        runStarted.Set();
        liveFeed.Run();
    });
    runStarted.WaitOne();

    return liveFeed;
}
/// <summary>
/// Benchmarks AlgorithmManager.Run against the single-security second-resolution
/// benchmark algorithm, logging throughput as KPS (thousand time slices per second).
/// </summary>
public void TestAlgorithmManagerSpeed()
{
    // assemble the minimal set of handlers needed to drive AlgorithmManager.Run
    var manager = new AlgorithmManager(false);
    var algorithm = PerformanceBenchmarkAlgorithms.SingleSecurity_Second;
    var nodePacket = new BacktestNodePacket(1, 2, "3", null, 9m, $"{nameof(AlgorithmManagerTests)}.{nameof(TestAlgorithmManagerSpeed)}");
    var dataFeed = new MockDataFeed();
    var transactionHandler = new BacktestingTransactionHandler();
    var resultHandler = new BacktestingResultHandler();
    var realTimeHandler = new BacktestingRealTimeHandler();
    var leanManager = new NullLeanManager();
    var alphaHandler = new NullAlphaHandler();
    var cancellationToken = new CancellationToken();

    // wire everything together in the same order the engine would
    algorithm.Initialize();
    resultHandler.Initialize(nodePacket, new QuantConnect.Messaging.Messaging(), new Api.Api(), dataFeed, new BacktestingSetupHandler(), transactionHandler);
    resultHandler.SetAlgorithm(algorithm);
    transactionHandler.Initialize(algorithm, new BacktestingBrokerage(algorithm), resultHandler);
    dataFeed.Initialize(algorithm, nodePacket, resultHandler, null, null, null);

    Log.Trace("Starting algorithm manager loop to process " + dataFeed.Count + " time slices");
    var stopwatch = Stopwatch.StartNew();
    manager.Run(nodePacket, algorithm, dataFeed, transactionHandler, resultHandler, realTimeHandler, leanManager, alphaHandler, cancellationToken);
    stopwatch.Stop();

    // report throughput in thousands of time slices per second
    var kps = dataFeed.Count / 1000d / stopwatch.Elapsed.TotalSeconds;
    Log.Trace("COUNT: " + dataFeed.Count + " KPS: " + kps);
}
/// <summary>
/// Selects and creates the result handler implementation for this work.
/// </summary>
/// <param name="job">Algorithm Node Packet describing the work</param>
/// <returns>Class matching the IResultHandler interface</returns>
private static IResultHandler GetResultHandler(AlgorithmNodePacket job)
{
    // local runs always write to the console regardless of the packet's endpoint
    if (IsLocal)
    {
        return new ConsoleResultHandler(job);
    }

    switch (job.ResultEndpoint)
    {
        case ResultHandlerEndpoint.Console:
            // Local backtesting and live trading result handler route messages to the local console.
            Log.Trace("Engine.GetResultHandler(): Selected Console Output.");
            return new ConsoleResultHandler((BacktestNodePacket)job);

        case ResultHandlerEndpoint.Backtesting:
            // Backtesting routes messages to the user's browser.
            Log.Trace("Engine.GetResultHandler(): Selected Backtesting API Result Endpoint.");
            return new BacktestingResultHandler((BacktestNodePacket)job);

        case ResultHandlerEndpoint.LiveTrading:
            // Live trading routes messages to the user's browser.
            Log.Trace("Engine.GetResultHandler(): Selected Live Trading API Result Endpoint.");
            return new LiveTradingResultHandler((LiveNodePacket)job);
    }

    // NOTE(review): an unrecognized endpoint yields null, matching the original
    // default(IResultHandler) behavior — callers must tolerate a null result
    return null;
}
/// <summary>
/// Builds, initializes and starts a live trading data feed driven by the manual time
/// provider. The created FuncDataQueueHandler is returned via the out parameter.
/// </summary>
private IDataFeed RunDataFeed(out FuncDataQueueHandler dataQueueHandler, Func<FuncDataQueueHandler, IEnumerable<BaseData>> getNextTicksFunction = null, Resolution resolution = Resolution.Second, List<string> equities = null, List<string> forex = null)
{
    _algorithm.SetStartDate(_startDate);

    var lastTime = _manualTimeProvider.GetUtcNow();
    // default tick factory: emits one tick per non-universe subscription every time the
    // manual time provider has advanced since the previous call
    getNextTicksFunction = getNextTicksFunction ?? (fdqh =>
    {
        var time = _manualTimeProvider.GetUtcNow();
        if (time == lastTime)
        {
            // time did not advance, nothing new to emit
            return Enumerable.Empty<BaseData>();
        }
        lastTime = time;
        // hoist the conversion out of the Select so every tick carries the same stamp
        var tickTime = lastTime.ConvertFromUtc(TimeZones.NewYork);
        return fdqh.Subscriptions
            .Where(symbol => !_algorithm.UniverseManager.ContainsKey(symbol)) // its not a universe
            .Select(symbol => new Tick(tickTime, symbol, 1, 2)
            {
                Quantity = 1,
                // Symbol could not be in the Securities collections for the custom Universe tests.
                // AlgorithmManager is in charge of adding them, and we are not executing that code here.
                TickType = _algorithm.Securities.ContainsKey(symbol) ? _algorithm.Securities[symbol].SubscriptionDataConfig.TickType : TickType.Trade
            })
            // materialize now: the query captures the mutable 'lastTime' local, so a deferred
            // enumeration could otherwise observe a value written by a later invocation
            .ToList();
    });

    // job is used to send into DataQueueHandler
    var job = new LiveNodePacket();
    // result handler is used due to dependency in SubscriptionDataReader
    var resultHandler = new BacktestingResultHandler();
    dataQueueHandler = new FuncDataQueueHandler(getNextTicksFunction);

    var feed = new TestableLiveTradingDataFeed(dataQueueHandler, _manualTimeProvider);
    var mapFileProvider = new LocalDiskMapFileProvider();
    var fileProvider = new DefaultDataProvider();
    var dataManager = new DataManager(feed, new UniverseSelection(feed, _algorithm), _algorithm.Settings, _algorithm.TimeKeeper);
    _algorithm.SubscriptionManager.SetDataManager(dataManager);
    _algorithm.AddSecurities(resolution, equities, forex);
    feed.Initialize(_algorithm, job, resultHandler, mapFileProvider, new LocalDiskFactorFileProvider(mapFileProvider), fileProvider, dataManager);
    _algorithm.PostInitialize();

    // small handicap for the data to be pumped so TimeSlices have data of all subscriptions
    Thread.Sleep(150);

    var feedThreadStarted = new ManualResetEvent(false);
    Task.Factory.StartNew(() =>
    {
        feedThreadStarted.Set();
        feed.Run();
    });
    // wait for feed.Run to actually begin
    feedThreadStarted.WaitOne();
    return feed;
}
// Builds a live trading data feed driven by the manual time provider, wires up the
// DataManager/Synchronizer pair (stored in _dataManager/_synchronizer) and returns the
// initialized feed. The FuncDataQueueHandler is handed back through the out parameter.
private IDataFeed RunDataFeed(out FuncDataQueueHandler dataQueueHandler, Func <FuncDataQueueHandler, IEnumerable <BaseData> > getNextTicksFunction = null, Resolution resolution = Resolution.Second, List <string> equities = null, List <string> forex = null, List <string> crypto = null)
{
    _algorithm.SetStartDate(_startDate);

    var lastTime = _manualTimeProvider.GetUtcNow();
    // default tick factory: emits one tick per non-universe subscription, but only
    // when the manual time provider has advanced since the previous invocation
    getNextTicksFunction = getNextTicksFunction ?? (fdqh =>
    {
        var time = _manualTimeProvider.GetUtcNow();
        if (time == lastTime)
        {
            // time did not advance: emit nothing
            return(Enumerable.Empty <BaseData>());
        }
        lastTime = time;
        // ticks are stamped one minute behind the current time, in New York time
        var tickTime = lastTime.AddMinutes(-1).ConvertFromUtc(TimeZones.NewYork);
        return(fdqh.Subscriptions.Where(symbol => !_algorithm.UniverseManager.ContainsKey(symbol)) // its not a universe
            .Select(symbol => new Tick(tickTime, symbol, 1, 2)
            {
                Quantity = 1,
                // Symbol could not be in the Securities collections for the custom Universe tests.
                // AlgorithmManager is in charge of adding them, and we are not executing that code here.
                TickType = _algorithm.Securities.ContainsKey(symbol) ? _algorithm.Securities[symbol].SubscriptionDataConfig.TickType : TickType.Trade
            }).ToList()); // materialized so the ticks snapshot the current captured state
    });

    // job is used to send into DataQueueHandler
    var job = new LiveNodePacket();
    // result handler is used due to dependency in SubscriptionDataReader
    var resultHandler = new BacktestingResultHandler();
    dataQueueHandler = new FuncDataQueueHandler(getNextTicksFunction);
    var feed = new TestableLiveTradingDataFeed(dataQueueHandler);
    var mapFileProvider = new LocalDiskMapFileProvider();
    var fileProvider = new DefaultDataProvider();
    // databases backing the SecurityService used for security creation/universe selection
    var marketHoursDatabase = MarketHoursDatabase.FromDataFolder();
    var symbolPropertiesDataBase = SymbolPropertiesDatabase.FromDataFolder();
    var securityService = new SecurityService(_algorithm.Portfolio.CashBook, marketHoursDatabase, symbolPropertiesDataBase, _algorithm);
    _algorithm.Securities.SetSecurityService(securityService);
    _dataManager = new DataManager(feed, new UniverseSelection(_algorithm, securityService), _algorithm, _algorithm.TimeKeeper, marketHoursDatabase, true);
    _algorithm.SubscriptionManager.SetDataManager(_dataManager);
    _algorithm.AddSecurities(resolution, equities, forex, crypto);
    // synchronizer ties the data manager's subscriptions to the manual time provider
    _synchronizer = new TestableLiveSynchronizer(_manualTimeProvider);
    _synchronizer.Initialize(_algorithm, _dataManager);
    feed.Initialize(_algorithm, job, resultHandler, mapFileProvider, new LocalDiskFactorFileProvider(mapFileProvider), fileProvider, _dataManager, _synchronizer);
    _algorithm.PostInitialize();
    Thread.Sleep(150); // small handicap for the data to be pumped so TimeSlices have data of all subscriptions
    return(feed);
}
/// <summary>
/// Configures and runs a full LEAN backtest using the instance's _config settings,
/// capturing the backtesting result handler into _resultsHandler.
/// </summary>
private void LaunchLean()
{
    // push the test configuration into the engine's Config store
    Config.Set("environment", "backtesting");
    if (!string.IsNullOrEmpty(_config.AlgorithmTypeName))
    {
        Config.Set("algorithm-type-name", _config.AlgorithmTypeName);
    }
    if (!string.IsNullOrEmpty(_config.AlgorithmLocation))
    {
        Config.Set("algorithm-location", Path.GetFileName(_config.AlgorithmLocation));
    }
    if (!string.IsNullOrEmpty(_config.DataFolder))
    {
        Config.Set("data-folder", _config.DataFolder);
    }

    var systemHandlers = LeanEngineSystemHandlers.FromConfiguration(Composer.Instance);
    systemHandlers.Initialize();
    Log.LogHandler = Composer.Instance.GetExportedValueByTypeName<ILogHandler>(Config.Get("log-handler", "CompositeLogHandler"));

    LeanEngineAlgorithmHandlers algorithmHandlers;
    try
    {
        algorithmHandlers = LeanEngineAlgorithmHandlers.FromConfiguration(Composer.Instance);
        // keep a reference so the test can read the backtest results afterwards
        _resultsHandler = (BacktestingResultHandler)algorithmHandlers.Results;
    }
    catch (CompositionException compositionException)
    {
        Log.Error("Engine.Main(): Failed to load library: " + compositionException);
        throw;
    }

    string algorithmPath;
    AlgorithmNodePacket nodePacket = systemHandlers.JobQueue.NextJob(out algorithmPath);
    try
    {
        var engine = new Engine(systemHandlers, algorithmHandlers, Config.GetBool("live-mode"));
        engine.Run(nodePacket, algorithmPath);
    }
    finally
    {
        Log.Trace("Engine.Main(): Packet removed from queue: " + nodePacket.AlgorithmId);
        // clean up resources
        systemHandlers.Dispose();
        algorithmHandlers.Dispose();
        Log.LogHandler.Dispose();
    }
}
// Benchmarks the FileSystemDataFeed + Synchronizer pipeline end to end, logging
// KPS (thousand time slices per second) once per simulated month and at the end.
public void TestsFileSystemDataFeedSpeed()
{
    var job = new BacktestNodePacket();
    var resultHandler = new BacktestingResultHandler();
    var mapFileProvider = new LocalDiskMapFileProvider();
    var factorFileProvider = new LocalDiskFactorFileProvider(mapFileProvider);
    var dataProvider = new DefaultDataProvider();
    var algorithm = PerformanceBenchmarkAlgorithms.SingleSecurity_Second;
    var feed = new FileSystemDataFeed();
    // databases backing the SecurityService used by universe selection
    var marketHoursDatabase = MarketHoursDatabase.FromDataFolder();
    var symbolPropertiesDataBase = SymbolPropertiesDatabase.FromDataFolder();
    var dataPermissionManager = new DataPermissionManager();
    var dataManager = new DataManager(feed,
        new UniverseSelection(
            algorithm,
            new SecurityService(algorithm.Portfolio.CashBook,
                marketHoursDatabase,
                symbolPropertiesDataBase,
                algorithm,
                RegisteredSecurityDataTypesProvider.Null,
                new SecurityCacheProvider(algorithm.Portfolio)),
            dataPermissionManager,
            new DefaultDataProvider()),
        algorithm,
        algorithm.TimeKeeper,
        marketHoursDatabase,
        false,
        RegisteredSecurityDataTypesProvider.Null,
        dataPermissionManager);
    algorithm.SubscriptionManager.SetDataManager(dataManager);

    var synchronizer = new Synchronizer();
    synchronizer.Initialize(algorithm, dataManager);
    feed.Initialize(algorithm, job, resultHandler, mapFileProvider, factorFileProvider, dataProvider, dataManager, synchronizer, dataPermissionManager.DataChannelProvider);
    algorithm.Initialize();
    algorithm.PostInitialize();

    // stream every time slice, reporting throughput whenever the simulated month changes
    var cancellationTokenSource = new CancellationTokenSource();
    var count = 0;
    var stopwatch = Stopwatch.StartNew();
    var lastMonth = algorithm.StartDate.Month;
    foreach (var timeSlice in synchronizer.StreamData(cancellationTokenSource.Token))
    {
        if (timeSlice.Time.Month != lastMonth)
        {
            var elapsed = stopwatch.Elapsed.TotalSeconds;
            var thousands = count / 1000d;
            Log.Trace($"{DateTime.Now} - Time: {timeSlice.Time}: KPS: {thousands / elapsed}");
            lastMonth = timeSlice.Time.Month;
        }
        count++;
    }
    Log.Trace("Count: " + count);
    stopwatch.Stop();
    // tear down the feed and subscriptions before the final report
    feed.Exit();
    dataManager.RemoveAllSubscriptions();
    Log.Trace($"Elapsed time: {stopwatch.Elapsed} KPS: {count / 1000d / stopwatch.Elapsed.TotalSeconds}");
}
// Benchmarks AlgorithmManager.Run throughput using a NullSynchronizer that supplies
// pre-generated time slices; logs slices processed and KPS (thousand slices per second).
public void TestAlgorithmManagerSpeed()
{
    var algorithm = PerformanceBenchmarkAlgorithms.SingleSecurity_Second;
    var algorithmManager = new AlgorithmManager(false);
    var job = new BacktestNodePacket(1, 2, "3", null, 9m, $"{nameof(AlgorithmManagerTests)}.{nameof(TestAlgorithmManagerSpeed)}");
    var feed = new MockDataFeed();
    // databases backing the SecurityService used by universe selection
    var marketHoursDatabase = MarketHoursDatabase.FromDataFolder();
    var symbolPropertiesDataBase = SymbolPropertiesDatabase.FromDataFolder();
    var dataPermissionManager = new DataPermissionManager();
    var dataManager = new DataManager(feed,
        new UniverseSelection(
            algorithm,
            new SecurityService(algorithm.Portfolio.CashBook,
                marketHoursDatabase,
                symbolPropertiesDataBase,
                algorithm,
                RegisteredSecurityDataTypesProvider.Null,
                new SecurityCacheProvider(algorithm.Portfolio)),
            dataPermissionManager,
            new DefaultDataProvider()),
        algorithm,
        algorithm.TimeKeeper,
        marketHoursDatabase,
        false,
        RegisteredSecurityDataTypesProvider.Null,
        dataPermissionManager);
    algorithm.SubscriptionManager.SetDataManager(dataManager);

    // minimal set of handlers AlgorithmManager.Run requires
    var transactions = new BacktestingTransactionHandler();
    var results = new BacktestingResultHandler();
    var realtime = new BacktestingRealTimeHandler();
    var leanManager = new NullLeanManager();
    var alphas = new NullAlphaHandler();
    var token = new CancellationToken();
    var nullSynchronizer = new NullSynchronizer(algorithm);

    algorithm.Initialize();
    algorithm.PostInitialize();
    results.Initialize(job, new QuantConnect.Messaging.Messaging(), new Api.Api(), transactions);
    results.SetAlgorithm(algorithm, algorithm.Portfolio.TotalPortfolioValue);
    transactions.Initialize(algorithm, new BacktestingBrokerage(algorithm), results);
    feed.Initialize(algorithm, job, results, null, null, null, dataManager, null, null);

    Log.Trace("Starting algorithm manager loop to process " + nullSynchronizer.Count + " time slices");
    var sw = Stopwatch.StartNew();
    algorithmManager.Run(job, algorithm, nullSynchronizer, transactions, results, realtime, leanManager, alphas, token);
    sw.Stop();

    // shut the handlers down before reporting throughput
    realtime.Exit();
    results.Exit();
    var thousands = nullSynchronizer.Count / 1000d;
    var seconds = sw.Elapsed.TotalSeconds;
    Log.Trace("COUNT: " + nullSynchronizer.Count + " KPS: " + thousands / seconds);
}
// Verifies the live data feed emits EURUSD data: backs the feed with a
// FuncDataQueueHandler that fabricates ticks whenever the observed eastern time
// advances, then consumes the bridge for up to 10 seconds asserting data arrived.
public void EmitsData()
{
    var algorithm = new AlgorithmStub(forex: new List <string> { "EURUSD" });

    // job is used to send into DataQueueHandler
    var job = new LiveNodePacket();
    // result handler is used due to dependency in SubscriptionDataReader
    var resultHandler = new BacktestingResultHandler();
    var dataProvider = new DefaultDataProvider();

    var lastTime = DateTime.MinValue;
    var timeProvider = new RealTimeProvider();
    // fabricate 9 ticks, 100ms apart, each time the observed time changes
    var dataQueueHandler = new FuncDataQueueHandler(fdqh =>
    {
        var time = timeProvider.GetUtcNow().ConvertFromUtc(TimeZones.EasternStandard);
        if (time == lastTime)
        {
            return(Enumerable.Empty <BaseData>());
        }
        lastTime = time;
        return(Enumerable.Range(0, 9).Select(x => new Tick(time.AddMilliseconds(x * 100), Symbols.EURUSD, 1.3m, 1.2m, 1.3m)));
    });

    var feed = new TestableLiveTradingDataFeed(dataQueueHandler, timeProvider);
    var mapFileProvider = new LocalDiskMapFileProvider();
    feed.Initialize(algorithm, job, resultHandler, mapFileProvider, new LocalDiskFactorFileProvider(mapFileProvider), dataProvider);

    var feedThreadStarted = new ManualResetEvent(false);
    Task.Factory.StartNew(() =>
    {
        feedThreadStarted.Set();
        feed.Run();
    });
    // wait for feed.Run to actually begin
    feedThreadStarted.WaitOne();

    // consume the bridge and flag whether any slice carried data
    var emittedData = false;
    ConsumeBridge(feed, TimeSpan.FromSeconds(10), true, ts =>
    {
        if (ts.Slice.Count != 0)
        {
            emittedData = true;
            Console.WriteLine("HasData: " + ts.Slice.Bars[Symbols.EURUSD].EndTime);
            Console.WriteLine();
        }
    });

    Assert.IsTrue(emittedData);
}
/// <summary>
/// Runs a full LEAN engine pass for the next queued job, routing messaging through
/// the test project's handler and capturing the backtesting results into _resultsHandler.
/// </summary>
private static void LaunchLean()
{
    // route messaging through the test project's handler
    Config.Set("messaging-handler", "QuantConnect.Lean.TestProject.Messaging");
    var systemHandlers = LeanEngineSystemHandlers.FromConfiguration(Composer.Instance);
    systemHandlers.Initialize();
    Log.LogHandler = Composer.Instance.GetExportedValueByTypeName <ILogHandler>(Config.Get("log-handler", "CompositeLogHandler"));

    LeanEngineAlgorithmHandlers leanEngineAlgorithmHandlers;
    try
    {
        leanEngineAlgorithmHandlers = LeanEngineAlgorithmHandlers.FromConfiguration(Composer.Instance);
        // keep a reference so the test can read the backtest results afterwards
        _resultsHandler = (BacktestingResultHandler)leanEngineAlgorithmHandlers.Results;
    }
    catch (CompositionException compositionException)
    {
        Log.Error("Engine.Main(): Failed to load library: " + compositionException);
        throw;
    }

    string algorithmPath;
    AlgorithmNodePacket job = systemHandlers.JobQueue.NextJob(out algorithmPath);
    try
    {
        var engine = new QuantConnect.Lean.Engine.Engine(systemHandlers, leanEngineAlgorithmHandlers, Config.GetBool("live-mode"));
        engine.Run(job, algorithmPath);
    }
    finally
    {
        Log.Trace("Engine.Main(): Packet removed from queue: " + job.AlgorithmId);
        // clean up resources
        systemHandlers.Dispose();
        leanEngineAlgorithmHandlers.Dispose();
        Log.LogHandler.Dispose();
    }
}
/// <summary>
/// Bundles the outputs of a local algorithm run.
/// </summary>
/// <param name="algorithm">Name of the algorithm that was executed</param>
/// <param name="language">Language the algorithm was written in</param>
/// <param name="manager">The algorithm manager that drove the run</param>
/// <param name="results">The backtesting result handler holding the run's results</param>
public AlgorithmRunnerResults(
    string algorithm,
    Language language,
    AlgorithmManager manager,
    BacktestingResultHandler results)
{
    Algorithm = algorithm;
    Language = language;
    AlgorithmManager = manager;
    Results = results;
}
// Regression test: exiting the feed immediately after initialization (before Run)
// must not surface an unhandled exception.
public void FastExitsDoNotThrowUnhandledExceptions()
{
    var algorithm = new AlgorithmStub();

    // job is used to send into DataQueueHandler
    var job = new LiveNodePacket();
    // result handler is used due to dependency in SubscriptionDataReader
    var resultHandler = new BacktestingResultHandler();

    var feed = new TestableLiveTradingDataFeed();
    // databases backing the SecurityService used by universe selection
    var marketHoursDatabase = MarketHoursDatabase.FromDataFolder();
    var symbolPropertiesDataBase = SymbolPropertiesDatabase.FromDataFolder();
    var securityService = new SecurityService(
        algorithm.Portfolio.CashBook,
        marketHoursDatabase,
        symbolPropertiesDataBase,
        algorithm);
    algorithm.Securities.SetSecurityService(securityService);
    var dataManager = new DataManager(feed, new UniverseSelection(feed, algorithm, securityService), algorithm.Settings, algorithm.TimeKeeper, marketHoursDatabase);
    algorithm.SubscriptionManager.SetDataManager(dataManager);
    // subscribe 20 synthetic tick-resolution symbols named "0".."19"
    algorithm.AddSecurities(Resolution.Tick, Enumerable.Range(0, 20).Select(x => x.ToString()).ToList());
    var getNextTicksFunction = Enumerable.Range(0, 20).Select(x => new Tick { Symbol = SymbolCache.GetSymbol(x.ToString()) }).ToList();
    feed.DataQueueHandler = new FuncDataQueueHandler(handler => getNextTicksFunction);
    var mapFileProvider = new LocalDiskMapFileProvider();
    var fileProvider = new DefaultDataProvider();
    feed.Initialize(algorithm, job, resultHandler, mapFileProvider, new LocalDiskFactorFileProvider(mapFileProvider), fileProvider, dataManager);

    var unhandledExceptionWasThrown = false;
    try
    {
        // exit immediately without ever starting the feed loop
        feed.Exit();
    }
    catch (Exception ex)
    {
        QuantConnect.Logging.Log.Error(ex.ToString());
        unhandledExceptionWasThrown = true;
    }
    // allow any background cleanup to finish before asserting
    Thread.Sleep(500);
    Assert.IsFalse(unhandledExceptionWasThrown);
}
/// <summary>
/// Benchmarks the FileSystemDataFeed running on its own background thread, logging
/// KPS (thousand time slices per second) once per simulated month and at the end.
/// </summary>
public void TestsFileSystemDataFeedSpeed()
{
    var job = new BacktestNodePacket();
    var resultHandler = new BacktestingResultHandler();
    var mapFileProvider = new LocalDiskMapFileProvider();
    var factorFileProvider = new LocalDiskFactorFileProvider(mapFileProvider);
    var dataProvider = new DefaultDataProvider();
    var algorithm = PerformanceBenchmarkAlgorithms.SingleSecurity_Second;
    var feed = new FileSystemDataFeed();
    var dataManager = new DataManager(feed, new UniverseSelection(feed, algorithm), algorithm.Settings, algorithm.TimeKeeper);
    algorithm.SubscriptionManager.SetDataManager(dataManager);

    feed.Initialize(algorithm, job, resultHandler, mapFileProvider, factorFileProvider, dataProvider, dataManager);
    algorithm.Initialize();
    algorithm.PostInitialize();

    // run the feed on a background thread and wait until the loop has actually begun
    var feedThreadStarted = new ManualResetEvent(false);
    var dataFeedThread = new Thread(() =>
    {
        feedThreadStarted.Set();
        feed.Run();
    }) { IsBackground = true };
    dataFeedThread.Start();
    feedThreadStarted.WaitOne();

    var count = 0;
    var stopwatch = Stopwatch.StartNew();
    var lastMonth = algorithm.StartDate.Month;
    foreach (var timeSlice in feed)
    {
        if (timeSlice.Time.Month != lastMonth)
        {
            var elapsed = stopwatch.Elapsed.TotalSeconds;
            var thousands = count / 1000d;
            Console.WriteLine($"{DateTime.Now} - Time: {timeSlice.Time}: KPS: {thousands/elapsed}");
            lastMonth = timeSlice.Time.Month;
        }
        count++;
    }
    Console.WriteLine("Count: " + count);
    stopwatch.Stop();
    // stop the data feed so the background thread does not keep streaming after the test
    feed.Exit();
    Console.WriteLine($"Elapsed time: {stopwatch.Elapsed} KPS: {count/1000d/stopwatch.Elapsed.TotalSeconds}");
}
/// <summary>
/// Streams the AAPL option chain over 2014-06-06..09 and asserts the option universe
/// selection fires once per trading minute with an up-to-date underlying (780 total).
/// </summary>
public void OptionChainEnumerator(bool fillForward)
{
    var job = new BacktestNodePacket();
    var resultHandler = new BacktestingResultHandler();
    var feed = new FileSystemDataFeed();
    var algorithm = new AlgorithmStub(feed);
    algorithm.Transactions.SetOrderProcessor(new FakeOrderProcessor());
    algorithm.SetStartDate(new DateTime(2014, 06, 06));
    algorithm.SetEndDate(new DateTime(2014, 06, 09));
    algorithm.SetOptionChainProvider(new BacktestingOptionChainProvider(TestGlobals.DataCacheProvider, TestGlobals.MapFileProvider));
    var dataPermissionManager = new DataPermissionManager();
    using var synchronizer = new Synchronizer();
    synchronizer.Initialize(algorithm, algorithm.DataManager);
    feed.Initialize(algorithm, job, resultHandler, TestGlobals.MapFileProvider, TestGlobals.FactorFileProvider, TestGlobals.DataProvider, algorithm.DataManager, synchronizer, dataPermissionManager.DataChannelProvider);
    // front-month AAPL option universe
    var option = algorithm.AddOption("AAPL", fillDataForward: fillForward);
    option.SetFilter(filter => filter.FrontMonth());
    algorithm.PostInitialize();

    // guard against hangs: abort streaming after 30 seconds
    using var cancellationTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(30));
    var count = 0;
    foreach (var timeSlice in synchronizer.StreamData(cancellationTokenSource.Token))
    {
        if (!timeSlice.IsTimePulse && timeSlice.UniverseData?.Count > 0)
        {
            var baseDataCollection = timeSlice.UniverseData.Single().Value;
            if (baseDataCollection.Symbol.SecurityType == SecurityType.Option)
            {
                var nyTime = timeSlice.Time.ConvertFromUtc(algorithm.TimeZone);
                // selection is expected at each minute of the trading day, starting 9:31
                Assert.AreEqual(new TimeSpan(9, 30, 0).Add(TimeSpan.FromMinutes((count % 390) + 1)), nyTime.TimeOfDay, $"Failed on: {nyTime}");
                Assert.IsNotNull(baseDataCollection.Underlying);
                // make sure the underlying time stamp is getting updated
                Assert.AreEqual(nyTime.TimeOfDay, baseDataCollection.Underlying.EndTime.TimeOfDay);
                Assert.AreEqual(nyTime.TimeOfDay, baseDataCollection.EndTime.ConvertFromUtc(algorithm.TimeZone).TimeOfDay);
                Assert.IsTrue(!baseDataCollection.FilteredContracts.IsNullOrEmpty());
                count++;
            }
        }
    }
    feed.Exit();
    algorithm.DataManager.RemoveAllSubscriptions();
    // 9:30 to 15:59 -> 6.5 hours * 60 => 390 minutes * 2 days = 780
    Assert.AreEqual(780, count);
}
// Regression test: calling feed.Exit right after Run starts must not cause an
// unhandled exception on the feed task.
public void FastExitsDoNotThrowUnhandledExceptions()
{
    var algorithm = new AlgorithmStub();

    // job is used to send into DataQueueHandler
    var job = new LiveNodePacket();
    // result handler is used due to dependency in SubscriptionDataReader
    var resultHandler = new BacktestingResultHandler();

    var feed = new TestableLiveTradingDataFeed();
    var dataManager = new DataManager(feed, new UniverseSelection(feed, algorithm), algorithm.Settings, algorithm.TimeKeeper);
    algorithm.SubscriptionManager.SetDataManager(dataManager);
    // subscribe 20 synthetic tick-resolution symbols named "0".."19"
    algorithm.AddSecurities(Resolution.Tick, Enumerable.Range(0, 20).Select(x => x.ToString()).ToList());
    var getNextTicksFunction = Enumerable.Range(0, 20).Select(x => new Tick { Symbol = SymbolCache.GetSymbol(x.ToString()) }).ToList();
    feed.DataQueueHandler = new FuncDataQueueHandler(handler => getNextTicksFunction);
    var mapFileProvider = new LocalDiskMapFileProvider();
    var fileProvider = new DefaultDataProvider();
    feed.Initialize(algorithm, job, resultHandler, mapFileProvider, new LocalDiskFactorFileProvider(mapFileProvider), fileProvider, dataManager);

    var feedThreadStarted = new ManualResetEvent(false);
    var unhandledExceptionWasThrown = false;
    // run the feed on a worker task, capturing any exception it throws
    Task.Run(() =>
    {
        try
        {
            feedThreadStarted.Set();
            feed.Run();
        }
        catch (Exception ex)
        {
            QuantConnect.Logging.Log.Error(ex.ToString());
            unhandledExceptionWasThrown = true;
        }
    });
    feedThreadStarted.WaitOne();
    // exit as soon as the run loop has begun
    feed.Exit();
    // give the feed task time to observe the exit and surface any exception
    Thread.Sleep(1000);
    Assert.IsFalse(unhandledExceptionWasThrown);
}
// Benchmarks the raw SubscriptionDataReader enumerator stack in isolation (no data
// feed or synchronizer), logging KPS (thousand data points per second) once per month.
public void TestDataFeedEnumeratorStackSpeed()
{
    var algorithm = PerformanceBenchmarkAlgorithms.SingleSecurity_Second;
    algorithm.Initialize();
    algorithm.PostInitialize();

    var dataProvider = new DefaultDataProvider();
    var resultHandler = new BacktestingResultHandler();
    var mapFileProvider = new LocalDiskMapFileProvider();
    var factorFileProvider = new LocalDiskFactorFileProvider(mapFileProvider);
    var factory = new SubscriptionDataReaderSubscriptionEnumeratorFactory(resultHandler, mapFileProvider, factorFileProvider, dataProvider, true, enablePriceScaling: false);

    // build a subscription request for the algorithm's single security over its full date range
    var universe = algorithm.UniverseManager.Single().Value;
    var security = algorithm.Securities.Single().Value;
    var securityConfig = security.Subscriptions.First();
    var subscriptionRequest = new SubscriptionRequest(false, universe, security, securityConfig, algorithm.StartDate, algorithm.EndDate);
    var enumerator = factory.CreateEnumerator(subscriptionRequest, dataProvider);

    var count = 0;
    var stopwatch = Stopwatch.StartNew();
    var lastMonth = algorithm.StartDate.Month;
    while (enumerator.MoveNext())
    {
        var current = enumerator.Current;
        if (current == null)
        {
            // null points are logged but do not count towards throughput
            Log.Trace("ERROR: Current is null");
            continue;
        }

        if (current.Time.Month != lastMonth)
        {
            var elapsed = stopwatch.Elapsed.TotalSeconds;
            var thousands = count / 1000d;
            Log.Trace($"{DateTime.Now} - Time: {current.Time}: KPS: {thousands / elapsed}");
            lastMonth = current.Time.Month;
        }
        count++;
    }
    Log.Trace("Count: " + count);

    stopwatch.Stop();
    // dispose the enumerator stack and factory before the final report
    enumerator.Dispose();
    factory.DisposeSafely();
    Log.Trace($"Elapsed time: {stopwatch.Elapsed} KPS: {count / 1000d / stopwatch.Elapsed.TotalSeconds}");
}
/// <summary>
/// Benchmarks the FileSystemDataFeed enumerated directly on the test thread, writing
/// KPS (thousand time slices per second) to the console once per simulated month.
/// </summary>
public void TestsFileSystemDataFeedSpeed()
{
    // set up the file system data feed with the benchmark single-security algorithm
    var nodePacket = new BacktestNodePacket();
    var backtestResults = new BacktestingResultHandler();
    var mapFiles = new LocalDiskMapFileProvider();
    var factorFiles = new LocalDiskFactorFileProvider(mapFiles);
    var dataProvider = new DefaultDataProvider();
    var algorithm = PerformanceBenchmarkAlgorithms.SingleSecurity_Second;
    var feed = new FileSystemDataFeed();
    var marketHours = MarketHoursDatabase.FromDataFolder();
    var symbolProperties = SymbolPropertiesDatabase.FromDataFolder();
    var dataManager = new DataManager(
        feed,
        new UniverseSelection(feed, algorithm, new SecurityService(algorithm.Portfolio.CashBook, marketHours, symbolProperties, algorithm)),
        algorithm.Settings,
        algorithm.TimeKeeper,
        marketHours);
    algorithm.SubscriptionManager.SetDataManager(dataManager);

    feed.Initialize(algorithm, nodePacket, backtestResults, mapFiles, factorFiles, dataProvider, dataManager);
    algorithm.Initialize();
    algorithm.PostInitialize();

    // stream every time slice, reporting throughput whenever the simulated month changes
    var sliceCount = 0;
    var stopwatch = Stopwatch.StartNew();
    var lastMonth = algorithm.StartDate.Month;
    foreach (var timeSlice in feed)
    {
        if (timeSlice.Time.Month != lastMonth)
        {
            var elapsed = stopwatch.Elapsed.TotalSeconds;
            var thousands = sliceCount / 1000d;
            Console.WriteLine($"{DateTime.Now} - Time: {timeSlice.Time}: KPS: {thousands/elapsed}");
            lastMonth = timeSlice.Time.Month;
        }
        sliceCount++;
    }
    Console.WriteLine("Count: " + sliceCount);
    stopwatch.Stop();
    feed.Exit();
    Console.WriteLine($"Elapsed time: {stopwatch.Elapsed} KPS: {sliceCount/1000d/stopwatch.Elapsed.TotalSeconds}");
}
/// <summary>
/// Creates a PortfolioLooperAlgorithm seeded with $100,000 and the given orders,
/// wiring up the security service and data manager it needs for re-calculation.
/// </summary>
/// <param name="orders">Orders the looper algorithm will replay</param>
/// <returns>The initialized algorithm instance</returns>
private PortfolioLooperAlgorithm CreateAlgorithm(IEnumerable <Order> orders)
{
    var algorithm = new PortfolioLooperAlgorithm(100000m, orders);

    // Create MHDB and Symbol properties DB instances for the DataManager
    var marketHoursDatabase = MarketHoursDatabase.FromDataFolder();
    var symbolPropertiesDataBase = SymbolPropertiesDatabase.FromDataFolder();
    var dataPermissionManager = new DataPermissionManager();
    var dataManager = new DataManager(new QuantConnect.Report.MockDataFeed(),
        new UniverseSelection(
            algorithm,
            new SecurityService(algorithm.Portfolio.CashBook,
                marketHoursDatabase,
                symbolPropertiesDataBase,
                algorithm,
                RegisteredSecurityDataTypesProvider.Null,
                new SecurityCacheProvider(algorithm.Portfolio)),
            dataPermissionManager),
        algorithm,
        algorithm.TimeKeeper,
        marketHoursDatabase,
        false,
        RegisteredSecurityDataTypesProvider.Null,
        dataPermissionManager);

    var securityService = new SecurityService(algorithm.Portfolio.CashBook,
        marketHoursDatabase,
        symbolPropertiesDataBase,
        algorithm,
        RegisteredSecurityDataTypesProvider.Null,
        new SecurityCacheProvider(algorithm.Portfolio));

    // NOTE: the original also constructed a BacktestingTransactionHandler and a
    // BacktestingResultHandler here, but neither was ever used — removed.

    // Initialize security services and other properties so that we
    // don't get null reference exceptions during our re-calculation
    algorithm.Securities.SetSecurityService(securityService);
    algorithm.SubscriptionManager.SetDataManager(dataManager);

    return algorithm;
}
/// <summary>
/// Benchmarks the FileSystemDataFeed against the BenchmarkTest algorithm, printing
/// the simulated time once per month and the total elapsed time at the end.
/// </summary>
public void TestsFileSystemDataFeedSpeed()
{
    // wire up a file-system data feed against the benchmark algorithm
    var nodePacket = new BacktestNodePacket();
    var backtestResults = new BacktestingResultHandler();
    var mapFiles = new LocalDiskMapFileProvider();
    var factorFiles = new LocalDiskFactorFileProvider(mapFiles);
    var dataProvider = new DefaultDataProvider();
    var algorithm = new BenchmarkTest();
    var feed = new FileSystemDataFeed();

    feed.Initialize(algorithm, nodePacket, backtestResults, mapFiles, factorFiles, dataProvider);
    algorithm.Initialize();

    // launch the feed loop and wait until it is actually running
    var runStarted = new ManualResetEvent(false);
    Task.Factory.StartNew(() =>
    {
        runStarted.Set();
        feed.Run();
    });
    runStarted.WaitOne();

    var stopwatch = Stopwatch.StartNew();
    var lastMonth = -1;
    var sliceCount = 0;
    foreach (var timeSlice in feed)
    {
        if (timeSlice.Time.Month != lastMonth)
        {
            Console.WriteLine(DateTime.Now + " - Time: " + timeSlice.Time);
            lastMonth = timeSlice.Time.Month;
        }
        sliceCount++;
    }
    Console.WriteLine("Count: " + sliceCount);
    stopwatch.Stop();
    Console.WriteLine("Elapsed time: " + stopwatch.Elapsed);
}
/// <summary>
/// Runs the named regression algorithm end-to-end through the LEAN engine and asserts the
/// run's final status, final statistics and (optionally) alpha statistics against expected
/// values. On success the regression log files are copied to "./passed/" for later diffing.
/// </summary>
/// <param name="algorithm">Algorithm type name to run (also used as the backtest id)</param>
/// <param name="expectedStatistics">Expected statistic key/value pairs; each must match exactly</param>
/// <param name="expectedAlphaStatistics">Expected alpha statistics, or null to skip those assertions</param>
/// <param name="language">Algorithm language; selects the .py file or compiled .dll location</param>
/// <param name="expectedFinalStatus">Expected terminal <see cref="AlgorithmStatus"/> of the run</param>
/// <param name="startDate">Optional backtest period start override</param>
/// <param name="endDate">Optional backtest period end override</param>
/// <param name="setupHandler">Setup handler type name (defaults to the regression wrapper)</param>
/// <param name="initialCash">Optional initial cash override, applied in USD</param>
/// <returns>The algorithm name, language, manager and result handler of the completed run</returns>
public static AlgorithmRunnerResults RunLocalBacktest(
    string algorithm,
    Dictionary<string, string> expectedStatistics,
    AlphaRuntimeStatistics expectedAlphaStatistics,
    Language language,
    AlgorithmStatus expectedFinalStatus,
    DateTime? startDate = null,
    DateTime? endDate = null,
    string setupHandler = "RegressionSetupHandlerWrapper",
    decimal? initialCash = null)
{
    AlgorithmManager algorithmManager = null;
    var statistics = new Dictionary<string, string>();
    var alphaStatistics = new AlphaRuntimeStatistics(new TestAccountCurrencyProvider());
    BacktestingResultHandler results = null;

    // Fresh composer/symbol-cache state so earlier tests can't leak handlers into this run
    Composer.Instance.Reset();
    SymbolCache.Clear();

    var ordersLogFile = string.Empty;
    var logFile = $"./regression/{algorithm}.{language.ToLower()}.log";
    Directory.CreateDirectory(Path.GetDirectoryName(logFile));
    File.Delete(logFile);

    try
    {
        // set the configuration up
        Config.Set("algorithm-type-name", algorithm);
        Config.Set("live-mode", "false");
        Config.Set("environment", "");
        Config.Set("messaging-handler", "QuantConnect.Messaging.Messaging");
        Config.Set("job-queue-handler", "QuantConnect.Queues.JobQueue");
        Config.Set("setup-handler", setupHandler);
        Config.Set("history-provider", "RegressionHistoryProviderWrapper");
        Config.Set("api-handler", "QuantConnect.Api.Api");
        Config.Set("result-handler", "QuantConnect.Lean.Engine.Results.RegressionResultHandler");
        Config.Set("algorithm-language", language.ToString());
        Config.Set("algorithm-location",
            language == Language.Python
                ? "../../../Algorithm.Python/" + algorithm + ".py"
                : "QuantConnect.Algorithm." + language + ".dll");

        // Store initial log variables so they can be restored after the run
        var initialLogHandler = Log.LogHandler;
        var initialDebugEnabled = Log.DebuggingEnabled;

        // Log handlers specific to this test function
        var newLogHandlers = new ILogHandler[] { new ConsoleErrorLogHandler(), new FileLogHandler(logFile, false) };

        using (Log.LogHandler = new CompositeLogHandler(newLogHandlers))
        using (var algorithmHandlers = LeanEngineAlgorithmHandlers.FromConfiguration(Composer.Instance))
        using (var systemHandlers = LeanEngineSystemHandlers.FromConfiguration(Composer.Instance))
        using (var workerThread = new TestWorkerThread())
        {
            Log.DebuggingEnabled = true;

            Log.Trace("");
            Log.Trace("{0}: Running " + algorithm + "...", DateTime.UtcNow);
            Log.Trace("");

            // run the algorithm in its own thread (Wait() makes this effectively synchronous,
            // but the engine expects to be driven off the caller's thread)
            var engine = new Lean.Engine.Engine(systemHandlers, algorithmHandlers, false);
            Task.Factory.StartNew(() =>
            {
                try
                {
                    string algorithmPath;
                    var job = (BacktestNodePacket)systemHandlers.JobQueue.NextJob(out algorithmPath);
                    job.BacktestId = algorithm;
                    job.PeriodStart = startDate;
                    job.PeriodFinish = endDate;
                    if (initialCash.HasValue)
                    {
                        job.CashAmount = new CashAmount(initialCash.Value, Currencies.USD);
                    }
                    algorithmManager = new AlgorithmManager(false, job);
                    systemHandlers.LeanManager.Initialize(systemHandlers, algorithmHandlers, job, algorithmManager);
                    engine.Run(job, algorithmManager, algorithmPath, workerThread);
                    ordersLogFile = ((RegressionResultHandler)algorithmHandlers.Results).LogFilePath;
                }
                catch (Exception e)
                {
                    Log.Trace($"Error in AlgorithmRunner task: {e}");
                }
            }).Wait();

            // Capture results/statistics before the handlers are disposed by the using blocks
            var backtestingResultHandler = (BacktestingResultHandler)algorithmHandlers.Results;
            results = backtestingResultHandler;
            statistics = backtestingResultHandler.FinalStatistics;

            var defaultAlphaHandler = (DefaultAlphaHandler)algorithmHandlers.Alphas;
            alphaStatistics = defaultAlphaHandler.RuntimeStatistics;
        }

        // Reset settings to initial values
        Log.LogHandler = initialLogHandler;
        Log.DebuggingEnabled = initialDebugEnabled;
    }
    catch (Exception ex)
    {
        // A runtime error is only unexpected if the test didn't ask for RuntimeError as the outcome
        if (expectedFinalStatus != AlgorithmStatus.RuntimeError)
        {
            Log.Error("{0} {1}", ex.Message, ex.StackTrace);
        }
    }

    if (algorithmManager?.State != expectedFinalStatus)
    {
        Assert.Fail($"Algorithm state should be {expectedFinalStatus} and is: {algorithmManager?.State}");
    }

    foreach (var stat in expectedStatistics)
    {
        Assert.AreEqual(true, statistics.ContainsKey(stat.Key), "Missing key: " + stat.Key);
        Assert.AreEqual(stat.Value, statistics[stat.Key], "Failed on " + stat.Key);
    }

    if (expectedAlphaStatistics != null)
    {
        AssertAlphaStatistics(expectedAlphaStatistics, alphaStatistics, s => s.MeanPopulationScore.Direction);
        AssertAlphaStatistics(expectedAlphaStatistics, alphaStatistics, s => s.MeanPopulationScore.Magnitude);
        AssertAlphaStatistics(expectedAlphaStatistics, alphaStatistics, s => s.RollingAveragedPopulationScore.Direction);
        AssertAlphaStatistics(expectedAlphaStatistics, alphaStatistics, s => s.RollingAveragedPopulationScore.Magnitude);
        AssertAlphaStatistics(expectedAlphaStatistics, alphaStatistics, s => s.LongShortRatio);
        AssertAlphaStatistics(expectedAlphaStatistics, alphaStatistics, s => s.TotalInsightsClosed);
        AssertAlphaStatistics(expectedAlphaStatistics, alphaStatistics, s => s.TotalInsightsGenerated);
        AssertAlphaStatistics(expectedAlphaStatistics, alphaStatistics, s => s.TotalAccumulatedEstimatedAlphaValue);
        AssertAlphaStatistics(expectedAlphaStatistics, alphaStatistics, s => s.TotalInsightsAnalysisCompleted);
    }

    // we successfully passed the regression test, copy the log file so we don't have to continually
    // re-run master in order to compare against a passing run
    var passedFile = logFile.Replace("./regression/", "./passed/");
    Directory.CreateDirectory(Path.GetDirectoryName(passedFile));
    File.Delete(passedFile);
    File.Copy(logFile, passedFile);

    var passedOrderLogFile = ordersLogFile.Replace("./regression/", "./passed/");
    // NOTE(review): this creates the directory of passedFile again, not of passedOrderLogFile —
    // possibly intended to be Path.GetDirectoryName(passedOrderLogFile); confirm before changing
    Directory.CreateDirectory(Path.GetDirectoryName(passedFile));
    File.Delete(passedOrderLogFile);
    if (File.Exists(ordersLogFile))
    {
        File.Copy(ordersLogFile, passedOrderLogFile);
    }

    return (new AlgorithmRunnerResults(algorithm, language, algorithmManager, results));
}
/// <summary>
/// Creates an optimizer result handler that shadows the provided backtesting result handler.
/// </summary>
/// <param name="handler">The <see cref="BacktestingResultHandler"/> to shadow; must not be null</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="handler"/> is null</exception>
public OptimizerResultHandler(BacktestingResultHandler handler)
{
    // Fail fast on a null handler instead of surfacing a NullReferenceException later
    _shadow = handler ?? throw new ArgumentNullException(nameof(handler));
    Patch();
}
/// <summary>
/// Creates an optimizer result handler backed by a freshly constructed
/// <see cref="BacktestingResultHandler"/>.
/// </summary>
public OptimizerResultHandler()
    : this(new BacktestingResultHandler())
{
}
/// <summary>
/// Streams two days of minute data for an ES future chain universe and asserts that universe
/// data arrives at every expected open-market minute, for both the chain and (when present)
/// the continuous future universe.
/// </summary>
/// <param name="fillForward">Whether the future subscription is added with fill-forward enabled</param>
public void FutureChainEnumerator(bool fillForward)
{
    var job = new BacktestNodePacket();
    var resultHandler = new BacktestingResultHandler();
    var feed = new FileSystemDataFeed();
    var algorithm = new AlgorithmStub(feed);
    algorithm.Transactions.SetOrderProcessor(new FakeOrderProcessor());
    algorithm.SetStartDate(new DateTime(2013, 10, 07));
    algorithm.SetEndDate(new DateTime(2013, 10, 08));
    algorithm.SetFutureChainProvider(new BacktestingFutureChainProvider(TestGlobals.DataCacheProvider));
    var dataPermissionManager = new DataPermissionManager();
    using var synchronizer = new Synchronizer();
    synchronizer.Initialize(algorithm, algorithm.DataManager);
    feed.Initialize(algorithm, job, resultHandler, TestGlobals.MapFileProvider,
        TestGlobals.FactorFileProvider, TestGlobals.DataProvider, algorithm.DataManager,
        synchronizer, dataPermissionManager.DataChannelProvider);

    var future = algorithm.AddFuture("ES", fillDataForward: fillForward);
    future.SetFilter(0, 300);
    algorithm.PostInitialize();

    // Safety valve: abort the stream if it hasn't finished within 30 seconds
    using var cancellationTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(30));

    // count doubles as a running minute index; the expected wall-clock minute is derived from it
    var count = 0L;
    var lastMonth = algorithm.StartDate.Month;
    foreach (var timeSlice in synchronizer.StreamData(cancellationTokenSource.Token))
    {
        if (!timeSlice.IsTimePulse && timeSlice.UniverseData?.Count > 0)
        {
            var nyTime = timeSlice.Time.ConvertFromUtc(algorithm.TimeZone);
            var currentExpectedTime = new TimeSpan(0, 0, 0).Add(TimeSpan.FromMinutes(count % (24 * 60)));
            // Advance the expected minute past any closed-market minutes
            // (DateTimeIsOpen is checked one minute back — the bar covers the preceding minute)
            while (!future.Exchange.DateTimeIsOpen(nyTime.Date.Add(currentExpectedTime).AddMinutes(-1)))
            {
                // skip closed market times
                currentExpectedTime = new TimeSpan(0, 0, 0).Add(TimeSpan.FromMinutes(++count % (24 * 60)));
            }

            // Deterministic ordering: the chain universe sorts before the continuous future
            var universeData = timeSlice.UniverseData.OrderBy(kvp => kvp.Key.Configuration.Symbol).ToList();
            var chainData = universeData[0].Value;

            Log.Trace($"{nyTime}. Count: {count}. Universe Data Count {universeData.Count}");
            Assert.AreEqual(currentExpectedTime, nyTime.TimeOfDay, $"Failed on: {nyTime}. Count: {count}");
            Assert.IsTrue(timeSlice.UniverseData.All(kvp => kvp.Value.EndTime.ConvertFromUtc(algorithm.TimeZone).TimeOfDay == nyTime.TimeOfDay));
            if (chainData.FilteredContracts.IsNullOrEmpty())
            {
                // The only minute allowed to carry an empty filtered-contract set is the final one
                Assert.AreEqual(new DateTime(2013, 10, 09), nyTime,
                    $"Unexpected chain FilteredContracts was empty on {nyTime}");
            }

            if (universeData.Count == 1)
            {
                // the chain
                Assert.IsTrue(universeData.Any(kvp => kvp.Key.Configuration.Symbol == future.Symbol));
            }
            else
            {
                // we have 2 universe data, the chain and the continuous future
                Assert.AreEqual(2, universeData.Count);
                Assert.IsTrue(universeData.All(kvp => kvp.Key.Configuration.Symbol.SecurityType == SecurityType.Future));
                Assert.IsTrue(universeData.Any(kvp => kvp.Key.Configuration.Symbol == future.Symbol));
                Assert.IsTrue(universeData.Any(kvp => kvp.Key.Configuration.Symbol.ID.Symbol.Contains("CONTINUOUS", StringComparison.InvariantCultureIgnoreCase)));

                var continuousData = universeData[1].Value;
                Assert.AreEqual(currentExpectedTime, nyTime.TimeOfDay, $"Failed on: {nyTime}");
                Assert.IsTrue(!chainData.FilteredContracts.IsNullOrEmpty());
            }

            count++;
        }
    }
    feed.Exit();
    algorithm.DataManager.RemoveAllSubscriptions();

    // 2 days worth of minute data
    Assert.AreEqual(24 * 2 * 60 + 1, count);
}
/// <summary>
/// Configures LEAN from the instance's <c>_config</c> values and launches a full engine run,
/// capturing the backtesting result handler into <c>_resultsHandler</c>. Logs go to a unique
/// per-run file so repeated launches don't clobber each other.
/// </summary>
private void LaunchLean()
{
    Config.Set("environment", "backtesting");

    // Only override configuration keys the caller actually supplied
    if (!string.IsNullOrEmpty(_config.AlgorithmTypeName))
    {
        Config.Set("algorithm-type-name", _config.AlgorithmTypeName);
    }
    if (!string.IsNullOrEmpty(_config.AlgorithmLocation))
    {
        Config.Set("algorithm-location", Path.GetFileName(_config.AlgorithmLocation));
    }
    if (!string.IsNullOrEmpty(_config.DataFolder))
    {
        Config.Set("data-folder", _config.DataFolder);
    }
    if (_config.StartDate.HasValue)
    {
        // round-trip ("O") format keeps the dates culture-independent and parseable
        Config.Set("startDate", _config.StartDate.Value.ToString("O"));
    }
    if (_config.EndDate.HasValue)
    {
        Config.Set("endDate", _config.EndDate.Value.ToString("O"));
    }

    var systemHandlers = LeanEngineSystemHandlers.FromConfiguration(Composer.Instance);
    systemHandlers.Initialize();

    var logFileName = "log_" + Guid.NewGuid().ToString() + ".txt";
    var logHandlers = new ILogHandler[] { new FileLogHandler(logFileName, true) };

    using (Log.LogHandler = new CompositeLogHandler(logHandlers))
    {
        LeanEngineAlgorithmHandlers leanEngineAlgorithmHandlers;
        try
        {
            leanEngineAlgorithmHandlers = LeanEngineAlgorithmHandlers.FromConfiguration(Composer.Instance);
            _resultsHandler = (BacktestingResultHandler)leanEngineAlgorithmHandlers.Results;
        }
        catch (CompositionException compositionException)
        {
            Log.Error("Engine.Main(): Failed to load library: " + compositionException);
            throw;
        }

        string algorithmPath;
        AlgorithmNodePacket job = systemHandlers.JobQueue.NextJob(out algorithmPath);
        try
        {
            // fixed: local was named "_engine", which is the private-field naming convention
            var engine = new Engine(systemHandlers, leanEngineAlgorithmHandlers, Config.GetBool("live-mode"));
            engine.Run(job, algorithmPath);
        }
        finally
        {
            Log.Trace("Engine.Main(): Packet removed from queue: " + job.AlgorithmId);

            // clean up resources
            systemHandlers.Dispose();
            leanEngineAlgorithmHandlers.Dispose();
        }
    }
}
/// <summary>
/// Creates an instance of the PortfolioLooper class: builds a minimal LEAN stack (history
/// provider, data manager, security service, mock feed) around a
/// <see cref="PortfolioLooperAlgorithm"/> seeded from the provided orders, then pre-fetches
/// any currency-conversion history needed to value FX/Crypto holdings in the account currency.
/// </summary>
/// <param name="startingCash">Starting cash for the algorithm's portfolio</param>
/// <param name="orders">Orders to replay through the algorithm</param>
/// <param name="resolution">Optional parameter to override default resolution (Hourly)</param>
private PortfolioLooper(double startingCash, List<Order> orders, Resolution resolution = _resolution)
{
    // Initialize the providers that the HistoryProvider requires
    var factorFileProvider = Composer.Instance.GetExportedValueByTypeName<IFactorFileProvider>("LocalDiskFactorFileProvider");
    var mapFileProvider = Composer.Instance.GetExportedValueByTypeName<IMapFileProvider>("LocalDiskMapFileProvider");
    var dataCacheProvider = new ZipDataCacheProvider(new DefaultDataProvider(), false);
    var historyProvider = Composer.Instance.GetExportedValueByTypeName<IHistoryProvider>("SubscriptionDataReaderHistoryProvider");
    historyProvider.Initialize(new HistoryProviderInitializeParameters(null, null, null, dataCacheProvider, mapFileProvider, factorFileProvider, (_) => { }, false));

    Algorithm = new PortfolioLooperAlgorithm((decimal)startingCash, orders);
    Algorithm.SetHistoryProvider(historyProvider);

    // Dummy LEAN datafeed classes and initializations that essentially do nothing
    var job = new BacktestNodePacket(1, 2, "3", null, 9m, $"");
    var feed = new MockDataFeed();

    // Create MHDB and Symbol properties DB instances for the DataManager
    var marketHoursDatabase = MarketHoursDatabase.FromDataFolder();
    var symbolPropertiesDataBase = SymbolPropertiesDatabase.FromDataFolder();

    _dataManager = new DataManager(feed,
        new UniverseSelection(
            Algorithm,
            new SecurityService(Algorithm.Portfolio.CashBook,
                marketHoursDatabase,
                symbolPropertiesDataBase,
                Algorithm,
                RegisteredSecurityDataTypesProvider.Null,
                new SecurityCacheProvider(Algorithm.Portfolio))),
        Algorithm,
        Algorithm.TimeKeeper,
        marketHoursDatabase,
        false,
        RegisteredSecurityDataTypesProvider.Null);

    _securityService = new SecurityService(Algorithm.Portfolio.CashBook,
        marketHoursDatabase,
        symbolPropertiesDataBase,
        Algorithm,
        RegisteredSecurityDataTypesProvider.Null,
        new SecurityCacheProvider(Algorithm.Portfolio));

    var transactions = new BacktestingTransactionHandler();
    var results = new BacktestingResultHandler();

    // Initialize security services and other properties so that we
    // don't get null reference exceptions during our re-calculation
    Algorithm.Securities.SetSecurityService(_securityService);
    Algorithm.SubscriptionManager.SetDataManager(_dataManager);

    // Initializes all the proper Securities from the orders provided by the user
    Algorithm.FromOrders(orders);

    // Initialize the algorithm
    Algorithm.Initialize();
    Algorithm.PostInitialize();

    // More initialization, this time with Algorithm and other misc. classes
    // (order matters: results must know the algorithm before transactions initialize against it)
    results.Initialize(job, new Messaging.Messaging(), new Api.Api(), transactions);
    results.SetAlgorithm(Algorithm, Algorithm.Portfolio.TotalPortfolioValue);
    transactions.Initialize(Algorithm, new BacktestingBrokerage(Algorithm), results);
    feed.Initialize(Algorithm, job, results, null, null, null, _dataManager, null);

    // Begin setting up the currency conversion feed if needed
    var coreSecurities = Algorithm.Securities.Values.ToList();
    if (coreSecurities.Any(x => x.Symbol.SecurityType == SecurityType.Forex || x.Symbol.SecurityType == SecurityType.Crypto))
    {
        BaseSetupHandler.SetupCurrencyConversions(Algorithm, _dataManager.UniverseSelection);
        // Securities added by SetupCurrencyConversions (i.e. not in the original core set)
        var conversionSecurities = Algorithm.Securities.Values.Where(s => !coreSecurities.Contains(s)).ToList();

        // Skip the history request if we don't need to convert anything
        if (conversionSecurities.Any())
        {
            // Point-in-time Slices to convert FX and Crypto currencies to the portfolio currency
            _conversionSlices = GetHistory(Algorithm, conversionSecurities, resolution);
        }
    }
}