/// <summary>
/// Get an instance of the data feed handler we're requesting for this work.
/// </summary>
/// <param name="algorithm">User algorithm to scan for securities</param>
/// <param name="job">Algorithm Node Packet</param>
/// <returns>Class matching IDataFeed Interface</returns>
private static IDataFeed GetDataFeedHandler(IAlgorithm algorithm, AlgorithmNodePacket job)
{
    IDataFeed dataFeed = null;
    switch (job.DataEndpoint)
    {
        // Backtesting endpoint:
        case DataFeedEndpoint.Backtesting:
            dataFeed = new BacktestingDataFeed(algorithm, (BacktestNodePacket)job);
            Log.Trace("Engine.GetDataFeedHandler(): Selected Backtesting Datafeed");
            break;

        // Database-backed data source:
        case DataFeedEndpoint.Database:
            dataFeed = new DatabaseDataFeed(algorithm, (BacktestNodePacket)job);
            Log.Trace("Engine.GetDataFeedHandler(): Selected Database Datafeed");
            break;

        // Operation from local files:
        case DataFeedEndpoint.FileSystem:
            dataFeed = new FileSystemDataFeed(algorithm, (BacktestNodePacket)job);
            Log.Trace("Engine.GetDataFeedHandler(): Selected FileSystem Datafeed");
            break;

        // Live trading data source; the queue handler implementation is resolved
        // through the composer from the "data-queue-handler" config key.
        case DataFeedEndpoint.LiveTrading:
            var dataQueueHandler = Composer.Instance.GetExportedValueByTypeName<IDataQueueHandler>(
                Config.Get("data-queue-handler", "LiveDataQueue"));
            dataFeed = new LiveTradingDataFeed(algorithm, (LiveNodePacket)job, dataQueueHandler);
            Log.Trace("Engine.GetDataFeedHandler(): Selected LiveTrading Datafeed");
            break;
    }
    // NOTE: an unhandled endpoint falls through and returns null; callers must cope with that.
    return dataFeed;
}
/// <summary>
/// Get an instance of the data feed handler we're requesting for this work.
/// </summary>
/// <param name="algorithm">User algorithm to scan for securities</param>
/// <param name="job">Algorithm Node Packet</param>
/// <returns>Class matching IDataFeed Interface</returns>
private static IDataFeed GetDataFeedHandler(IAlgorithm algorithm, AlgorithmNodePacket job)
{
    IDataFeed dataFeed = null;
    switch (job.DataEndpoint)
    {
        // Backtesting endpoint:
        case DataFeedEndpoint.Backtesting:
            dataFeed = new BacktestingDataFeed(algorithm, (BacktestNodePacket)job);
            Log.Trace("Engine.GetDataFeedHandler(): Selected Backtesting Datafeed");
            break;

        // Operation from local files:
        case DataFeedEndpoint.FileSystem:
            dataFeed = new FileSystemDataFeed(algorithm, (BacktestNodePacket)job);
            Log.Trace("Engine.GetDataFeedHandler(): Selected FileSystem Datafeed");
            break;

        // Live trading data source (paper trading implementation):
        case DataFeedEndpoint.LiveTrading:
            dataFeed = new PaperTradingDataFeed(algorithm, (LiveNodePacket)job);
            Log.Trace("Engine.GetDataFeedHandler(): Selected LiveTrading Datafeed");
            break;

        // Test data feed; keep the concrete reference so we can log its fast-forward factor.
        case DataFeedEndpoint.Test:
            var testFeed = new TestLiveTradingDataFeed(algorithm, (LiveNodePacket)job);
            dataFeed = testFeed;
            Log.Trace("Engine.GetDataFeedHandler(): Selected Test Datafeed at " + testFeed.FastForward + "x");
            break;
    }
    // NOTE: an unhandled endpoint falls through and returns null; callers must cope with that.
    return dataFeed;
}
/// <summary>
/// Get an instance of the data feed handler we're requesting for this work.
/// </summary>
/// <param name="algorithm">User algorithm to scan for securities</param>
/// <param name="job">Algorithm Node Packet</param>
/// <param name="brokerage">Brokerage instance to avoid access token duplication</param>
/// <returns>Class matching IDataFeed Interface</returns>
private static IDataFeed GetDataFeedHandler(IAlgorithm algorithm, IBrokerage brokerage, AlgorithmNodePacket job)
{
    IDataFeed dataFeed = null;
    switch (job.DataEndpoint)
    {
        // Backtesting endpoint:
        case DataFeedEndpoint.Backtesting:
            dataFeed = new BacktestingDataFeed(algorithm, (BacktestNodePacket)job);
            Log.Trace("Engine.GetDataFeedHandler(): Selected Backtesting Datafeed");
            break;

        // Operation from local files:
        case DataFeedEndpoint.FileSystem:
            dataFeed = new FileSystemDataFeed(algorithm, (BacktestNodePacket)job);
            Log.Trace("Engine.GetDataFeedHandler(): Selected FileSystem Datafeed");
            break;

        // Tradier data source; reuses the brokerage instance so the access token is not duplicated.
        case DataFeedEndpoint.Tradier:
            dataFeed = new TradierDataFeed(algorithm, brokerage, (LiveNodePacket)job);
            Log.Trace("Engine.GetDataFeedHandler(): Selected Tradier Datafeed");
            break;
    }
    // NOTE: an unhandled endpoint falls through and returns null; callers must cope with that.
    return dataFeed;
}
/// <summary>
/// Launches a Lean Engine using a parameter
/// </summary>
/// <param name="val">The parameter (algorithm type name) to use when launching lean.</param>
private void LaunchLean(string val)
{
    Config.Set("environment", "backtesting");

    // Set the algorithm in Config. Here is where you can customize Config settings
    Config.Set("algorithm-type-name", val);

    _jobQueue = new JobQueue();
    _notify = new Messaging();
    _api = new Api();

    /************ Comment one of the two following lines to select which ResultHandler to use ***********/
    _resultshandler = new OptimizationResultHandler();
    //_resultshandler = new ConsoleResultHandler();

    _dataFeed = new FileSystemDataFeed();
    _setup = new ConsoleSetupHandler();
    _realTime = new BacktestingRealTimeHandler();
    _historyProvider = new SubscriptionDataReaderHistoryProvider();
    _transactions = new BacktestingTransactionHandler();

    // Set the Log.LogHandler to only write to the log.txt file.
    // This setting avoids writing Log messages to the console.
    // (fixed: dropped the redundant (ILogHandler) cast — FileLogHandler already implements it)
    Log.LogHandler = new FileLogHandler();
    Log.DebuggingEnabled = false;   // Set this property to true for lots of messages
    Log.DebuggingLevel = 1;         // A reminder that the default level for Log.Debug message is 1

    var systemHandlers = new LeanEngineSystemHandlers(_jobQueue, _api, _notify);
    systemHandlers.Initialize();

    var algorithmHandlers = new LeanEngineAlgorithmHandlers(_resultshandler, _setup, _dataFeed, _transactions, _realTime, _historyProvider);

    string algorithmPath;
    AlgorithmNodePacket job = systemHandlers.JobQueue.NextJob(out algorithmPath);
    try
    {
        // fixed: the local was named "_engine" — the underscore prefix is reserved for fields
        var engine = new Engine(systemHandlers, algorithmHandlers, Config.GetBool("live-mode"));
        engine.Run(job, algorithmPath);
    }
    finally
    {
        /* The JobQueue.AcknowledgeJob only asks for any key to close the window.
         * We do not want that behavior, so we comment out this line so that multiple Leans will run
         *
         * The alternative is to comment out Console.Read(); the line in JobQueue class. */
        //systemHandlers.JobQueue.AcknowledgeJob(job);
        Log.Trace("Engine.Main(): Packet removed from queue: " + job.AlgorithmId);

        // clean up resources
        systemHandlers.Dispose();
        algorithmHandlers.Dispose();
        Log.LogHandler.Dispose();
    }
}
/// <summary>
/// Benchmarks the FileSystemDataFeed by streaming a second-resolution single-security
/// backtest through the Synchronizer and reporting time slices per second (KPS).
/// </summary>
public void TestsFileSystemDataFeedSpeed()
{
    var job = new BacktestNodePacket();
    var resultHandler = new BacktestingResultHandler();
    var mapFileProvider = new LocalDiskMapFileProvider();
    var factorFileProvider = new LocalDiskFactorFileProvider(mapFileProvider);
    var dataProvider = new DefaultDataProvider();
    var algorithm = PerformanceBenchmarkAlgorithms.SingleSecurity_Second;
    var feed = new FileSystemDataFeed();
    var marketHoursDatabase = MarketHoursDatabase.FromDataFolder();
    var symbolPropertiesDataBase = SymbolPropertiesDatabase.FromDataFolder();
    var dataPermissionManager = new DataPermissionManager();

    // fixed: reuse the existing dataProvider instead of constructing a second
    // DefaultDataProvider just for the universe selection
    var dataManager = new DataManager(feed,
        new UniverseSelection(
            algorithm,
            new SecurityService(algorithm.Portfolio.CashBook,
                marketHoursDatabase,
                symbolPropertiesDataBase,
                algorithm,
                RegisteredSecurityDataTypesProvider.Null,
                new SecurityCacheProvider(algorithm.Portfolio)),
            dataPermissionManager,
            dataProvider),
        algorithm,
        algorithm.TimeKeeper,
        marketHoursDatabase,
        false,
        RegisteredSecurityDataTypesProvider.Null,
        dataPermissionManager);
    algorithm.SubscriptionManager.SetDataManager(dataManager);

    var synchronizer = new Synchronizer();
    synchronizer.Initialize(algorithm, dataManager);
    feed.Initialize(algorithm, job, resultHandler, mapFileProvider, factorFileProvider, dataProvider, dataManager, synchronizer, dataPermissionManager.DataChannelProvider);
    algorithm.Initialize();
    algorithm.PostInitialize();

    var cancellationTokenSource = new CancellationTokenSource();
    var count = 0;
    var stopwatch = Stopwatch.StartNew();
    var lastMonth = algorithm.StartDate.Month;
    foreach (var timeSlice in synchronizer.StreamData(cancellationTokenSource.Token))
    {
        // emit a throughput sample once per simulated month
        if (timeSlice.Time.Month != lastMonth)
        {
            var elapsed = stopwatch.Elapsed.TotalSeconds;
            var thousands = count / 1000d;
            Log.Trace($"{DateTime.Now} - Time: {timeSlice.Time}: KPS: {thousands / elapsed}");
            lastMonth = timeSlice.Time.Month;
        }
        count++;
    }

    Log.Trace("Count: " + count);

    stopwatch.Stop();
    feed.Exit();
    dataManager.RemoveAllSubscriptions();
    Log.Trace($"Elapsed time: {stopwatch.Elapsed} KPS: {count / 1000d / stopwatch.Elapsed.TotalSeconds}");
}
/// <summary>
/// Configures and launches a Lean Engine backtest of the "EMATest" algorithm,
/// wiring the desktop result handler and composing the remaining algorithm
/// handlers from configuration.
/// </summary>
private void LaunchLean()
{
    Config.Set("environment", "backtesting");
    string algorithm = "EMATest";
    Config.Set("algorithm-type-name", algorithm);

    _jobQueue = new JobQueue();
    _notify = new Messaging();
    _api = new Api();
    _resultshandler = new DesktopResultHandler();
    _dataFeed = new FileSystemDataFeed();
    _setup = new ConsoleSetupHandler();
    _realTime = new BacktestingRealTimeHandler();
    _historyProvider = new SubscriptionDataReaderHistoryProvider();
    _transactions = new BacktestingTransactionHandler();

    var systemHandlers = new LeanEngineSystemHandlers(_jobQueue, _api, _notify);
    systemHandlers.Initialize();

    // var algorithmHandlers = new LeanEngineAlgorithmHandlers (_resultshandler, _setup, _dataFeed, _transactions, _realTime, _historyProvider);
    Log.LogHandler = Composer.Instance.GetExportedValueByTypeName<ILogHandler>(Config.Get("log-handler", "CompositeLogHandler"));

    // Compose the algorithm handlers from configuration; _resultshandler is replaced by
    // whatever the composition produced so the rest of this method sees the live instance.
    LeanEngineAlgorithmHandlers leanEngineAlgorithmHandlers;
    try
    {
        leanEngineAlgorithmHandlers = LeanEngineAlgorithmHandlers.FromConfiguration(Composer.Instance);
        _resultshandler = leanEngineAlgorithmHandlers.Results;
    }
    catch (CompositionException compositionException)
    {
        Log.Error("Engine.Main(): Failed to load library: " + compositionException);
        throw;
    }

    string algorithmPath;
    AlgorithmNodePacket job = systemHandlers.JobQueue.NextJob(out algorithmPath);
    try
    {
        // fixed: the local was named "_engine" — the underscore prefix is reserved for fields
        var engine = new Engine(systemHandlers, leanEngineAlgorithmHandlers, Config.GetBool("live-mode"));
        engine.Run(job, algorithmPath);
    }
    finally
    {
        //Delete the message from the job queue:
        //systemHandlers.JobQueue.AcknowledgeJob(job);
        Log.Trace("Engine.Main(): Packet removed from queue: " + job.AlgorithmId);

        // clean up resources
        systemHandlers.Dispose();
        leanEngineAlgorithmHandlers.Dispose();
        Log.LogHandler.Dispose();
    }
}
/// <summary>
/// Benchmarks the FileSystemDataFeed by running the feed on a background thread
/// and enumerating its time slices, printing KPS (thousands of slices per second).
/// </summary>
public void TestsFileSystemDataFeedSpeed()
{
    var job = new BacktestNodePacket();
    var resultHandler = new BacktestingResultHandler();
    var mapFileProvider = new LocalDiskMapFileProvider();
    var factorFileProvider = new LocalDiskFactorFileProvider(mapFileProvider);
    var dataProvider = new DefaultDataProvider();
    var algorithm = PerformanceBenchmarkAlgorithms.SingleSecurity_Second;
    var feed = new FileSystemDataFeed();

    var universeSelection = new UniverseSelection(feed, algorithm);
    var dataManager = new DataManager(feed, universeSelection, algorithm.Settings, algorithm.TimeKeeper);
    algorithm.SubscriptionManager.SetDataManager(dataManager);

    feed.Initialize(algorithm, job, resultHandler, mapFileProvider, factorFileProvider, dataProvider, dataManager);
    algorithm.Initialize();
    algorithm.PostInitialize();

    // run the feed on its own background thread; wait until it has actually started
    var feedStarted = new ManualResetEvent(false);
    var feedThread = new Thread(() =>
    {
        feedStarted.Set();
        feed.Run();
    })
    { IsBackground = true };
    feedThread.Start();
    feedStarted.WaitOne();

    var sliceCount = 0;
    var stopwatch = Stopwatch.StartNew();
    var previousMonth = algorithm.StartDate.Month;
    foreach (var timeSlice in feed)
    {
        // print a throughput sample whenever the simulated month rolls over
        if (timeSlice.Time.Month != previousMonth)
        {
            var elapsed = stopwatch.Elapsed.TotalSeconds;
            var thousands = sliceCount / 1000d;
            Console.WriteLine($"{DateTime.Now} - Time: {timeSlice.Time}: KPS: {thousands / elapsed}");
            previousMonth = timeSlice.Time.Month;
        }
        sliceCount++;
    }

    Console.WriteLine("Count: " + sliceCount);

    stopwatch.Stop();
    Console.WriteLine($"Elapsed time: {stopwatch.Elapsed} KPS: {sliceCount / 1000d / stopwatch.Elapsed.TotalSeconds}");
}
// Verifies that the option chain universe emits one data collection per trading minute,
// that the underlying's timestamp advances in lock-step with the slice time, and that
// the front-month filter always yields contracts. Runs over 2 trading days (2014-06-06
// and 2014-06-09) of AAPL options.
public void OptionChainEnumerator(bool fillForward)
{
    var job = new BacktestNodePacket();
    var resultHandler = new BacktestingResultHandler();
    var feed = new FileSystemDataFeed();
    var algorithm = new AlgorithmStub(feed);
    algorithm.Transactions.SetOrderProcessor(new FakeOrderProcessor());
    algorithm.SetStartDate(new DateTime(2014, 06, 06));
    algorithm.SetEndDate(new DateTime(2014, 06, 09));
    algorithm.SetOptionChainProvider(new BacktestingOptionChainProvider(TestGlobals.DataCacheProvider, TestGlobals.MapFileProvider));
    var dataPermissionManager = new DataPermissionManager();
    using var synchronizer = new Synchronizer();
    synchronizer.Initialize(algorithm, algorithm.DataManager);
    feed.Initialize(algorithm, job, resultHandler, TestGlobals.MapFileProvider, TestGlobals.FactorFileProvider, TestGlobals.DataProvider, algorithm.DataManager, synchronizer, dataPermissionManager.DataChannelProvider);
    var option = algorithm.AddOption("AAPL", fillDataForward: fillForward);
    option.SetFilter(filter => filter.FrontMonth());
    algorithm.PostInitialize();
    // 30s guard so a hung stream cancels rather than stalling the test run
    using var cancellationTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(30));
    var count = 0;
    var lastMonth = algorithm.StartDate.Month;
    foreach (var timeSlice in synchronizer.StreamData(cancellationTokenSource.Token))
    {
        if (!timeSlice.IsTimePulse && timeSlice.UniverseData?.Count > 0)
        {
            var baseDataCollection = timeSlice.UniverseData.Single().Value;
            if (baseDataCollection.Symbol.SecurityType == SecurityType.Option)
            {
                var nyTime = timeSlice.Time.ConvertFromUtc(algorithm.TimeZone);
                // expected emission time: 9:30 + (count % 390 + 1) minutes, i.e. one per market minute
                Assert.AreEqual(new TimeSpan(9, 30, 0).Add(TimeSpan.FromMinutes((count % 390) + 1)), nyTime.TimeOfDay, $"Failed on: {nyTime}");
                Assert.IsNotNull(baseDataCollection.Underlying);
                // make sure the underlying time stamp is getting updated
                Assert.AreEqual(nyTime.TimeOfDay, baseDataCollection.Underlying.EndTime.TimeOfDay);
                Assert.AreEqual(nyTime.TimeOfDay, baseDataCollection.EndTime.ConvertFromUtc(algorithm.TimeZone).TimeOfDay);
                // the front-month filter should always produce at least one contract
                Assert.IsTrue(!baseDataCollection.FilteredContracts.IsNullOrEmpty());
                count++;
            }
        }
    }
    feed.Exit();
    algorithm.DataManager.RemoveAllSubscriptions();
    // 9:30 to 15:59 -> 6.5 hours * 60 => 390 minutes * 2 days = 780
    Assert.AreEqual(780, count);
}
/// <summary>
/// Benchmarks the FileSystemDataFeed by enumerating its time slices directly
/// and printing KPS (thousands of slices per second).
/// </summary>
public void TestsFileSystemDataFeedSpeed()
{
    var job = new BacktestNodePacket();
    var resultHandler = new BacktestingResultHandler();
    var mapFileProvider = new LocalDiskMapFileProvider();
    var factorFileProvider = new LocalDiskFactorFileProvider(mapFileProvider);
    var dataProvider = new DefaultDataProvider();
    var algorithm = PerformanceBenchmarkAlgorithms.SingleSecurity_Second;
    var feed = new FileSystemDataFeed();
    var marketHoursDatabase = MarketHoursDatabase.FromDataFolder();
    var symbolPropertiesDataBase = SymbolPropertiesDatabase.FromDataFolder();

    // build the data manager piecewise so each collaborator is visible
    var securityService = new SecurityService(algorithm.Portfolio.CashBook, marketHoursDatabase, symbolPropertiesDataBase, algorithm);
    var universeSelection = new UniverseSelection(feed, algorithm, securityService);
    var dataManager = new DataManager(feed, universeSelection, algorithm.Settings, algorithm.TimeKeeper, marketHoursDatabase);
    algorithm.SubscriptionManager.SetDataManager(dataManager);

    feed.Initialize(algorithm, job, resultHandler, mapFileProvider, factorFileProvider, dataProvider, dataManager);
    algorithm.Initialize();
    algorithm.PostInitialize();

    var sliceCount = 0;
    var stopwatch = Stopwatch.StartNew();
    var previousMonth = algorithm.StartDate.Month;
    foreach (var timeSlice in feed)
    {
        // print a throughput sample whenever the simulated month rolls over
        if (timeSlice.Time.Month != previousMonth)
        {
            var elapsed = stopwatch.Elapsed.TotalSeconds;
            var thousands = sliceCount / 1000d;
            Console.WriteLine($"{DateTime.Now} - Time: {timeSlice.Time}: KPS: {thousands / elapsed}");
            previousMonth = timeSlice.Time.Month;
        }
        sliceCount++;
    }

    Console.WriteLine("Count: " + sliceCount);

    stopwatch.Stop();
    feed.Exit();
    Console.WriteLine($"Elapsed time: {stopwatch.Elapsed} KPS: {sliceCount / 1000d / stopwatch.Elapsed.TotalSeconds}");
}
/// <summary>
/// Benchmarks the FileSystemDataFeed by running the feed on a task and
/// enumerating its time slices, printing the total count and elapsed time.
/// </summary>
public void TestsFileSystemDataFeedSpeed()
{
    var job = new BacktestNodePacket();
    var resultHandler = new BacktestingResultHandler();
    var mapFileProvider = new LocalDiskMapFileProvider();
    var factorFileProvider = new LocalDiskFactorFileProvider(mapFileProvider);
    var dataProvider = new DefaultDataProvider();
    var algorithm = new BenchmarkTest();
    var feed = new FileSystemDataFeed();

    feed.Initialize(algorithm, job, resultHandler, mapFileProvider, factorFileProvider, dataProvider);
    algorithm.Initialize();

    // run the feed on a worker task; wait until it has actually started
    var feedStarted = new ManualResetEvent(false);
    Task.Factory.StartNew(() =>
    {
        feedStarted.Set();
        feed.Run();
    });
    feedStarted.WaitOne();

    var stopwatch = Stopwatch.StartNew();
    var previousMonth = -1;   // sentinel: first slice always prints
    var sliceCount = 0;
    foreach (var timeSlice in feed)
    {
        // print a progress line whenever the simulated month rolls over
        if (timeSlice.Time.Month != previousMonth)
        {
            Console.WriteLine(DateTime.Now + " - Time: " + timeSlice.Time);
            previousMonth = timeSlice.Time.Month;
        }
        sliceCount++;
    }

    Console.WriteLine("Count: " + sliceCount);

    stopwatch.Stop();
    Console.WriteLine("Elapsed time: " + stopwatch.Elapsed);
}
// Runs one optimizer iteration of the Lean engine for the given backtest id, then
// aggressively tears down every handler and cached reference so repeated in-process
// runs do not leak memory. The teardown order and null-outs in the finally block are
// deliberate — do not reorder without verifying repeated-run memory behavior.
private void LaunchLean(string id)
{
    // merge this iteration's parameter set into the shared config
    ConfigMerger.Merge(_config, id);
    Config.Set("api-handler", nameof(EmptyApiHandler));
    //todo: instance logging
    //var logFileName = "log" + DateTime.Now.ToString("yyyyMMddssfffffff") + "_" + id + ".txt";
    Log.LogHandler = LogSingleton.Instance;
    var jobQueue = new JobQueue();
    var manager = new LocalLeanManager();

    var systemHandlers = new LeanEngineSystemHandlers(
        jobQueue,
        new EmptyApiHandler(),
        new QuantConnect.Messaging.Messaging(),
        manager);

    systemHandlers.Initialize();

    var map = new LocalDiskMapFileProvider();
    var results = new OptimizerResultHandler();
    var transactions = new BacktestingTransactionHandler();
    var dataFeed = new FileSystemDataFeed();
    var realTime = new BacktestingRealTimeHandler();
    var data = new DefaultDataProvider();

    var leanEngineAlgorithmHandlers = new LeanEngineAlgorithmHandlers(
        results,
        new ConsoleSetupHandler(),
        dataFeed,
        transactions,
        realTime,
        map,
        new LocalDiskFactorFileProvider(map),
        data,
        new OptimizerAlphaHandler(),
        new EmptyObjectStore());

    _resultsHandler = (OptimizerResultHandler)leanEngineAlgorithmHandlers.Results;

    var job = (BacktestNodePacket)systemHandlers.JobQueue.NextJob(out var algorithmPath);
    // mark job with id; it is set on the algorithm in OptimizerAlphaHandler
    job.BacktestId = id;
    //todo: pass period through job
    //job.PeriodStart = _config.StartDate;
    //job.PeriodFinish = _config.EndDate;

    Engine engine;
    AlgorithmManager algorithmManager;
    try
    {
        algorithmManager = new AlgorithmManager(false);
        systemHandlers.LeanManager.Initialize(systemHandlers, leanEngineAlgorithmHandlers, job, algorithmManager);
        engine = new Engine(systemHandlers, leanEngineAlgorithmHandlers, false);
        using (var workerThread = new MultipleWorkerThread())
        {
            engine.Run(job, algorithmManager, algorithmPath, workerThread);
        }
    }
    finally
    {
        // clean up resources
        Composer.Instance.Reset();
        results.Charts.Clear();
        results.Messages.Clear();
        // algorithm can be null if setup failed — guard all algorithm-scoped cleanup
        if (results.Algorithm != null)
        {
            results.Algorithm.Transactions.TransactionRecord.Clear();
            // detach and unregister every consolidator so event subscriptions don't pin the algorithm
            results.Algorithm.SubscriptionManager.Subscriptions.SelectMany(s => s.Consolidators)?.ToList().ForEach(f =>
            {
                results.Algorithm.SubscriptionManager.RemoveConsolidator(f.WorkingData?.Symbol, f);
                UnregisterAllEvents(f);
            });
            if (results.Algorithm is QCAlgorithm)
            {
                ((QCAlgorithm)results.Algorithm).SubscriptionManager.Subscriptions.ToList().Clear();
            }
            if (_config.AlgorithmLanguage != "Python")
            {
                results.Algorithm.HistoryProvider = null;
            }
            // TradeBuilder keeps closed trades in a private list; clear it via reflection
            var closedTrades = (List<Trade>)typeof(TradeBuilder).GetField("_closedTrades", BindingFlags.Instance | BindingFlags.NonPublic).GetValue(results.Algorithm.TradeBuilder);
            closedTrades.Clear();
            results.Algorithm = null;
        }
        transactions.Orders.Clear();
        transactions.OrderTickets.Clear();
        manager.Dispose();
        systemHandlers.Dispose();
        leanEngineAlgorithmHandlers.Dispose();
        // null out every reference so the GC can reclaim this iteration's graph
        results = null;
        dataFeed = null;
        transactions = null;
        realTime = null;
        data = null;
        map = null;
        systemHandlers = null;
        leanEngineAlgorithmHandlers = null;
        algorithmManager = null;
        engine = null;
        job = null;
        jobQueue = null;
        manager = null;
    }
}
// Verifies that the future chain universe emits minute-by-minute data over 2 days of ES
// futures (2013-10-07/08), skipping closed-market minutes, and that the continuous-future
// universe (when present) stays time-aligned with the chain.
public void FutureChainEnumerator(bool fillForward)
{
    var job = new BacktestNodePacket();
    var resultHandler = new BacktestingResultHandler();
    var feed = new FileSystemDataFeed();
    var algorithm = new AlgorithmStub(feed);
    algorithm.Transactions.SetOrderProcessor(new FakeOrderProcessor());
    algorithm.SetStartDate(new DateTime(2013, 10, 07));
    algorithm.SetEndDate(new DateTime(2013, 10, 08));
    algorithm.SetFutureChainProvider(new BacktestingFutureChainProvider(TestGlobals.DataCacheProvider));
    var dataPermissionManager = new DataPermissionManager();
    using var synchronizer = new Synchronizer();
    synchronizer.Initialize(algorithm, algorithm.DataManager);
    feed.Initialize(algorithm, job, resultHandler, TestGlobals.MapFileProvider, TestGlobals.FactorFileProvider, TestGlobals.DataProvider, algorithm.DataManager, synchronizer, dataPermissionManager.DataChannelProvider);
    var future = algorithm.AddFuture("ES", fillDataForward: fillForward);
    future.SetFilter(0, 300);
    algorithm.PostInitialize();
    // 30s guard so a hung stream cancels rather than stalling the test run
    using var cancellationTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(30));
    var count = 0L;
    var lastMonth = algorithm.StartDate.Month;
    foreach (var timeSlice in synchronizer.StreamData(cancellationTokenSource.Token))
    {
        if (!timeSlice.IsTimePulse && timeSlice.UniverseData?.Count > 0)
        {
            var nyTime = timeSlice.Time.ConvertFromUtc(algorithm.TimeZone);

            // expected minute-of-day for this slice, wrapping every 24h
            var currentExpectedTime = new TimeSpan(0, 0, 0).Add(TimeSpan.FromMinutes(count % (24 * 60)));
            while (!future.Exchange.DateTimeIsOpen(nyTime.Date.Add(currentExpectedTime).AddMinutes(-1)))
            {
                // skip closed market times
                currentExpectedTime = new TimeSpan(0, 0, 0).Add(TimeSpan.FromMinutes(++count % (24 * 60)));
            }

            // order deterministically so [0] is the chain and [1] (if any) the continuous future
            var universeData = timeSlice.UniverseData.OrderBy(kvp => kvp.Key.Configuration.Symbol).ToList();
            var chainData = universeData[0].Value;

            Log.Trace($"{nyTime}. Count: {count}. Universe Data Count {universeData.Count}");
            Assert.AreEqual(currentExpectedTime, nyTime.TimeOfDay, $"Failed on: {nyTime}. Count: {count}");
            Assert.IsTrue(timeSlice.UniverseData.All(kvp => kvp.Value.EndTime.ConvertFromUtc(algorithm.TimeZone).TimeOfDay == nyTime.TimeOfDay));
            if (chainData.FilteredContracts.IsNullOrEmpty())
            {
                // contracts are only allowed to be empty on the end-date boundary slice
                Assert.AreEqual(new DateTime(2013, 10, 09), nyTime, $"Unexpected chain FilteredContracts was empty on {nyTime}");
            }

            if (universeData.Count == 1)
            {
                // the chain
                Assert.IsTrue(universeData.Any(kvp => kvp.Key.Configuration.Symbol == future.Symbol));
            }
            else
            {
                // we have 2 universe data, the chain and the continuous future
                Assert.AreEqual(2, universeData.Count);
                Assert.IsTrue(universeData.All(kvp => kvp.Key.Configuration.Symbol.SecurityType == SecurityType.Future));
                Assert.IsTrue(universeData.Any(kvp => kvp.Key.Configuration.Symbol == future.Symbol));
                Assert.IsTrue(universeData.Any(kvp => kvp.Key.Configuration.Symbol.ID.Symbol.Contains("CONTINUOUS", StringComparison.InvariantCultureIgnoreCase)));

                var continuousData = universeData[1].Value;
                Assert.AreEqual(currentExpectedTime, nyTime.TimeOfDay, $"Failed on: {nyTime}");
                Assert.IsTrue(!chainData.FilteredContracts.IsNullOrEmpty());
            }

            count++;
        }
    }
    feed.Exit();
    algorithm.DataManager.RemoveAllSubscriptions();

    // 2 days worth of minute data
    Assert.AreEqual(24 * 2 * 60 + 1, count);
}
/// <summary>
/// Runs one optimizer iteration of the Lean engine for the given backtest id,
/// configuring the environment from <c>_config</c>, then tears down every handler
/// and cached reference so repeated in-process runs do not leak memory.
/// </summary>
/// <param name="id">Identifier for this backtest run; used to tag the job and the transaction log.</param>
private void LaunchLean(string id)
{
    Config.Set("environment", "backtesting");

    if (!string.IsNullOrEmpty(_config.AlgorithmTypeName))
    {
        Config.Set("algorithm-type-name", _config.AlgorithmTypeName);
    }

    if (!string.IsNullOrEmpty(_config.AlgorithmLocation))
    {
        Config.Set("algorithm-location", Path.GetFileName(_config.AlgorithmLocation));
    }

    if (!string.IsNullOrEmpty(_config.DataFolder))
    {
        Config.Set("data-folder", _config.DataFolder);
    }

    if (!string.IsNullOrEmpty(_config.TransactionLog))
    {
        // suffix the transaction log file name with the run id so parallel runs don't collide
        var filename = _config.TransactionLog;
        filename = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, Path.GetFileNameWithoutExtension(filename) + id + Path.GetExtension(filename));
        Config.Set("transaction-log", filename);
    }

    Config.Set("api-handler", nameof(EmptyApiHandler));
    Config.Set("backtesting.result-handler", nameof(OptimizerResultHandler));

    //Composer.Instance.Reset();

    //todo: instance logging
    //var logFileName = "log" + DateTime.Now.ToString("yyyyMMddssfffffff") + "_" + id + ".txt";
    Log.LogHandler = LogSingleton.Instance;
    var jobQueue = new JobQueue();
    var manager = new LocalLeanManager();

    var systemHandlers = new LeanEngineSystemHandlers(
        jobQueue,
        new EmptyApiHandler(),
        new QuantConnect.Messaging.Messaging(),
        manager);

    systemHandlers.Initialize();

    var map = new LocalDiskMapFileProvider();
    var results = new OptimizerResultHandler();
    var transactions = new BacktestingTransactionHandler();
    var dataFeed = new FileSystemDataFeed();
    var realTime = new BacktestingRealTimeHandler();
    var data = new DefaultDataProvider();

    var leanEngineAlgorithmHandlers = new LeanEngineAlgorithmHandlers(
        results,
        new ConsoleSetupHandler(),
        dataFeed,
        transactions,
        realTime,
        map,
        new LocalDiskFactorFileProvider(map),
        data,
        new OptimizerAlphaHandler(),
        new EmptyObjectStore());

    _resultsHandler = (OptimizerResultHandler)leanEngineAlgorithmHandlers.Results;

    var job = (BacktestNodePacket)systemHandlers.JobQueue.NextJob(out var algorithmPath);
    // mark job with id; it is set on the algorithm in OptimizerAlphaHandler
    job.BacktestId = id;
    //todo: pass period through job
    //job.PeriodStart = _config.StartDate;
    //job.PeriodFinish = _config.EndDate;

    Engine engine;
    AlgorithmManager algorithmManager;
    try
    {
        algorithmManager = new AlgorithmManager(false);
        systemHandlers.LeanManager.Initialize(systemHandlers, leanEngineAlgorithmHandlers, job, algorithmManager);
        engine = new Engine(systemHandlers, leanEngineAlgorithmHandlers, false);
        using (var workerThread = new MultipleWorkerThread())
        {
            engine.Run(job, algorithmManager, algorithmPath, workerThread);
        }
    }
    finally
    {
        // clean up resources
        Composer.Instance.Reset();
        results.Charts.Clear();
        results.Messages.Clear();
        // fixed: results.Algorithm is null when setup fails; dereferencing it here threw a
        // NullReferenceException inside the finally block, masking the original failure
        if (results.Algorithm != null)
        {
            results.Algorithm.Transactions.TransactionRecord.Clear();
            // TradeBuilder keeps closed trades in a private list; clear it via reflection
            var closedTrades = (List<Trade>)typeof(TradeBuilder).GetField("_closedTrades", BindingFlags.Instance | BindingFlags.NonPublic).GetValue(results.Algorithm.TradeBuilder);
            closedTrades.Clear();
            results.Algorithm.HistoryProvider = null;
            results.Algorithm = null;
        }
        transactions.Orders.Clear();
        transactions.OrderTickets.Clear();
        manager.Dispose();
        systemHandlers.Dispose();
        leanEngineAlgorithmHandlers.Dispose();
        // null out every reference so the GC can reclaim this iteration's graph
        results = null;
        dataFeed = null;
        transactions = null;
        realTime = null;
        data = null;
        map = null;
        systemHandlers = null;
        leanEngineAlgorithmHandlers = null;
        algorithmManager = null;
        engine = null;
        job = null;
        jobQueue = null;
        manager = null;
    }
}