public void EstimizeConsensusReaderTest()
{
    var dataCacheProvider = new SingleEntryDataCacheProvider(new DefaultDataProvider());
    var config = new SubscriptionDataConfig(
        typeof(EstimizeConsensus),
        Symbol.Create("AAPL.C", SecurityType.Base, QuantConnect.Market.USA),
        Resolution.Daily,
        DateTimeZone.Utc,
        DateTimeZone.Utc,
        false,
        false,
        false,
        true
    );

    var data = new EstimizeConsensus();
    var date = new DateTime(2019, 6, 10);
    var source = data.GetSource(config, date, false);
    var factory = SubscriptionDataSourceReader.ForSource(source, dataCacheProvider, config, date, false, data);

    var rows = factory.Read(source).ToList();

    Assert.IsTrue(rows.Count > 0);
}
/// <summary>
/// Creates an enumerator to read the specified request
/// </summary>
/// <param name="request">The subscription request to be read</param>
/// <param name="dataProvider">Provider used to get data when it is not present on disk</param>
/// <returns>An enumerator reading the subscription request</returns>
public IEnumerator<BaseData> CreateEnumerator(SubscriptionRequest request, IDataProvider dataProvider)
{
    using (var dataCacheProvider = new SingleEntryDataCacheProvider(dataProvider))
    {
        var configuration = request.Configuration;
        var sourceFactory = (BaseData)Activator.CreateInstance(request.Configuration.Type);

        // we want the first selection to happen on the start time
        // so we need the previous tradable day time, since coarse
        // files are for each tradable date but emitted with next day time
        var previousTradableDay = Time.GetStartTimeForTradeBars(
            request.Security.Exchange.Hours,
            request.StartTimeLocal,
            Time.OneDay,
            1,
            false);
        var tradableDays = new[] { previousTradableDay }.Concat(request.TradableDays);

        // Behaves in the same way as in live trading
        // (i.e. only emit coarse data on dates following a trading day)
        // The shifting of dates is needed to ensure we never emit coarse data on the same date,
        // because it would enable look-ahead bias.
        foreach (var date in tradableDays)
        {
            var source = sourceFactory.GetSource(configuration, date, false);
            var factory = SubscriptionDataSourceReader.ForSource(source, dataCacheProvider, configuration, date, false);
            var coarseFundamentalForDate = factory.Read(source);

            // shift the emit date of the file forward one day to model emitting coarse at midnight of the next day
            yield return new BaseDataCollection(date.AddDays(1), configuration.Symbol, coarseFundamentalForDate);
        }
    }
}
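// A minimal standalone sketch (not part of the factory above, values hypothetical) of the
// date shift described in the comments: the coarse file for tradable day D is requested with
// date D but emitted with time D + 1 day, so universe selection never sees coarse data
// stamped with the same date it was generated for.
var tradableDay = new DateTime(2014, 3, 25);   // hypothetical tradable date D
var fileDate = tradableDay;                    // date passed to GetSource/ForSource
var emitTime = tradableDay.AddDays(1);         // time stamped on the emitted BaseDataCollection
// emitTime is midnight of 2014-03-26, i.e. the day following the trading day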
private IEnumerator<BaseData> EnumerateDataSourceReader(SubscriptionDataConfig config, IDataProvider dataProvider, Ref<DateTime> localFrontier, SubscriptionDataSource source, DateTime localDate)
{
    using (var dataCacheProvider = new SingleEntryDataCacheProvider(dataProvider))
    {
        var newLocalFrontier = localFrontier.Value;
        var dataSourceReader = GetSubscriptionDataSourceReader(source, dataCacheProvider, config, localDate);

        foreach (var datum in dataSourceReader.Read(source))
        {
            // always skip past all times emitted on the previous invocation of this enumerator
            // this allows data at the same time from the same refresh of the source while excluding
            // data from different refreshes of the source
            if (datum != null && datum.EndTime > localFrontier.Value)
            {
                yield return datum;
            }
            else if (!SourceRequiresFastForward(source))
            {
                // if the 'source' is Rest and there is no new value,
                // we return null, else we will be caught in a tight loop
                // because Rest source never ends!
                yield return null;
            }

            if (datum != null)
            {
                newLocalFrontier = Time.Max(datum.EndTime, newLocalFrontier);
            }
        }

        localFrontier.Value = newLocalFrontier;
    }
}
/// <summary>
/// Creates an enumerator to read the specified request
/// </summary>
/// <param name="request">The subscription request to be read</param>
/// <param name="dataProvider">Provider used to get data when it is not present on disk</param>
/// <returns>An enumerator reading the subscription request</returns>
public IEnumerator<BaseData> CreateEnumerator(SubscriptionRequest request, IDataProvider dataProvider)
{
    using (var dataCacheProvider = new SingleEntryDataCacheProvider(dataProvider))
    {
        var configuration = request.Configuration;
        var tradableDays = _tradableDaysProvider(request);
        var sourceFactory = (BaseData)Activator.CreateInstance(request.Configuration.Type);

        // Note: this enumerator factory is currently only used in backtesting with coarse data
        // and has been updated to behave in the same way as in live trading
        // (i.e. only emit coarse data on dates following a trading day)
        // The shifting of dates is needed to ensure we never emit coarse data on the same date,
        // because it would enable look-ahead bias.

        // shift all tradeable dates forward one day
        foreach (var date in tradableDays.Select(x => x.AddDays(1)))
        {
            // request the file for the previous date, which is a tradeable day
            var source = sourceFactory.GetSource(configuration, date.AddDays(-1), false);
            var factory = SubscriptionDataSourceReader.ForSource(source, dataCacheProvider, configuration, date.AddDays(-1), false);
            var coarseFundamentalForDate = factory.Read(source);

            // Coarse data has a period of one day (EndTime == Time + OneDay) but BaseDataCollection has no period (EndTime == Time),
            // so we need to add one more day here.
            yield return new BaseDataCollection(date.AddDays(1), configuration.Symbol, coarseFundamentalForDate);
        }
    }
}
/// <summary>
/// Creates an enumerator to read the specified request
/// </summary>
/// <param name="request">The subscription request to be read</param>
/// <param name="dataProvider">Provider used to get data when it is not present on disk</param>
/// <returns>An enumerator reading the subscription request</returns>
public IEnumerator<BaseData> CreateEnumerator(SubscriptionRequest request, IDataProvider dataProvider)
{
    using (var dataCacheProvider = new SingleEntryDataCacheProvider(dataProvider))
    {
        var tradableDays = _tradableDaysProvider(request);
        var fineFundamental = new FineFundamental();
        var fineFundamentalConfiguration = new SubscriptionDataConfig(request.Configuration, typeof(FineFundamental), request.Security.Symbol);

        foreach (var date in tradableDays)
        {
            var fineFundamentalSource = GetSource(fineFundamental, fineFundamentalConfiguration, date);
            var fineFundamentalFactory = SubscriptionDataSourceReader.ForSource(fineFundamentalSource, dataCacheProvider, fineFundamentalConfiguration, date, _isLiveMode);
            var fineFundamentalForDate = (FineFundamental)fineFundamentalFactory.Read(fineFundamentalSource).FirstOrDefault();

            yield return new FineFundamental
            {
                DataType = MarketDataType.Auxiliary,
                Symbol = request.Configuration.Symbol,
                Time = date,
                CompanyReference = fineFundamentalForDate != null ? fineFundamentalForDate.CompanyReference : new CompanyReference(),
                SecurityReference = fineFundamentalForDate != null ? fineFundamentalForDate.SecurityReference : new SecurityReference(),
                FinancialStatements = fineFundamentalForDate != null ? fineFundamentalForDate.FinancialStatements : new FinancialStatements(),
                EarningReports = fineFundamentalForDate != null ? fineFundamentalForDate.EarningReports : new EarningReports(),
                OperationRatios = fineFundamentalForDate != null ? fineFundamentalForDate.OperationRatios : new OperationRatios(),
                EarningRatios = fineFundamentalForDate != null ? fineFundamentalForDate.EarningRatios : new EarningRatios(),
                ValuationRatios = fineFundamentalForDate != null ? fineFundamentalForDate.ValuationRatios : new ValuationRatios(),
                AssetClassification = fineFundamentalForDate != null ? fineFundamentalForDate.AssetClassification : new AssetClassification(),
                CompanyProfile = fineFundamentalForDate != null ? fineFundamentalForDate.CompanyProfile : new CompanyProfile()
            };
        }
    }
}
/// <summary>
/// Creates an enumerator to read the specified request
/// </summary>
/// <param name="request">The subscription request to be read</param>
/// <param name="dataProvider">Provider used to get data when it is not present on disk</param>
/// <returns>An enumerator reading the subscription request</returns>
public IEnumerator<BaseData> CreateEnumerator(SubscriptionRequest request, IDataProvider dataProvider)
{
    _dataCacheProvider = new SingleEntryDataCacheProvider(dataProvider);

    var configuration = request.Configuration;
    var tradableDays = _tradableDaysProvider(request);
    var sourceFactory = (BaseData)Activator.CreateInstance(request.Configuration.Type);

    return (
        from date in tradableDays
        let source = sourceFactory.GetSource(configuration, date, false)
        let factory = SubscriptionDataSourceReader.ForSource(source, _dataCacheProvider, configuration, date, false)
        let coarseFundamentalForDate = factory.Read(source)
        select new BaseDataCollection(date.AddDays(1), configuration.Symbol, coarseFundamentalForDate)
        ).GetEnumerator();
}
List<Symbol> LoadFutureChain(Symbol baseFuture, DateTime date, TickType tickType, Resolution res)
{
    var filePath = LeanData.GenerateZipFilePath(_dataDirectory, baseFuture, date, res, tickType);

    //load future chain first
    var config = new SubscriptionDataConfig(typeof(ZipEntryName), baseFuture, res,
        TimeZones.NewYork, TimeZones.NewYork, false, false, false, false, tickType);

    var dataProvider = new DefaultDataProvider();
    var dataCacheProvider = new SingleEntryDataCacheProvider(dataProvider);
    var factory = new ZipEntryNameSubscriptionDataSourceReader(dataCacheProvider, config, date, false);

    return factory.Read(new SubscriptionDataSource(filePath, SubscriptionTransportMedium.LocalFile, FileFormat.ZipEntryName))
        .Select(s => s.Symbol)
        .ToList();
}
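// Hypothetical usage of the helper above; the symbol, date, tick type and resolution are
// illustrative only and assume a matching zip file exists under _dataDirectory.
var canonicalFuture = Symbol.Create("ES", SecurityType.Future, Market.USA);
var chain = LoadFutureChain(canonicalFuture, new DateTime(2013, 10, 11), TickType.Quote, Resolution.Minute);
foreach (var contract in chain)
{
    Log.Trace(contract.Value); // one symbol per zip entry in the chain file
}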
public void QuandlDownloadDoesNotThrow()
{
    Quandl.SetAuthCode("WyAazVXnq7ATy_fefTqm");
    RemoteFileSubscriptionStreamReader.SetDownloadProvider(new Api.Api());

    var data = new HistoryAlgorithm.QuandlFuture();
    const string ticker = "CHRIS/CME_SP1";
    var date = new DateTime(2018, 8, 31);

    var config = new SubscriptionDataConfig(
        typeof(HistoryAlgorithm.QuandlFuture),
        Symbol.Create(ticker, SecurityType.Base, QuantConnect.Market.USA),
        Resolution.Daily,
        DateTimeZone.Utc,
        DateTimeZone.Utc,
        false,
        false,
        false,
        true);
    var source = data.GetSource(config, date, false);

    var dataCacheProvider = new SingleEntryDataCacheProvider(new DefaultDataProvider());
    var factory = SubscriptionDataSourceReader.ForSource(source, dataCacheProvider, config, date, false, data);

    var rows = factory.Read(source).ToList();

    Assert.IsTrue(rows.Count > 0);
}
private IEnumerator<BaseData> EnumerateDataSourceReader(SubscriptionDataConfig config, IDataProvider dataProvider, Ref<DateTime> localFrontier, SubscriptionDataSource source, DateTime localDate, BaseData baseDataInstance)
{
    using (var dataCacheProvider = new SingleEntryDataCacheProvider(dataProvider))
    {
        var newLocalFrontier = localFrontier.Value;
        var dataSourceReader = GetSubscriptionDataSourceReader(source, dataCacheProvider, config, localDate, baseDataInstance);

        foreach (var datum in dataSourceReader.Read(source))
        {
            // always skip past all times emitted on the previous invocation of this enumerator
            // this allows data at the same time from the same refresh of the source while excluding
            // data from different refreshes of the source
            if (datum != null && datum.EndTime > localFrontier.Value)
            {
                yield return datum;
            }
            else if (!SourceRequiresFastForward(source))
            {
                // if the 'source' is Rest and there is no new value,
                // we *break*, else we will be caught in a tight loop
                // because Rest source never ends!
                // edit: we 'break' vs 'return null' so that the source is refreshed,
                // allowing date changes to impact the source value
                // note it will respect 'minimumTimeBetweenCalls'
                break;
            }

            if (datum != null)
            {
                newLocalFrontier = Time.Max(datum.EndTime, newLocalFrontier);

                if (!SourceRequiresFastForward(source))
                {
                    // if the 'source' is Rest we need to update the localFrontier here
                    // because Rest source never ends!
                    // Should we advance the frontier for all source types here?
                    localFrontier.Value = newLocalFrontier;
                }
            }
        }

        localFrontier.Value = newLocalFrontier;
    }
}
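// A minimal sketch of the frontier bookkeeping used above, with hypothetical values:
// only points whose EndTime is strictly greater than the frontier are emitted, and the
// frontier is then advanced to the newest EndTime seen so a later refresh of the same
// source does not re-emit them.
var frontier = new DateTime(2019, 6, 10, 9, 30, 0);
var endTimes = new[] { frontier.AddMinutes(-1), frontier, frontier.AddMinutes(1) };
foreach (var endTime in endTimes)
{
    if (endTime > frontier)
    {
        Console.WriteLine(endTime); // emit the point; only the 9:31 value qualifies here
    }
}
frontier = endTimes.Max(); // the next pass starts from the newest end time seen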
/// <summary>
/// Creates an enumerator to read the specified request
/// </summary>
/// <param name="request">The subscription request to be read</param>
/// <param name="dataProvider">Provider used to get data when it is not present on disk</param>
/// <returns>An enumerator reading the subscription request</returns>
public IEnumerator<BaseData> CreateEnumerator(SubscriptionRequest request, IDataProvider dataProvider)
{
    var sourceFactory = (BaseData)ObjectActivator.GetActivator(request.Configuration.Type).Invoke(new object[] { request.Configuration.Type });

    using (var dataCacheProvider = new SingleEntryDataCacheProvider(dataProvider))
    {
        foreach (var date in _tradableDaysProvider(request))
        {
            request.Configuration.MappedSymbol = GetMappedSymbol(request, date);

            var source = sourceFactory.GetSource(request.Configuration, date, _isLiveMode);
            var factory = SubscriptionDataSourceReader.ForSource(source, dataCacheProvider, request.Configuration, date, _isLiveMode);
            var entriesForDate = factory.Read(source);
            foreach (var entry in entriesForDate)
            {
                yield return entry;
            }
        }
    }
}
/// <summary>
/// Creates an enumerator to read the specified request
/// </summary>
/// <param name="request">The subscription request to be read</param>
/// <param name="dataProvider">Provider used to get data when it is not present on disk</param>
/// <returns>An enumerator reading the subscription request</returns>
public IEnumerator<BaseData> CreateEnumerator(SubscriptionRequest request, IDataProvider dataProvider)
{
    _dataCacheProvider = new SingleEntryDataCacheProvider(dataProvider);
    var sourceFactory = (BaseData)Activator.CreateInstance(request.Configuration.Type);

    foreach (var date in _tradableDaysProvider(request))
    {
        var currentSymbol = request.Configuration.MappedSymbol;
        request.Configuration.MappedSymbol = GetMappedSymbol(request, date);
        var source = sourceFactory.GetSource(request.Configuration, date, false);
        request.Configuration.MappedSymbol = currentSymbol;

        var factory = SubscriptionDataSourceReader.ForSource(source, _dataCacheProvider, request.Configuration, date, false);
        var entriesForDate = factory.Read(source);
        foreach (var entry in entriesForDate)
        {
            yield return entry;
        }
    }
}
private IEnumerator<BaseData> EnumerateDataSourceReader(SubscriptionDataConfig config, IDataProvider dataProvider, Ref<DateTime> localFrontier, SubscriptionDataSource source, DateTime localDate)
{
    using (var dataCacheProvider = new SingleEntryDataCacheProvider(dataProvider))
    {
        var newLocalFrontier = localFrontier.Value;
        var dataSourceReader = GetSubscriptionDataSourceReader(source, dataCacheProvider, config, localDate);

        foreach (var datum in dataSourceReader.Read(source))
        {
            // always skip past all times emitted on the previous invocation of this enumerator
            // this allows data at the same time from the same refresh of the source while excluding
            // data from different refreshes of the source
            if (datum.EndTime > localFrontier.Value)
            {
                yield return datum;
            }

            newLocalFrontier = Time.Max(datum.EndTime, newLocalFrontier);
        }

        localFrontier.Value = newLocalFrontier;
    }
}
/// <summary>
/// Creates an enumerator to read the specified request
/// </summary>
/// <param name="request">The subscription request to be read</param>
/// <param name="dataProvider">Provider used to get data when it is not present on disk</param>
/// <returns>An enumerator reading the subscription request</returns>
public IEnumerator<BaseData> CreateEnumerator(SubscriptionRequest request, IDataProvider dataProvider)
{
    var sourceFactory = request.Configuration.GetBaseDataInstance();
    using (var dataCacheProvider = new SingleEntryDataCacheProvider(dataProvider))
    {
        foreach (var date in _tradableDaysProvider(request))
        {
            if (sourceFactory.RequiresMapping())
            {
                request.Configuration.MappedSymbol = GetMappedSymbol(request.Configuration, date);
            }

            var source = sourceFactory.GetSource(request.Configuration, date, _isLiveMode);
            var factory = SubscriptionDataSourceReader.ForSource(source, dataCacheProvider, request.Configuration, date, _isLiveMode, sourceFactory);
            var entriesForDate = factory.Read(source);
            foreach (var entry in entriesForDate)
            {
                yield return entry;
            }
        }
    }
}
public void ReadsZipEntryNames()
{
    var time = new DateTime(2016, 03, 03, 12, 48, 15);
    var source = Path.Combine("TestData", "20151224_quote_american.zip");
    var config = new SubscriptionDataConfig(typeof(ZipEntryName), Symbol.Create("XLRE", SecurityType.Option, Market.USA), Resolution.Tick,
        TimeZones.NewYork, TimeZones.NewYork, false, false, false);

    var dataProvider = new DefaultDataProvider();
    var dataCacheProvider = new SingleEntryDataCacheProvider(dataProvider);
    var factory = new ZipEntryNameSubscriptionDataSourceReader(dataCacheProvider, config, time, false);

    var expected = new[]
    {
        Symbol.CreateOption("XLRE", Market.USA, OptionStyle.American, OptionRight.Call, 21m, new DateTime(2016, 08, 19)),
        Symbol.CreateOption("XLRE", Market.USA, OptionStyle.American, OptionRight.Call, 22m, new DateTime(2016, 08, 19)),
        Symbol.CreateOption("XLRE", Market.USA, OptionStyle.American, OptionRight.Put, 37m, new DateTime(2016, 08, 19)),
    };

    var actual = factory.Read(new SubscriptionDataSource(source, SubscriptionTransportMedium.LocalFile, FileFormat.ZipEntryName)).ToList();

    // we only really care about the symbols
    CollectionAssert.AreEqual(expected, actual.Select(x => x.Symbol));
    Assert.IsTrue(actual.All(x => x is ZipEntryName));
}
/// <summary>
/// Creates an enumerator to read the specified request
/// </summary>
/// <param name="request">The subscription request to be read</param>
/// <param name="dataProvider">Provider used to get data when it is not present on disk</param>
/// <returns>An enumerator reading the subscription request</returns>
public IEnumerator<BaseData> CreateEnumerator(SubscriptionRequest request, IDataProvider dataProvider)
{
    using (var dataCacheProvider = new SingleEntryDataCacheProvider(dataProvider))
    {
        var tradableDays = _tradableDaysProvider(request);
        var fineFundamentalConfiguration = new SubscriptionDataConfig(request.Configuration, typeof(FineFundamental), request.Security.Symbol);

        foreach (var date in tradableDays)
        {
            var fineFundamentalSource = GetSource(FineFundamental, fineFundamentalConfiguration, date);
            var fineFundamentalFactory = SubscriptionDataSourceReader.ForSource(fineFundamentalSource, dataCacheProvider, fineFundamentalConfiguration, date, _isLiveMode, FineFundamental, dataProvider);
            var fineFundamentalForDate = (FineFundamental)fineFundamentalFactory.Read(fineFundamentalSource).FirstOrDefault();

            // do not emit null points. Null points won't happen when used with Coarse data since we are pre-filtering based on Coarse.HasFundamentalData,
            // but they could happen when fine filtering custom universes
            if (fineFundamentalForDate != null)
            {
                yield return new FineFundamental
                {
                    DataType = MarketDataType.Auxiliary,
                    Symbol = request.Configuration.Symbol,
                    Time = date,
                    CompanyReference = fineFundamentalForDate.CompanyReference,
                    SecurityReference = fineFundamentalForDate.SecurityReference,
                    FinancialStatements = fineFundamentalForDate.FinancialStatements,
                    EarningReports = fineFundamentalForDate.EarningReports,
                    OperationRatios = fineFundamentalForDate.OperationRatios,
                    EarningRatios = fineFundamentalForDate.EarningRatios,
                    ValuationRatios = fineFundamentalForDate.ValuationRatios,
                    AssetClassification = fineFundamentalForDate.AssetClassification,
                    CompanyProfile = fineFundamentalForDate.CompanyProfile
                };
            }
        }
    }
}
public void DataIsCorrect()
{
    var date = new DateTime(2017, 10, 22);
    var config = new SubscriptionDataConfig(
        typeof(QuoteBar),
        Symbol.Create("OANDA/EURUSD", SecurityType.Forex, Market.Oanda),
        Resolution.Minute,
        TimeZones.NewYork,
        TimeZones.NewYork,
        true,
        true,
        false);

    //var dataCacheProvider = new CustomEphemeralDataCacheProvider { IsDataEphemeral = true };
    var dataProvider = new QuantConnect.Lean.Engine.DataFeeds.OandaDataProvider();
    //dataProvider.Initialize(TestConfiguration.Parameters["token"], TestConfiguration.Parameters["dataPath"]);
    var dataCacheProvider = new SingleEntryDataCacheProvider(dataProvider);

    var reader = new TextSubscriptionDataSourceReader(
        dataCacheProvider,
        config,
        date,
        false);

    Config.Set("oanda-data-access-token", TestConfiguration.Parameters["token"]);
    Config.Set("data-folder", TestConfiguration.Parameters["dataPath"]);
    Globals.Reset();

    var source = (new ForexOandaVolume()).GetSource(config, date, false);
    var dataBars = reader.Read(source);

    decimal[] prices = { 1.176455m, 1.17648m };
    BaseData[] data = dataBars.ToArray();

    Assert.AreEqual(data[0].Price, prices[0]);
    Assert.AreEqual(data[1].Price, prices[1]);
}
public void Setup()
{
    _singleEntryDataCacheProvider = new SingleEntryDataCacheProvider(new DefaultDataProvider());
}
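// A possible companion teardown for the fixture above (not shown in the original snippet):
// SingleEntryDataCacheProvider is disposable (the other examples here wrap it in a using block),
// so a fixture that creates one in Setup would normally dispose it afterwards.
// The [TearDown] attribute assumes an NUnit fixture, like the surrounding tests.
[TearDown]
public void TearDown()
{
    _singleEntryDataCacheProvider.Dispose();
}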
/// <summary>
/// Runs a single backtest/live job from the job queue
/// </summary>
/// <param name="job">The algorithm job to be processed</param>
/// <param name="assemblyPath">The path to the algorithm's assembly</param>
public void Run(AlgorithmNodePacket job, string assemblyPath)
{
    var algorithm = default(IAlgorithm);
    var algorithmManager = new AlgorithmManager(_liveMode);

    //Start monitoring the backtest active status:
    var statusPing = new StateCheck.Ping(algorithmManager, _systemHandlers.Api, _algorithmHandlers.Results, _systemHandlers.Notify, job);
    var statusPingThread = new Thread(statusPing.Run);
    statusPingThread.Start();

    try
    {
        //Reset thread holders.
        var initializeComplete = false;
        Thread threadFeed = null;
        Thread threadTransactions = null;
        Thread threadResults = null;
        Thread threadRealTime = null;

        //-> Initialize messaging system
        _systemHandlers.Notify.SetAuthentication(job);

        //-> Set the result handler type for this algorithm job, and launch the associated result thread.
        _algorithmHandlers.Results.Initialize(job, _systemHandlers.Notify, _systemHandlers.Api, _algorithmHandlers.DataFeed, _algorithmHandlers.Setup, _algorithmHandlers.Transactions);

        threadResults = new Thread(_algorithmHandlers.Results.Run, 0) { Name = "Result Thread" };
        threadResults.Start();

        IBrokerage brokerage = null;
        try
        {
            // Save algorithm to cache, load algorithm instance:
            algorithm = _algorithmHandlers.Setup.CreateAlgorithmInstance(job, assemblyPath);

            // Initialize the brokerage
            IBrokerageFactory factory;
            brokerage = _algorithmHandlers.Setup.CreateBrokerage(job, algorithm, out factory);

            // Initialize the data feed before we initialize so it can intercept added securities/universes via events
            _algorithmHandlers.DataFeed.Initialize(algorithm, job, _algorithmHandlers.Results, _algorithmHandlers.MapFileProvider, _algorithmHandlers.FactorFileProvider, _algorithmHandlers.DataProvider);

            // initialize command queue system
            _algorithmHandlers.CommandQueue.Initialize(job, algorithm);

            // set the history provider before setting up the algorithm
            var historyProvider = GetHistoryProvider(job.HistoryProvider);
            if (historyProvider is BrokerageHistoryProvider)
            {
                (historyProvider as BrokerageHistoryProvider).SetBrokerage(brokerage);
            }

            var historyDataCacheProvider = new SingleEntryDataCacheProvider(_algorithmHandlers.DataProvider);
            historyProvider.Initialize(job, _algorithmHandlers.DataProvider, historyDataCacheProvider, _algorithmHandlers.MapFileProvider, _algorithmHandlers.FactorFileProvider, progress =>
            {
                // send progress updates to the result handler only during initialization
                if (!algorithm.GetLocked() || algorithm.IsWarmingUp)
                {
                    _algorithmHandlers.Results.SendStatusUpdate(AlgorithmStatus.History, string.Format("Processing history {0}%...", progress));
                }
            });

            algorithm.HistoryProvider = historyProvider;

            // initialize the default brokerage message handler
            algorithm.BrokerageMessageHandler = factory.CreateBrokerageMessageHandler(algorithm, job, _systemHandlers.Api);

            //Initialize the internal state of algorithm and job: executes the algorithm.Initialize() method.
            initializeComplete = _algorithmHandlers.Setup.Setup(algorithm, brokerage, job, _algorithmHandlers.Results, _algorithmHandlers.Transactions, _algorithmHandlers.RealTime);

            // set this again now that we've actually added securities
            _algorithmHandlers.Results.SetAlgorithm(algorithm);

            //If there are any reasons it failed, pass these back to the IDE.
            if (!initializeComplete || algorithm.ErrorMessages.Count > 0 || _algorithmHandlers.Setup.Errors.Count > 0)
            {
                initializeComplete = false;
                //Get all the error messages: internal in algorithm and external in setup handler.
                var errorMessage = String.Join(",", algorithm.ErrorMessages);
                errorMessage += String.Join(",", _algorithmHandlers.Setup.Errors);
                Log.Error("Engine.Run(): " + errorMessage);
                _algorithmHandlers.Results.RuntimeError(errorMessage);
                _systemHandlers.Api.SetAlgorithmStatus(job.AlgorithmId, AlgorithmStatus.RuntimeError, errorMessage);
            }
        }
        catch (Exception err)
        {
            Log.Error(err);
            var runtimeMessage = "Algorithm.Initialize() Error: " + err.Message + " Stack Trace: " + err.StackTrace;
            _algorithmHandlers.Results.RuntimeError(runtimeMessage, err.StackTrace);
            _systemHandlers.Api.SetAlgorithmStatus(job.AlgorithmId, AlgorithmStatus.RuntimeError, runtimeMessage);
        }

        // log the job endpoints
        Log.Trace("JOB HANDLERS: ");
        Log.Trace(" DataFeed: " + _algorithmHandlers.DataFeed.GetType().FullName);
        Log.Trace(" Setup: " + _algorithmHandlers.Setup.GetType().FullName);
        Log.Trace(" RealTime: " + _algorithmHandlers.RealTime.GetType().FullName);
        Log.Trace(" Results: " + _algorithmHandlers.Results.GetType().FullName);
        Log.Trace(" Transactions: " + _algorithmHandlers.Transactions.GetType().FullName);
        Log.Trace(" Commands: " + _algorithmHandlers.CommandQueue.GetType().FullName);
        if (algorithm != null && algorithm.HistoryProvider != null)
        {
            Log.Trace(" History Provider: " + algorithm.HistoryProvider.GetType().FullName);
        }
        if (job is LiveNodePacket)
        {
            Log.Trace(" Brokerage: " + brokerage.GetType().FullName);
        }

        //-> Using the job + initialization: load the designated handlers:
        if (initializeComplete)
        {
            //-> Reset the backtest stopwatch; we're now running the algorithm.
            var startTime = DateTime.Now;

            //Set algorithm as locked; set it to live mode if we're trading live, and set it to locked for no further updates.
            algorithm.SetAlgorithmId(job.AlgorithmId);
            algorithm.SetLocked();

            //Load the associated handlers for transaction and realtime events:
            _algorithmHandlers.Transactions.Initialize(algorithm, brokerage, _algorithmHandlers.Results);
            _algorithmHandlers.RealTime.Setup(algorithm, job, _algorithmHandlers.Results, _systemHandlers.Api);

            // wire up the brokerage message handler
            brokerage.Message += (sender, message) =>
            {
                algorithm.BrokerageMessageHandler.Handle(message);

                // fire brokerage message events
                algorithm.OnBrokerageMessage(message);
                switch (message.Type)
                {
                    case BrokerageMessageType.Disconnect:
                        algorithm.OnBrokerageDisconnect();
                        break;
                    case BrokerageMessageType.Reconnect:
                        algorithm.OnBrokerageReconnect();
                        break;
                }
            };

            //Send status to user the algorithm is now executing.
            _algorithmHandlers.Results.SendStatusUpdate(AlgorithmStatus.Running);

            //Launch the data, transaction and realtime handlers into dedicated threads
            threadFeed = new Thread(_algorithmHandlers.DataFeed.Run) { Name = "DataFeed Thread" };
            threadTransactions = new Thread(_algorithmHandlers.Transactions.Run) { Name = "Transaction Thread" };
            threadRealTime = new Thread(_algorithmHandlers.RealTime.Run) { Name = "RealTime Thread" };

            //Launch the data feed, result sending, and transaction models/handlers in separate threads.
            threadFeed.Start(); // Data feed pushing data packets into thread bridge;
            threadTransactions.Start(); // Transaction modeller scanning new order requests
            threadRealTime.Start(); // RealTime scan time for time based events:
            // Result manager scanning message queue: (started earlier)
            _algorithmHandlers.Results.DebugMessage(string.Format("Launching analysis for {0} with LEAN Engine v{1}", job.AlgorithmId, Globals.Version));

            try
            {
                //Create a new engine isolator class
                var isolator = new Isolator();

                // Execute the Algorithm Code:
                var complete = isolator.ExecuteWithTimeLimit(_algorithmHandlers.Setup.MaximumRuntime, algorithmManager.TimeLoopWithinLimits, () =>
                {
                    try
                    {
                        //Run Algorithm Job:
                        // -> Using this Data Feed,
                        // -> Send Orders to this TransactionHandler,
                        // -> Send Results to ResultHandler.
                        algorithmManager.Run(job, algorithm, _algorithmHandlers.DataFeed, _algorithmHandlers.Transactions, _algorithmHandlers.Results, _algorithmHandlers.RealTime, _algorithmHandlers.CommandQueue, isolator.CancellationToken);
                    }
                    catch (Exception err)
                    {
                        //Debugging at this level is difficult, stack trace needed.
                        Log.Error(err);
                        algorithm.RunTimeError = err;
                        algorithmManager.SetStatus(AlgorithmStatus.RuntimeError);
                        return;
                    }

                    Log.Trace("Engine.Run(): Exiting Algorithm Manager");
                }, job.Controls.RamAllocation);

                if (!complete)
                {
                    Log.Error("Engine.Main(): Failed to complete in time: " + _algorithmHandlers.Setup.MaximumRuntime.ToString("F"));
                    throw new Exception("Failed to complete algorithm within " + _algorithmHandlers.Setup.MaximumRuntime.ToString("F") + " seconds. Please make it run faster.");
                }

                // Algorithm runtime error:
                if (algorithm.RunTimeError != null)
                {
                    HandleAlgorithmError(job, algorithm.RunTimeError);
                }
            }
            catch (Exception err)
            {
                //Error running the user algorithm: purge datafeed, send error messages, set algorithm status to failed.
                HandleAlgorithmError(job, err);
            }

            try
            {
                var trades = algorithm.TradeBuilder.ClosedTrades;
                var charts = new Dictionary<string, Chart>(_algorithmHandlers.Results.Charts);
                var orders = new Dictionary<int, Order>(_algorithmHandlers.Transactions.Orders);
                var holdings = new Dictionary<string, Holding>();
                var banner = new Dictionary<string, string>();
                var statisticsResults = new StatisticsResults();

                var csvTransactionsFileName = Config.Get("transaction-log");
                if (!string.IsNullOrEmpty(csvTransactionsFileName))
                {
                    SaveListOfTrades(_algorithmHandlers.Transactions, csvTransactionsFileName);
                }

                try
                {
                    //Generates error when things don't exist (no charting logged, runtime errors in main algo execution)
                    const string strategyEquityKey = "Strategy Equity";
                    const string equityKey = "Equity";
                    const string dailyPerformanceKey = "Daily Performance";
                    const string benchmarkKey = "Benchmark";

                    // make sure we've taken samples for these series before just blindly requesting them
                    if (charts.ContainsKey(strategyEquityKey) &&
                        charts[strategyEquityKey].Series.ContainsKey(equityKey) &&
                        charts[strategyEquityKey].Series.ContainsKey(dailyPerformanceKey))
                    {
                        var equity = charts[strategyEquityKey].Series[equityKey].Values;
                        var performance = charts[strategyEquityKey].Series[dailyPerformanceKey].Values;
                        var profitLoss = new SortedDictionary<DateTime, decimal>(algorithm.Transactions.TransactionRecord);
                        var totalTransactions = algorithm.Transactions.GetOrders(x => x.Status.IsFill()).Count();
                        var benchmark = charts[benchmarkKey].Series[benchmarkKey].Values;

                        statisticsResults = StatisticsBuilder.Generate(trades, profitLoss, equity, performance, benchmark,
                            _algorithmHandlers.Setup.StartingPortfolioValue, algorithm.Portfolio.TotalFees, totalTransactions);

                        //Some users have $0 in their brokerage account / starting cash of $0. Prevent divide by zero errors
                        var netReturn = _algorithmHandlers.Setup.StartingPortfolioValue > 0
                            ? (algorithm.Portfolio.TotalPortfolioValue - _algorithmHandlers.Setup.StartingPortfolioValue) / _algorithmHandlers.Setup.StartingPortfolioValue
                            : 0;

                        //Add other fixed parameters.
                        banner.Add("Unrealized", "$" + algorithm.Portfolio.TotalUnrealizedProfit.ToString("N2"));
                        banner.Add("Fees", "-$" + algorithm.Portfolio.TotalFees.ToString("N2"));
                        banner.Add("Net Profit", "$" + algorithm.Portfolio.TotalProfit.ToString("N2"));
                        banner.Add("Return", netReturn.ToString("P"));
                        banner.Add("Equity", "$" + algorithm.Portfolio.TotalPortfolioValue.ToString("N2"));
                    }
                }
                catch (Exception err)
                {
                    Log.Error(err, "Error generating statistics packet");
                }

                //Diagnostics Completed, Send Result Packet:
                var totalSeconds = (DateTime.Now - startTime).TotalSeconds;
                var dataPoints = algorithmManager.DataPoints + algorithm.HistoryProvider.DataPointCount;
                _algorithmHandlers.Results.DebugMessage(
                    string.Format("Algorithm Id:({0}) completed in {1} seconds at {2}k data points per second. Processing total of {3} data points.",
                        job.AlgorithmId, totalSeconds.ToString("F2"), ((dataPoints / (double)1000) / totalSeconds).ToString("F0"), dataPoints.ToString("N0")));

                _algorithmHandlers.Results.SendFinalResult(job, orders, algorithm.Transactions.TransactionRecord, holdings, statisticsResults, banner);
            }
            catch (Exception err)
            {
                Log.Error(err, "Error sending analysis results");
            }

            //Before we return, send terminate commands to close up the threads
            _algorithmHandlers.Transactions.Exit();
            _algorithmHandlers.DataFeed.Exit();
            _algorithmHandlers.RealTime.Exit();
        }

        //Close result handler:
        _algorithmHandlers.Results.Exit();
        statusPing.Exit();

        //Wait for the threads to complete:
        var ts = Stopwatch.StartNew();
        while ((_algorithmHandlers.Results.IsActive
            || (_algorithmHandlers.Transactions != null && _algorithmHandlers.Transactions.IsActive)
            || (_algorithmHandlers.DataFeed != null && _algorithmHandlers.DataFeed.IsActive)
            || (_algorithmHandlers.RealTime != null && _algorithmHandlers.RealTime.IsActive))
            && ts.ElapsedMilliseconds < 30 * 1000)
        {
            Thread.Sleep(100);
            Log.Trace("Waiting for threads to exit...");
        }

        //Terminate threads still in active state.
        if (threadFeed != null && threadFeed.IsAlive)
        {
            threadFeed.Abort();
        }
        if (threadTransactions != null && threadTransactions.IsAlive)
        {
            threadTransactions.Abort();
        }
        if (threadResults != null && threadResults.IsAlive)
        {
            threadResults.Abort();
        }
        if (statusPingThread != null && statusPingThread.IsAlive)
        {
            statusPingThread.Abort();
        }

        if (brokerage != null)
        {
            Log.Trace("Engine.Run(): Disconnecting from brokerage...");
            brokerage.Disconnect();
        }
        if (_algorithmHandlers.Setup != null)
        {
            Log.Trace("Engine.Run(): Disposing of setup handler...");
            _algorithmHandlers.Setup.Dispose();
        }
        Log.Trace("Engine.Main(): Analysis Completed and Results Posted.");
    }
    catch (Exception err)
    {
        Log.Error(err, "Error running algorithm");
    }
    finally
    {
        //No matter what for live mode; make sure we've set algorithm status in the API for "not running" conditions:
        if (_liveMode && algorithmManager.State != AlgorithmStatus.Running && algorithmManager.State != AlgorithmStatus.RuntimeError)
        {
            _systemHandlers.Api.SetAlgorithmStatus(job.AlgorithmId, algorithmManager.State);
        }

        _algorithmHandlers.Results.Exit();
        _algorithmHandlers.DataFeed.Exit();
        _algorithmHandlers.Transactions.Exit();
        _algorithmHandlers.RealTime.Exit();
    }
}