/// <summary>
/// Console result handler constructor.
/// </summary>
/// <remarks>Setup the default sampling and notification periods based on the backtest length.</remarks>
public ConsoleResultHandler(AlgorithmNodePacket packet)
{
    FinalStatistics = new Dictionary<string, string>();
    Log.Trace("Launching Console Result Handler: QuantConnect v2.0");
    Messages = new ConcurrentQueue<Packet>();
    Charts = new ConcurrentDictionary<string, Chart>();
    _chartLock = new Object();
    _isActive = true;

    // Resolve the console status handler from the packet's concrete type:
    // a backtest packet or a live packet are the only two supported inputs.
    var backtest = packet as BacktestNodePacket;
    if (backtest != null)
    {
        _algorithmNode = new BacktestConsoleStatusHandler(backtest);
    }
    else
    {
        var live = packet as LiveNodePacket;
        if (live == null)
        {
            throw new ArgumentException("Unexpected AlgorithmNodeType: " + packet.GetType().Name);
        }
        _algorithmNode = new LiveConsoleStatusHandler(live);
    }

    // Sampling cadence is derived from the job length; pushes go out every five seconds.
    _resamplePeriod = _algorithmNode.ComputeSampleEquityPeriod();

    //Notification Period for pushes:
    _notificationPeriod = TimeSpan.FromSeconds(5);
}
/// <summary>
/// Initializes the data feed for the specified job and algorithm.
/// </summary>
/// <param name="algorithm">The algorithm whose subscriptions and universes drive the feed</param>
/// <param name="job">The job packet being executed</param>
/// <param name="resultHandler">Result handler used to surface feed output</param>
/// <param name="mapFileProvider">Provider of symbol map files</param>
/// <exception cref="Exception">Thrown when the algorithm has neither subscriptions nor universes</exception>
public void Initialize(IAlgorithm algorithm, AlgorithmNodePacket job, IResultHandler resultHandler, IMapFileProvider mapFileProvider)
{
    if (algorithm.SubscriptionManager.Subscriptions.Count == 0 && algorithm.Universes.IsNullOrEmpty())
    {
        throw new Exception("No subscriptions registered and no universe defined.");
    }

    _algorithm = algorithm;
    _resultHandler = resultHandler;
    _mapFileProvider = mapFileProvider;
    _subscriptions = new ConcurrentDictionary<Symbol, Subscription>();
    _cancellationTokenSource = new CancellationTokenSource();
    IsActive = true;
    Bridge = new BusyBlockingCollection<TimeSlice>(100);

    // the Ref closes over the local 'ffres', so the later reassignment below
    // is observed by every subsequent read through _fillForwardResolution
    var ffres = Time.OneSecond;
    _fillForwardResolution = Ref.Create(() => ffres, res => ffres = res);

    // find the minimum resolution, ignoring ticks
    ffres = ResolveFillForwardResolution(algorithm);

    // fix: start/end are identical for every universe — hoisted out of the loop
    var startTimeUtc = _algorithm.StartDate.ConvertToUtc(_algorithm.TimeZone);
    var endTimeUtc = _algorithm.EndDate.ConvertToUtc(_algorithm.TimeZone);

    // add each universe selection subscription to the feed
    foreach (var universe in _algorithm.Universes)
    {
        AddUniverseSubscription(universe, startTimeUtc, endTimeUtc);
    }
}
/// <summary>
/// This method is called when a new job is received.
/// </summary>
/// <param name="job">The job that is being executed</param>
public void Initialize(AlgorithmNodePacket job)
{
    _job = job;

    //Show warnings if the API token and UID aren't set.
    if (_job.UserId == 0)
    {
        MessageBox.Show("Your user id is not set. Please check your config.json file 'job-user-id' property.", "LEAN Algorithmic Trading", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }

    if (_job.Channel == "")
    {
        MessageBox.Show("Your API token is not set. Please check your config.json file 'api-access-token' property.", "LEAN Algorithmic Trading", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }

    // live packets get the live charting page; everything else gets the backtest page
    _liveMode = job is LiveNodePacket;
    var url = GetUrl(job, _liveMode);

    // navigate whichever browser control this build compiled in:
    // GeckoFX outside Mono, the WinForms WebBrowser under Mono
#if !__MonoCS__
    _geckoBrowser.Navigate(url);
#else
    _monoBrowser.Navigate(url);
#endif
}
/// <summary>
/// Setup the algorithm data, cash, job start end date etc.
/// </summary>
public BacktestingRealTimeHandler(IAlgorithm algorithm, AlgorithmNodePacket job)
{
    // Capture the job and algorithm references and start with no pending events.
    _job = job;
    _algorithm = algorithm;
    _events = new List<RealTimeEvent>();
}
/// <summary>
/// Setup the algorithm data, cash, job start end date etc.
/// </summary>
public BacktestingRealTimeHandler(IAlgorithm algorithm, AlgorithmNodePacket job)
{
    // Capture references and initialize empty event and market-hours state.
    _job = job;
    _algorithm = algorithm;
    _today = new Dictionary<SecurityType, MarketToday>();
    _events = new List<RealTimeEvent>();
}
/// <summary>
/// Initializes this handler with the job and the collaborating engine handlers.
/// </summary>
/// <param name="job">The job being executed; the only argument retained</param>
/// <param name="messagingHandler">Unused by this implementation</param>
/// <param name="api">Unused by this implementation</param>
/// <param name="dataFeed">Unused by this implementation</param>
/// <param name="setupHandler">Unused by this implementation</param>
/// <param name="transactionHandler">Unused by this implementation</param>
public void Initialize(AlgorithmNodePacket job, IMessagingHandler messagingHandler, IApi api, IDataFeed dataFeed, ISetupHandler setupHandler, ITransactionHandler transactionHandler)
{
    // only the job is stored; the remaining parameters satisfy the interface signature
    _job = job;
}
/// <summary>
/// Initialize the realtime event handler with all information required for triggering daily events.
/// </summary>
public LiveTradingRealTimeHandler(IAlgorithm algorithm, IDataFeed feed, IResultHandler results, IBrokerage brokerage, AlgorithmNodePacket job)
{
    // Wire up the collaborators this handler raises events against.
    _feed = feed;
    _results = results;
    _algorithm = algorithm;

    // Empty event list and market-hours cache until the first refresh.
    _events = new List<RealTimeEvent>();
    _today = new Dictionary<SecurityType, MarketToday>();
}
/// <summary>
/// Create the status-ping worker for the running job.
/// </summary>
public Ping(AlgorithmManager algorithmManager, IApi api, IResultHandler resultHandler, IMessagingHandler messagingHandler, AlgorithmNodePacket job)
{
    // Store the collaborators the ping loop reports through.
    _job = job;
    _api = api;
    _algorithmManager = algorithmManager;
    _resultHandler = resultHandler;
    _messagingHandler = messagingHandler;

    // Unsignalled until the engine requests exit.
    _exitEvent = new ManualResetEventSlim(false);
}
/// <summary>
/// Initializes a new instance of the <see cref="DefaultBrokerageMessageHandler"/> class
/// </summary>
/// <param name="algorithm">The running algorithm</param>
/// <param name="job">The job that produced the algorithm</param>
/// <param name="api">The api for the algorithm</param>
/// <param name="initialDelay">Optional override for the initial delay; when null, <see cref="DefaultInitialDelay"/> is used</param>
/// <param name="openThreshold">Defines how long before market open to re-check for brokerage reconnect message; when null, <see cref="DefaultOpenThreshold"/> is used</param>
public DefaultBrokerageMessageHandler(IAlgorithm algorithm, AlgorithmNodePacket job, IApi api, TimeSpan? initialDelay = null, TimeSpan? openThreshold = null)
{
    _api = api;
    _job = job;
    _algorithm = algorithm;
    // assume the brokerage starts connected until a disconnect message arrives
    _connected = true;
    _openThreshold = openThreshold ?? DefaultOpenThreshold;
    _initialDelay = initialDelay ?? DefaultInitialDelay;
}
/// <summary>
/// Initializes the data feed for the specified job and algorithm
/// </summary>
/// <param name="algorithm">The algorithm whose universes and securities drive subscriptions</param>
/// <param name="job">The job packet; only <c>job.Controls</c> is read here</param>
/// <param name="resultHandler">Result handler for feed output</param>
/// <param name="mapFileProvider">Provider of symbol map files</param>
/// <param name="factorFileProvider">Provider of price factor files</param>
public void Initialize(IAlgorithm algorithm, AlgorithmNodePacket job, IResultHandler resultHandler, IMapFileProvider mapFileProvider, IFactorFileProvider factorFileProvider)
{
    _algorithm = algorithm;
    _resultHandler = resultHandler;
    _mapFileProvider = mapFileProvider;
    _factorFileProvider = factorFileProvider;
    _subscriptions = new SubscriptionCollection();
    _universeSelection = new UniverseSelection(this, algorithm, job.Controls);
    _cancellationTokenSource = new CancellationTokenSource();
    IsActive = true;

    // worker pool sized between 1 and 4 depending on available cores
    var threadCount = Math.Max(1, Math.Min(4, Environment.ProcessorCount - 3));
    _controller = new ParallelRunnerController(threadCount);
    _controller.Start(_cancellationTokenSource.Token);

    // the Ref closes over 'ffres' so writes through the setter are seen by later reads
    var ffres = Time.OneMinute;
    _fillForwardResolution = Ref.Create(() => ffres, res => ffres = res);

    // wire ourselves up to receive notifications when universes are added/removed
    algorithm.UniverseManager.CollectionChanged += (sender, args) =>
    {
        switch (args.Action)
        {
            case NotifyCollectionChangedAction.Add:
                foreach (var universe in args.NewItems.OfType<Universe>())
                {
                    var config = universe.Configuration;
                    // before the feed has advanced, fall back to the algorithm's start date
                    var start = _frontierUtc != DateTime.MinValue ? _frontierUtc : _algorithm.StartDate.ConvertToUtc(_algorithm.TimeZone);
                    var marketHoursDatabase = MarketHoursDatabase.FromDataFolder();
                    var exchangeHours = marketHoursDatabase.GetExchangeHours(config);
                    Security security;
                    if (!_algorithm.Securities.TryGetValue(config.Symbol, out security))
                    {
                        // create a canonical security object if it doesn't exist
                        security = new Security(exchangeHours, config, _algorithm.Portfolio.CashBook[CashBook.AccountCurrency], SymbolProperties.GetDefault(CashBook.AccountCurrency));
                    }
                    var end = _algorithm.EndDate.ConvertToUtc(_algorithm.TimeZone);
                    AddSubscription(new SubscriptionRequest(true, universe, security, config, start, end));
                }
                break;

            case NotifyCollectionChangedAction.Remove:
                foreach (var universe in args.OldItems.OfType<Universe>())
                {
                    RemoveSubscription(universe.Configuration);
                }
                break;

            default:
                throw new NotImplementedException("The specified action is not implemented: " + args.Action);
        }
    };
}
/// <summary>
/// Get the URL for the embedded charting
/// </summary>
/// <param name="job">Job packet for the URL</param>
/// <param name="liveMode">Is this a live mode chart?</param>
/// <param name="holdReady">Hold the ready signal to inject data</param>
/// <returns>The fully-formed terminal URL for this job</returns>
private static string GetUrl(AlgorithmNodePacket job, bool liveMode = false, bool holdReady = false)
{
    // fix: removed dead 'var url = ""' initialization and the inverted
    // 'holdReady == false' comparison; behavior is unchanged
    var hold = holdReady ? "1" : "0";
    var embedPage = liveMode ? "embeddedLive" : "embedded";

    return string.Format(
        "https://www.quantconnect.com/terminal/{0}?user={1}&token={2}&pid={3}&version={4}&holdReady={5}&bid={6}",
        embedPage, job.UserId, job.Channel, job.ProjectId, Globals.Version, hold, job.AlgorithmId);
}
/// <summary>
/// Initializes the data feed for the specified job and algorithm
/// </summary>
/// <param name="algorithm">The algorithm whose universes and securities drive subscriptions</param>
/// <param name="job">The job packet; only <c>job.Controls</c> is read here</param>
/// <param name="resultHandler">Result handler for feed output</param>
/// <param name="mapFileProvider">Provider of symbol map files</param>
/// <param name="factorFileProvider">Provider of price factor files</param>
public void Initialize(IAlgorithm algorithm, AlgorithmNodePacket job, IResultHandler resultHandler, IMapFileProvider mapFileProvider, IFactorFileProvider factorFileProvider)
{
    _algorithm = algorithm;
    _resultHandler = resultHandler;
    _mapFileProvider = mapFileProvider;
    _factorFileProvider = factorFileProvider;
    _subscriptions = new ConcurrentDictionary<Symbol, Subscription>();
    _universeSelection = new UniverseSelection(this, algorithm, job.Controls);
    _cancellationTokenSource = new CancellationTokenSource();
    IsActive = true;

    // worker pool sized between 1 and 4 depending on available cores
    var threadCount = Math.Max(1, Math.Min(4, Environment.ProcessorCount - 3));
    _controller = new ParallelRunnerController(threadCount);
    _controller.Start(_cancellationTokenSource.Token);

    // the Ref closes over 'ffres', so the reassignment below is seen by later reads
    var ffres = Time.OneSecond;
    _fillForwardResolution = Ref.Create(() => ffres, res => ffres = res);

    // find the minimum resolution, ignoring ticks
    ffres = ResolveFillForwardResolution(algorithm);

    // wire ourselves up to receive notifications when universes are added/removed
    algorithm.UniverseManager.CollectionChanged += (sender, args) =>
    {
        switch (args.Action)
        {
            case NotifyCollectionChangedAction.Add:
                foreach (var universe in args.NewItems.OfType<Universe>())
                {
                    // before the feed has advanced, fall back to the algorithm's start date
                    var start = _frontierUtc != DateTime.MinValue ? _frontierUtc : _algorithm.StartDate.ConvertToUtc(_algorithm.TimeZone);
                    AddUniverseSubscription(universe, start, _algorithm.EndDate.ConvertToUtc(_algorithm.TimeZone));
                }
                break;

            case NotifyCollectionChangedAction.Remove:
                foreach (var universe in args.OldItems.OfType<Universe>())
                {
                    Subscription subscription;
                    if (_subscriptions.TryGetValue(universe.Configuration.Symbol, out subscription))
                    {
                        RemoveSubscription(subscription);
                    }
                }
                break;

            default:
                throw new NotImplementedException("The specified action is not implemented: " + args.Action);
        }
    };
}
//private GeckoWebBrowser _geckoBrowser;

/// <summary>
/// Create the UX.
/// </summary>
/// <param name="notificationHandler">Messaging system</param>
/// <param name="job">Job to use for URL generation</param>
public LeanWinForm(IMessagingHandler notificationHandler, AlgorithmNodePacket job)
{
    InitializeComponent();

    //Form Setup:
    CenterToScreen();
    WindowState = FormWindowState.Maximized;
    Text = "QuantConnect Lean Algorithmic Trading Engine: v" + Globals.Version;

    //Save off the messaging event handler we need:
    _job = job;
    _liveMode = job is LiveNodePacket;
    _messaging = (EventMessagingHandler)notificationHandler;
    var url = GetUrl(job, _liveMode);

    //GECKO WEB BROWSER: Create the browser control
    // https://www.nuget.org/packages/GeckoFX/
    // -> If you don't have IE.
    //_geckoBrowser = new GeckoWebBrowser { Dock = DockStyle.Fill, Name = "browser" };
    //_geckoBrowser.DOMContentLoaded += BrowserOnDomContentLoaded;
    //_geckoBrowser.Navigate(url);
    //splitPanel.Panel1.Controls.Add(_geckoBrowser);

    // MONO WEB BROWSER: Create the browser control
    // Default shipped with VS and Mono. Works OK in Windows, and compiles in linux.
    _monoBrowser = new WebBrowser() {Dock = DockStyle.Fill, Name = "Browser"};
    _monoBrowser.DocumentCompleted += MonoBrowserOnDocumentCompleted;
    _monoBrowser.Navigate(url);
    splitPanel.Panel1.Controls.Add(_monoBrowser);

    //Setup Event Handlers: forward engine messages into the UI
    _messaging.DebugEvent += MessagingOnDebugEvent;
    _messaging.LogEvent += MessagingOnLogEvent;
    _messaging.RuntimeErrorEvent += MessagingOnRuntimeErrorEvent;
    _messaging.HandledErrorEvent += MessagingOnHandledErrorEvent;
    _messaging.BacktestResultEvent += MessagingOnBacktestResultEvent;

    // NOTE(review): 'as' cast — _logging is null when a different log handler is
    // configured; confirm downstream consumers null-check it
    _logging = Log.LogHandler as QueueLogHandler;

    //Show warnings if the API token and UID aren't set.
    if (_job.UserId == 0)
    {
        MessageBox.Show("Your user id is not set. Please check your config.json file 'job-user-id' property.", "LEAN Algorithmic Trading", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }

    if (_job.Channel == "")
    {
        MessageBox.Show("Your API token is not set. Please check your config.json file 'api-access-token' property.", "LEAN Algorithmic Trading", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
}
/// <summary>
/// Intializes the real time handler for the specified algorithm and job
/// </summary>
/// <param name="algorithm">The live algorithm to schedule events for</param>
/// <param name="job">The job packet (unused here)</param>
/// <param name="resultHandler">Result handler the scheduled events report through</param>
/// <param name="api">Api used for market-hours refreshes</param>
public void Setup(IAlgorithm algorithm, AlgorithmNodePacket job, IResultHandler resultHandler, IApi api)
{
    //Initialize:
    _api = api;
    _algorithm = algorithm;
    _resultHandler = resultHandler;
    _cancellationTokenSource = new CancellationTokenSource();

    // all scheduling below is anchored on "today" in the algorithm's time zone
    var todayInAlgorithmTimeZone = DateTime.UtcNow.ConvertFromUtc(_algorithm.TimeZone).Date;

    // refresh the market hours for today explicitly, and then set up an event to refresh them each day at midnight
    RefreshMarketHoursToday(todayInAlgorithmTimeZone);

    // every day at midnight from tomorrow until the end of time
    var times = from date in Time.EachDay(todayInAlgorithmTimeZone.AddDays(1), Time.EndOfTime) select date.ConvertToUtc(_algorithm.TimeZone);
    Add(new ScheduledEvent("RefreshMarketHours", times, (name, triggerTime) =>
    {
        // refresh market hours from api every day; trigger time is converted back to the algorithm's local date
        RefreshMarketHoursToday(triggerTime.ConvertFromUtc(_algorithm.TimeZone).Date);
    }));

    // add end of day events for each tradeable day
    Add(ScheduledEventFactory.EveryAlgorithmEndOfDay(_algorithm, _resultHandler, todayInAlgorithmTimeZone, Time.EndOfTime, ScheduledEvent.AlgorithmEndOfDayDelta, DateTime.UtcNow));

    // add end of trading day events for each security (internal feeds excluded)
    foreach (var security in _algorithm.Securities.Values.Where(x => !x.SubscriptionDataConfig.IsInternalFeed))
    {
        // assumes security.Exchange has been updated with today's hours via RefreshMarketHoursToday
        Add(ScheduledEventFactory.EverySecurityEndOfDay(_algorithm, _resultHandler, security, todayInAlgorithmTimeZone, Time.EndOfTime, ScheduledEvent.SecurityEndOfDayDelta, DateTime.UtcNow));
    }

    foreach (var scheduledEvent in _scheduledEvents)
    {
        // zoom past old events
        scheduledEvent.Value.SkipEventsUntil(algorithm.UtcTime);
        // set logging accordingly
        scheduledEvent.Value.IsLoggingEnabled = Log.DebuggingEnabled;
    }
}
/// <summary>
/// Intializes the real time handler for the specified algorithm and job
/// </summary>
/// <param name="algorithm">The live algorithm to schedule events for</param>
/// <param name="job">The job packet (unused here)</param>
/// <param name="resultHandler">Result handler the scheduled events report through</param>
/// <param name="api">Api used for market-hours refreshes</param>
public void Initialize(IAlgorithm algorithm, AlgorithmNodePacket job, IResultHandler resultHandler, IApi api)
{
    //Initialize:
    _api = api;
    _algorithm = algorithm;
    _resultHandler = resultHandler;
    _events = new ConcurrentDictionary<string, ScheduledEvent>();
    _cancellationTokenSource = new CancellationTokenSource();

    // all scheduling below is anchored on "today" in the algorithm's time zone
    var todayInAlgorithmTimeZone = DateTime.UtcNow.ConvertFromUtc(_algorithm.TimeZone).Date;

    // refresh the market hours for today explicitly, and then set up an event to refresh them each day at midnight
    RefreshMarketHoursToday(todayInAlgorithmTimeZone);

    // every day at midnight from tomorrow until the end of time
    var times = from date in Time.EachDay(todayInAlgorithmTimeZone.AddDays(1), Time.EndOfTime) select date.ConvertToUtc(_algorithm.TimeZone);
    AddEvent(new ScheduledEvent("RefreshMarketHours", times, (name, triggerTime) =>
    {
        // refresh market hours from api every day
        // NOTE(review): triggerTime here is the raw (UTC) trigger; a sibling
        // implementation converts it to the algorithm time zone's date first —
        // confirm RefreshMarketHoursToday expects this value as-is
        RefreshMarketHoursToday(triggerTime);
    }));

    // add end of day events for each tradeable day
    AddEvent(ScheduledEvent.EveryAlgorithmEndOfDay(_algorithm, _resultHandler, todayInAlgorithmTimeZone, Time.EndOfTime, ScheduledEvent.AlgorithmEndOfDayDelta, DateTime.UtcNow));

    // add end of trading day events for each security
    foreach (var security in _algorithm.Securities.Values)
    {
        // assumes security.Exchange has been updated with today's hours via RefreshMarketHoursToday
        AddEvent(ScheduledEvent.EverySecurityEndOfDay(_algorithm, _resultHandler, security, todayInAlgorithmTimeZone, Time.EndOfTime, ScheduledEvent.SecurityEndOfDayDelta, DateTime.UtcNow));
    }

    // logging is unconditionally enabled for live scheduled events
    foreach (var scheduledEvent in _events.Values)
    {
        scheduledEvent.IsLoggingEnabled = true;
    }
}
/// <summary>
/// Intializes the real time handler for the specified algorithm and job
/// </summary>
public void Setup(IAlgorithm algorithm, AlgorithmNodePacket job, IResultHandler resultHandler, IApi api)
{
    _algorithm = algorithm;
    _resultHandler = resultHandler;

    // one algorithm-level end-of-day event spanning the whole backtest period
    Add(ScheduledEventFactory.EveryAlgorithmEndOfDay(_algorithm, _resultHandler, _algorithm.StartDate, _algorithm.EndDate, ScheduledEvent.AlgorithmEndOfDayDelta));

    // one end-of-day event per security, skipping internal-only feeds
    var externalSecurities = _algorithm.Securities.Values.Where(x => !x.SubscriptionDataConfig.IsInternalFeed);
    foreach (var sec in externalSecurities)
    {
        Add(ScheduledEventFactory.EverySecurityEndOfDay(_algorithm, _resultHandler, sec, algorithm.StartDate, _algorithm.EndDate, ScheduledEvent.SecurityEndOfDayDelta));
    }

    foreach (var kvp in _scheduledEvents)
    {
        // fast-forward past events that precede the algorithm's current time,
        // and mirror the logger's debug flag onto each event
        kvp.Value.SkipEventsUntil(algorithm.UtcTime);
        kvp.Value.IsLoggingEnabled = Log.DebuggingEnabled;
    }
}
/// <summary>
/// Intializes the real time handler for the specified algorithm and job
/// </summary>
public void Initialize(IAlgorithm algorithm, AlgorithmNodePacket job, IResultHandler resultHandler, IApi api)
{
    _algorithm = algorithm;
    _resultHandler = resultHandler;
    _scheduledEvents = new ConcurrentDictionary<string, ScheduledEvent>();

    // one event firing at the algorithm's end of each tradeable day
    AddEvent(ScheduledEvent.EveryAlgorithmEndOfDay(_algorithm, _resultHandler, _algorithm.StartDate, _algorithm.EndDate, ScheduledEvent.AlgorithmEndOfDayDelta));

    // plus one per security, firing shortly before its market close
    foreach (var sec in _algorithm.Securities.Values)
    {
        AddEvent(ScheduledEvent.EverySecurityEndOfDay(_algorithm, _resultHandler, sec, algorithm.StartDate, _algorithm.EndDate, ScheduledEvent.SecurityEndOfDayDelta));
    }

    // when the logger is in debug mode, have every scheduled event log its firings
    if (Log.DebuggingEnabled)
    {
        foreach (var kvp in _scheduledEvents)
        {
            kvp.Value.IsLoggingEnabled = true;
        }
    }
}
/// <summary>
/// Setup the algorithm cash, dates and portfolio as desired.
/// </summary>
/// <param name="algorithm">Algorithm instance</param>
/// <param name="brokerage">Output new instance of the brokerage</param>
/// <param name="job">Algorithm job/task we're running</param>
/// <returns>Bool setup success</returns>
public bool Setup(IAlgorithm algorithm, out IBrokerage brokerage, AlgorithmNodePacket job)
{
    var initializeComplete = false;
    brokerage = new PaperBrokerage(algorithm);

    //For the console, let it set itself up primarily:
    try
    {
        //Paper trading runs in live mode (previous comment incorrectly said "backtesting, not live"):
        algorithm.SetLiveMode(true);

        //Set the live trading level asset/ram allocation limits.
        //Protects algorithm from linux killing the job by excess memory:
        switch (job.ServerType)
        {
            case ServerType.Server1024:
                algorithm.SetAssetLimits(100, 20, 10);
                break;

            case ServerType.Server2048:
                algorithm.SetAssetLimits(400, 50, 30);
                break;

            default: //512
                algorithm.SetAssetLimits(50, 10, 5);
                break;
        }

        //Initialize the algorithm
        algorithm.Initialize();
    }
    catch (Exception err)
    {
        // NOTE(review): only the exception message is kept; the stack trace is
        // discarded — confirm that is intentional for setup failures
        Log.Error("PaperTradingSetupHandler.Setup(): " + err.Message);
        Errors.Add("Error setting up the paper trading algorithm; " + err.Message);
    }

    // Starting capital is portfolio cash:
    StartingCapital = algorithm.Portfolio.Cash;

    // success only when no errors were recorded during setup
    if (Errors.Count == 0)
    {
        initializeComplete = true;
    }
    return initializeComplete;
}
/// <summary>
/// Desktop/Local acknowledge the task processed. Nothing to do.
/// </summary>
/// <param name="job">The completed job packet (unused)</param>
public void AcknowledgeJob(AlgorithmNodePacket job)
{
    // Make the console window pause so we can read log output before exiting and killing the application completely
    Console.WriteLine("Engine.Main(): Analysis Complete. Press any key to continue.");
    System.Console.Read();
}
/// <summary>
/// Intializes the real time handler for the specified algorithm and job
/// </summary>
public void Initialize(IAlgorithm algorithm, AlgorithmNodePacket job, IResultHandler resultHandler, IApi api)
{
    // Store the collaborators this handler works against.
    _api = api;
    _algorithm = algorithm;
    _resultHandler = resultHandler;

    // Fresh event list and market-hours cache.
    _today = new Dictionary<SecurityType, MarketToday>();
    _events = new List<RealTimeEvent>();
}
/// <summary>
/// Runs a single backtest/live job from the job queue
/// </summary>
/// <param name="job">The algorithm job to be processed</param>
/// <param name="assemblyPath">The path to the algorithm's assembly</param>
public void Run(AlgorithmNodePacket job, string assemblyPath)
{
    var algorithm = default(IAlgorithm);
    var algorithmManager = new AlgorithmManager(_liveMode);

    //Start monitoring the backtest active status:
    var statusPing = new StateCheck.Ping(algorithmManager, _systemHandlers.Api, _algorithmHandlers.Results);
    var statusPingThread = new Thread(statusPing.Run);
    statusPingThread.Start();

    try
    {
        //Reset thread holders.
        var initializeComplete = false;
        Thread threadFeed = null;
        Thread threadTransactions = null;
        Thread threadResults = null;
        Thread threadRealTime = null;

        //-> Initialize messaging system
        _systemHandlers.Notify.SetChannel(job.Channel);

        //-> Set the result handler type for this algorithm job, and launch the associated result thread.
        _algorithmHandlers.Results.Initialize(job, _systemHandlers.Notify, _systemHandlers.Api, _algorithmHandlers.DataFeed, _algorithmHandlers.Setup, _algorithmHandlers.Transactions);
        threadResults = new Thread(_algorithmHandlers.Results.Run, 0) {Name = "Result Thread"};
        threadResults.Start();

        IBrokerage brokerage = null;
        try
        {
            // Save algorithm to cache, load algorithm instance:
            algorithm = _algorithmHandlers.Setup.CreateAlgorithmInstance(assemblyPath, job.Language);

            //Initialize the internal state of algorithm and job: executes the algorithm.Initialize() method.
            initializeComplete = _algorithmHandlers.Setup.Setup(algorithm, out brokerage, job, _algorithmHandlers.Results, _algorithmHandlers.Transactions);

            //If there are any reasons it failed, pass these back to the IDE.
            if (!initializeComplete || algorithm.ErrorMessages.Count > 0 || _algorithmHandlers.Setup.Errors.Count > 0)
            {
                initializeComplete = false;
                //Get all the error messages: internal in algorithm and external in setup handler.
                var errorMessage = String.Join(",", algorithm.ErrorMessages);
                errorMessage += String.Join(",", _algorithmHandlers.Setup.Errors);
                _algorithmHandlers.Results.RuntimeError(errorMessage);
                _systemHandlers.Api.SetAlgorithmStatus(job.AlgorithmId, AlgorithmStatus.RuntimeError, errorMessage);
            }
        }
        catch (Exception err)
        {
            // setup/initialize failed hard — report to both the result handler and the api
            var runtimeMessage = "Algorithm.Initialize() Error: " + err.Message + " Stack Trace: " + err.StackTrace;
            _algorithmHandlers.Results.RuntimeError(runtimeMessage, err.StackTrace);
            _systemHandlers.Api.SetAlgorithmStatus(job.AlgorithmId, AlgorithmStatus.RuntimeError, runtimeMessage);
        }

        //-> Using the job + initialization: load the designated handlers:
        if (initializeComplete)
        {
            //-> Reset the backtest stopwatch; we're now running the algorithm.
            var startTime = DateTime.Now;

            //Set algorithm as locked; set it to live mode if we're trading live, and set it to locked for no further updates.
            algorithm.SetAlgorithmId(job.AlgorithmId);
            algorithm.SetLiveMode(_liveMode);
            algorithm.SetLocked();

            //Load the associated handlers for data, transaction and realtime events:
            _algorithmHandlers.Results.SetAlgorithm(algorithm);
            _algorithmHandlers.DataFeed.Initialize(algorithm, job, _algorithmHandlers.Results);
            _algorithmHandlers.Transactions.Initialize(algorithm, brokerage, _algorithmHandlers.Results);
            _algorithmHandlers.RealTime.Initialize(algorithm, job, _algorithmHandlers.Results, _systemHandlers.Api);

            //Set the error handlers for the brokerage asynchronous errors.
            _algorithmHandlers.Setup.SetupErrorHandler(_algorithmHandlers.Results, brokerage);

            //Send status to user the algorithm is now executing.
            _algorithmHandlers.Results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Running);

            //Launch the data, transaction and realtime handlers into dedicated threads
            threadFeed = new Thread(_algorithmHandlers.DataFeed.Run) {Name = "DataFeed Thread"};
            threadTransactions = new Thread(_algorithmHandlers.Transactions.Run) {Name = "Transaction Thread"};
            threadRealTime = new Thread(_algorithmHandlers.RealTime.Run) {Name = "RealTime Thread"};

            //Launch the data feed, result sending, and transaction models/handlers in separate threads.
            threadFeed.Start(); // Data feed pushing data packets into thread bridge;
            threadTransactions.Start(); // Transaction modeller scanning new order requests
            threadRealTime.Start(); // RealTime scan time for time based events:
            // Result manager scanning message queue: (started earlier)

            _algorithmHandlers.Results.DebugMessage(string.Format("Launching analysis for {0} with LEAN Engine v{1}", job.AlgorithmId, Constants.Version));

            try
            {
                //Create a new engine isolator class
                var isolator = new Isolator();

                // Execute the Algorithm Code:
                var complete = isolator.ExecuteWithTimeLimit(_algorithmHandlers.Setup.MaximumRuntime, algorithmManager.TimeLoopWithinLimits, () =>
                {
                    try
                    {
                        //Run Algorithm Job:
                        // -> Using this Data Feed,
                        // -> Send Orders to this TransactionHandler,
                        // -> Send Results to ResultHandler.
                        algorithmManager.Run(job, algorithm, _algorithmHandlers.DataFeed, _algorithmHandlers.Transactions, _algorithmHandlers.Results, _algorithmHandlers.RealTime, isolator.CancellationToken);
                    }
                    catch (Exception err)
                    {
                        //Debugging at this level is difficult, stack trace needed.
                        Log.Error("Engine.Run", err);
                        algorithm.RunTimeError = err;
                        algorithmManager.SetStatus(AlgorithmStatus.RuntimeError);
                        return;
                    }
                    Log.Trace("Engine.Run(): Exiting Algorithm Manager");
                }, job.RamAllocation);

                if (!complete)
                {
                    // algorithm exceeded its allotted runtime
                    Log.Error("Engine.Main(): Failed to complete in time: " + _algorithmHandlers.Setup.MaximumRuntime.ToString("F"));
                    throw new Exception("Failed to complete algorithm within " + _algorithmHandlers.Setup.MaximumRuntime.ToString("F") + " seconds. Please make it run faster.");
                }

                // Algorithm runtime error:
                if (algorithm.RunTimeError != null)
                {
                    throw algorithm.RunTimeError;
                }
            }
            catch (Exception err)
            {
                //Error running the user algorithm: purge datafeed, send error messages, set algorithm status to failed.
                Log.Error("Engine.Run(): Breaking out of parent try-catch: " + err.Message + " " + err.StackTrace);
                if (_algorithmHandlers.DataFeed != null) _algorithmHandlers.DataFeed.Exit();
                if (_algorithmHandlers.Results != null)
                {
                    var message = "Runtime Error: " + err.Message;
                    Log.Trace("Engine.Run(): Sending runtime error to user...");
                    _algorithmHandlers.Results.LogMessage(message);
                    _algorithmHandlers.Results.RuntimeError(message, err.StackTrace);
                    _systemHandlers.Api.SetAlgorithmStatus(job.AlgorithmId, AlgorithmStatus.RuntimeError, message + " Stack Trace: " + err.StackTrace);
                }
            }

            //Send result data back: this entire code block could be rewritten.
            // todo: - Split up statistics class, its enormous.
            // todo: - Make a dedicated Statistics.Benchmark class.
            // todo: - Move all creation and transmission of statistics out of primary engine loop.
            // todo: - Statistics.Generate(algorithm, resulthandler, transactionhandler);
            try
            {
                var charts = new Dictionary<string, Chart>(_algorithmHandlers.Results.Charts);
                var orders = new Dictionary<int, Order>(_algorithmHandlers.Transactions.Orders);
                var holdings = new Dictionary<string, Holding>();
                var statistics = new Dictionary<string, string>();
                var banner = new Dictionary<string, string>();

                try
                {
                    //Generates error when things don't exist (no charting logged, runtime errors in main algo execution)
                    const string strategyEquityKey = "Strategy Equity";
                    const string equityKey = "Equity";
                    const string dailyPerformanceKey = "Daily Performance";
                    const string benchmarkKey = "Benchmark";

                    // make sure we've taken samples for these series before just blindly requesting them
                    if (charts.ContainsKey(strategyEquityKey) && charts[strategyEquityKey].Series.ContainsKey(equityKey) && charts[strategyEquityKey].Series.ContainsKey(dailyPerformanceKey))
                    {
                        var equity = charts[strategyEquityKey].Series[equityKey].Values;
                        var performance = charts[strategyEquityKey].Series[dailyPerformanceKey].Values;
                        var profitLoss = new SortedDictionary<DateTime, decimal>(algorithm.Transactions.TransactionRecord);
                        var numberOfTrades = algorithm.Transactions.GetOrders(x => x.Status.IsFill()).Count();
                        var benchmark = charts[benchmarkKey].Series[benchmarkKey].Values.ToDictionary(chartPoint => Time.UnixTimeStampToDateTime(chartPoint.x), chartPoint => chartPoint.y);
                        statistics = Statistics.Statistics.Generate(equity, profitLoss, performance, benchmark, _algorithmHandlers.Setup.StartingPortfolioValue, algorithm.Portfolio.TotalFees, numberOfTrades, 252);
                    }
                }
                catch (Exception err)
                {
                    Log.Error("Algorithm.Node.Engine(): Error generating statistics packet: " + err.Message);
                }

                //Diagnostics Completed, Send Result Packet:
                var totalSeconds = (DateTime.Now - startTime).TotalSeconds;
                _algorithmHandlers.Results.DebugMessage(
                    string.Format("Algorithm Id:({0}) completed in {1} seconds at {2}k data points per second. Processing total of {3} data points.",
                        job.AlgorithmId, totalSeconds.ToString("F2"), ((algorithmManager.DataPoints/(double) 1000)/totalSeconds).ToString("F0"), algorithmManager.DataPoints.ToString("N0")));

                _algorithmHandlers.Results.SendFinalResult(job, orders, algorithm.Transactions.TransactionRecord, holdings, statistics, banner);
            }
            catch (Exception err)
            {
                Log.Error("Engine.Main(): Error sending analysis result: " + err.Message + " ST >> " + err.StackTrace);
            }

            //Before we return, send terminate commands to close up the threads
            _algorithmHandlers.Transactions.Exit();
            _algorithmHandlers.DataFeed.Exit();
            _algorithmHandlers.RealTime.Exit();
        }

        //Close result handler:
        _algorithmHandlers.Results.Exit();
        statusPing.Exit();

        //Wait for the threads to complete:
        var ts = Stopwatch.StartNew();
        while ((_algorithmHandlers.Results.IsActive || (_algorithmHandlers.Transactions != null && _algorithmHandlers.Transactions.IsActive) || (_algorithmHandlers.DataFeed != null && _algorithmHandlers.DataFeed.IsActive) || (_algorithmHandlers.RealTime != null && _algorithmHandlers.RealTime.IsActive)) && ts.ElapsedMilliseconds < 30*1000)
        {
            Thread.Sleep(100);
            Log.Trace("Waiting for threads to exit...");
        }

        //Terminate threads still in active state.
        if (threadFeed != null && threadFeed.IsAlive) threadFeed.Abort();
        if (threadTransactions != null && threadTransactions.IsAlive) threadTransactions.Abort();
        if (threadResults != null && threadResults.IsAlive) threadResults.Abort();
        if (statusPingThread != null && statusPingThread.IsAlive) statusPingThread.Abort();

        if (brokerage != null)
        {
            brokerage.Disconnect();
        }
        if (_algorithmHandlers.Setup != null)
        {
            _algorithmHandlers.Setup.Dispose();
        }
        Log.Trace("Engine.Main(): Analysis Completed and Results Posted.");
    }
    catch (Exception err)
    {
        Log.Error("Engine.Main(): Error running algorithm: " + err.Message + " >> " + err.StackTrace);
    }
    finally
    {
        //No matter what for live mode; make sure we've set algorithm status in the API for "not running" conditions:
        if (_liveMode && algorithmManager.State != AlgorithmStatus.Running && algorithmManager.State != AlgorithmStatus.RuntimeError)
            _systemHandlers.Api.SetAlgorithmStatus(job.AlgorithmId, algorithmManager.State);

        _algorithmHandlers.Results.Exit();
        _algorithmHandlers.DataFeed.Exit();
        _algorithmHandlers.Transactions.Exit();
        _algorithmHandlers.RealTime.Exit();
    }
}
/// <summary>
/// Creates a new <see cref="BacktestingBrokerage"/> instance
/// </summary>
/// <param name="algorithmNodePacket">Job packet</param>
/// <param name="uninitializedAlgorithm">The algorithm instance before Initialize has been called</param>
/// <param name="factory">Receives the brokerage factory that produced the instance</param>
/// <returns>The brokerage instance, or throws if error creating instance</returns>
public IBrokerage CreateBrokerage(AlgorithmNodePacket algorithmNodePacket, IAlgorithm uninitializedAlgorithm, out IBrokerageFactory factory)
{
    // Backtesting always uses the simulated brokerage wrapped around the algorithm.
    factory = new BacktestingBrokerageFactory();
    var brokerage = new BacktestingBrokerage(uninitializedAlgorithm);
    return brokerage;
}
/// <summary>
/// Setup the algorithm cash, dates and data subscriptions as desired.
/// </summary>
/// <param name="algorithm">Algorithm instance</param>
/// <param name="brokerage">Brokerage instance</param>
/// <param name="baseJob">Algorithm job; must be a <see cref="BacktestNodePacket"/></param>
/// <param name="resultHandler">The configured result handler</param>
/// <param name="transactionHandler">The configured transaction handler</param>
/// <param name="realTimeHandler">The configured real time handler</param>
/// <returns>Boolean true on successfully initializing the algorithm</returns>
public bool Setup(IAlgorithm algorithm, IBrokerage brokerage, AlgorithmNodePacket baseJob, IResultHandler resultHandler, ITransactionHandler transactionHandler, IRealTimeHandler realTimeHandler)
{
    var job = baseJob as BacktestNodePacket;
    if (job == null)
    {
        throw new ArgumentException("Expected BacktestNodePacket but received " + baseJob.GetType().Name);
    }

    Log.Trace(string.Format("BacktestingSetupHandler.Setup(): Setting up job: Plan: {0}, UID: {1}, PID: {2}, Version: {3}, Source: {4}", job.UserPlan, job.UserId, job.ProjectId, job.Version, job.RequestSource));

    if (algorithm == null)
    {
        Errors.Add("Could not create instance of algorithm");
        return false;
    }

    //Make sure the algorithm start date is ok:
    if (job.PeriodStart == default(DateTime))
    {
        Errors.Add("Algorithm start date was never set");
        return false;
    }

    var isolator = new Isolator();
    var initializeComplete = isolator.ExecuteWithTimeLimit(TimeSpan.FromMinutes(5), () =>
    {
        try
        {
            //Set our parameters
            algorithm.SetParameters(job.Parameters);
            //Algorithm is backtesting, not live:
            algorithm.SetLiveMode(false);
            //Set the algorithm time before we even initialize:
            algorithm.SetDateTime(job.PeriodStart.ConvertToUtc(algorithm.TimeZone));
            //Set the source impl for the event scheduling
            algorithm.Schedule.SetEventSchedule(realTimeHandler);
            //Initialise the algorithm, get the required data:
            algorithm.Initialize();
        }
        catch (Exception err)
        {
            Errors.Add("Failed to initialize algorithm: Initialize(): " + err.Message);
        }
    });

    //Before continuing, detect if this is ready:
    if (!initializeComplete) return false;

    algorithm.Transactions.SetOrderProcessor(transactionHandler);
    algorithm.PostInitialize();

    //Calculate the max runtime for the strategy
    _maxRuntime = GetMaximumRuntime(job.PeriodStart, job.PeriodFinish, algorithm.SubscriptionManager.Count);

    //Get starting capital:
    _startingCaptial = algorithm.Portfolio.Cash;

    //Max Orders: 10k per backtest for free plans, unlimited (with doubled runtime) otherwise:
    if (job.UserPlan == UserPlan.Free)
    {
        _maxOrders = 10000;
    }
    else
    {
        _maxOrders = int.MaxValue;
        _maxRuntime += _maxRuntime;
    }

    //Set back to the algorithm,
    algorithm.SetMaximumOrders(_maxOrders);

    //Starting date of the algorithm:
    _startingDate = job.PeriodStart;

    //Put into log for debugging (fixed: the user id was previously spliced into this line with a broken string literal):
    Log.Trace("SetUp Backtesting: User: " + job.UserId + " ProjectId: " + job.ProjectId + " AlgoId: " + job.AlgorithmId);
    Log.Trace("Dates: Start: " + job.PeriodStart.ToShortDateString() + " End: " + job.PeriodFinish.ToShortDateString() + " Cash: " + _startingCaptial.ToString("C"));

    if (Errors.Count > 0)
    {
        initializeComplete = false;
    }
    return initializeComplete;
}
/// <summary>
/// Process log messages to ensure they meet the user caps and send them to storage.
/// </summary>
/// <param name="job">Algorithm job/task packet</param>
/// <returns>String URL of log</returns>
private string ProcessLogMessages(AlgorithmNodePacket job)
{
    var remoteUrl = @"http://data.quantconnect.com/";
    var logLength = 0;
    try
    {
        //Return nothing if there's no log messages to process:
        if (!_log.Any()) return "";

        //Get the max length allowed for the algorithm:
        var allowance = Engine.Api.ReadLogAllowance(job.UserId, job.Channel);
        var logBacktestMax = allowance[0];
        var logDailyMax = allowance[1];
        var logRemaining = Math.Min(logBacktestMax, allowance[2]); //Minimum of maximum backtest or remaining allowance.
        var hitLimit = false;
        //Build the serialized log with a StringBuilder: string += in the loop below was accidentally O(n^2) for large logs.
        var serialized = new System.Text.StringBuilder();
        var key = "backtests/" + job.UserId + "/" + job.ProjectId + "/" + job.AlgorithmId + "-log.txt";
        remoteUrl += key;

        foreach (var line in _log)
        {
            if ((logLength + line.Length) < logRemaining)
            {
                serialized.Append(line).Append("\r\n");
                logLength += line.Length;
            }
            else
            {
                //Hit the cap: append the notice text and stop consuming the remaining log lines.
                var btMax = Math.Round((double)logBacktestMax / 1024, 0) + "kb";
                var dyMax = Math.Round((double)logDailyMax / 1024, 0) + "kb";

                //Same cap notice for both free & subscribers
                var requestMore = "";
                var capNotice = "You currently have a maximum of " + btMax + " of log data per backtest, and " + dyMax + " total max per day.";
                DebugMessage("You currently have a maximum of " + btMax + " of log data per backtest remaining, and " + dyMax + " total max per day.");

                //Data providers set max log limits and require email requests for extensions
                if (job.UserPlan == UserPlan.Free)
                {
                    requestMore = "Please upgrade your account and contact us to request more allocation here: https://www.quantconnect.com/contact";
                }
                else
                {
                    requestMore = "If you require more please briefly explain request for more allocation here: https://www.quantconnect.com/contact";
                }
                DebugMessage(requestMore);
                serialized.Append(capNotice);
                serialized.Append(requestMore);
                hitLimit = true;
                break;
            }
        }

        //Save the log: Upload this file to S3:
        Engine.Api.Store(serialized.ToString(), key, StoragePermissions.Public);
        //Record the data usage:
        Engine.Api.UpdateDailyLogUsed(job.UserId, job.AlgorithmId, remoteUrl, logLength, job.Channel, hitLimit);
    }
    catch (Exception err)
    {
        Log.Error("BacktestingResultHandler.ProcessLogMessages(): " + err.Message);
    }
    Log.Trace("BacktestingResultHandler.ProcessLogMessages(): Ready: " + remoteUrl);
    return remoteUrl;
}
/// <summary>
/// Send a final analysis result back to the IDE.
/// </summary>
/// <param name="job">Lean AlgorithmJob task</param>
/// <param name="orders">Collection of orders from the algorithm</param>
/// <param name="profitLoss">Collection of time-profit values for the algorithm</param>
/// <param name="holdings">Current holdings state for the algorithm</param>
/// <param name="statistics">Statistics information for the algorithm (empty if not finished)</param>
/// <param name="banner">Runtime statistics banner information</param>
public void SendFinalResult(AlgorithmNodePacket job, Dictionary<int, Order> orders, Dictionary<DateTime, decimal> profitLoss, Dictionary<string, Holding> holdings, Dictionary<string, string> statistics, Dictionary<string, string> banner)
{
    try
    {
        //Snapshot the chart collection into a plain dictionary for serialization:
        var chartSnapshot = new Dictionary<string, Chart>(Charts);

        _processingFinalPacket = true;

        //Assemble the full result packet for the browser:
        var finalResult = new BacktestResult(chartSnapshot, orders, profitLoss, statistics);
        var packet = new BacktestResultPacket((BacktestNodePacket) job, finalResult, 1m)
        {
            ProcessingTime = (DateTime.Now - _startTime).TotalSeconds,
            DateFinished = DateTime.Now,
            Progress = 1
        };

        //Persist the complete result first:
        StoreResult(packet);

        //Then strip the payload so the packet fits within the 32kb messaging limit and send it:
        packet.Results = new BacktestResult();
        Engine.Notify.BacktestResult(packet, finalPacket: true);

        Log.Trace("BacktestingResultHandler.SendAnalysisResult(): Processed final packet");
    }
    catch (Exception err)
    {
        Log.Error("Algorithm.Worker.SendResult(): " + err.Message);
    }
}
/// <summary>
/// Setup the algorithm cash, dates and portfolio as desired.
/// </summary>
/// <param name="algorithm">Existing algorithm instance</param>
/// <param name="brokerage">New brokerage instance</param>
/// <param name="baseJob">Backtesting job</param>
/// <param name="resultHandler"></param>
/// <returns>Boolean true on successfully setting up the console.</returns>
public bool Setup(IAlgorithm algorithm, out IBrokerage brokerage, AlgorithmNodePacket baseJob, IResultHandler resultHandler)
{
    var initializeComplete = false;
    try
    {
        //Guard: this handler only supports backtest packets.
        if (baseJob.Type != PacketType.BacktestNode)
        {
            throw new Exception("The ConsoleSetupHandler is for backtests only. Use the BrokerageSetupHandler.");
        }

        var job = baseJob as BacktestNodePacket;

        //No limits for local runs: open up asset counts and order counts.
        algorithm.SetAssetLimits(999, 999, 999);
        algorithm.SetMaximumOrders(int.MaxValue);

        //Run the user's Initialize() to configure the algorithm:
        algorithm.Initialize();

        //Add currency data feeds that weren't explicity added in Initialize
        algorithm.Portfolio.CashBook.EnsureCurrencyDataFeeds(algorithm.Securities, algorithm.SubscriptionManager);

        //Fill in the job packet from the now-initialized algorithm:
        job.PeriodStart = algorithm.StartDate;
        job.PeriodFinish = algorithm.EndDate;
        job.BacktestId = "LOCALHOST";
        job.UserId = 1001;
        job.Type = PacketType.BacktestNode;

        //Backtest Specific Parameters:
        StartingDate = job.PeriodStart;
        StartingPortfolioValue = algorithm.Portfolio.Cash;
    }
    catch (Exception err)
    {
        Log.Error("ConsoleSetupHandler().Setup(): " + err.Message);
        Errors.Add("Failed to initialize algorithm: Initialize(): " + err.Message);
    }

    if (Errors.Count == 0)
    {
        initializeComplete = true;
    }

    // we need to do this after algorithm initialization
    brokerage = new BacktestingBrokerage(algorithm);

    // set the transaction models base on the requested brokerage properties
    SetupHandler.UpdateTransactionModels(algorithm, algorithm.BrokerageModel);

    return initializeComplete;
}
/********************************************************
* CLASS METHODS
*********************************************************/
/// <summary>
/// Launch the algorithm manager to run this strategy
/// </summary>
/// <param name="job">Algorithm job</param>
/// <param name="algorithm">Algorithm instance</param>
/// <param name="feed">Datafeed object</param>
/// <param name="transactions">Transaction manager object</param>
/// <param name="results">Result handler object</param>
/// <param name="setup">Setup handler object</param>
/// <param name="realtime">Realtime processing object</param>
/// <remarks>Modify with caution</remarks>
public static void Run(AlgorithmNodePacket job, IAlgorithm algorithm, IDataFeed feed, ITransactionHandler transactions, IResultHandler results, ISetupHandler setup, IRealTimeHandler realtime)
{
    //Initialize:
    var backwardsCompatibilityMode = false;
    var tradebarsType = typeof (TradeBars);
    var ticksType = typeof(Ticks);
    var startingPerformance = setup.StartingCapital;
    var backtestMode = (job.Type == PacketType.BacktestNode);
    var methodInvokers = new Dictionary<Type, MethodInvoker>();

    //Initialize Properties:
    _frontier = setup.StartingDate;
    _runtimeError = null;
    _algorithmId = job.AlgorithmId;
    _algorithmState = AlgorithmStatus.Running;
    _previousTime = setup.StartingDate.Date;

    //Create the method accessors to push generic types into algorithm: Find all OnData events:

    //Algorithm 1.0 Data Accessors.
    //If the users defined these methods, add them in manually. This allows keeping backwards compatibility to algorithm 1.0.
    var oldTradeBarsMethodInfo = (algorithm.GetType()).GetMethod("OnTradeBar", new[] { typeof(Dictionary<string, TradeBar>) });
    var oldTicksMethodInfo = (algorithm.GetType()).GetMethod("OnTick", new[] { typeof(Dictionary<string, List<Tick>>) });

    //Algorithm 2.0 Data Generics Accessors.
    //New hidden access to tradebars with custom type.
    var newTradeBarsMethodInfo = (algorithm.GetType()).GetMethod("OnData", new[] { tradebarsType });
    var newTicksMethodInfo = (algorithm.GetType()).GetMethod("OnData", new[] { ticksType });

    if (newTradeBarsMethodInfo == null && newTicksMethodInfo == null)
    {
        backwardsCompatibilityMode = true;
        if (oldTradeBarsMethodInfo != null) methodInvokers.Add(tradebarsType, oldTradeBarsMethodInfo.DelegateForCallMethod());
        //BUGFIX: this previously tested oldTradeBarsMethodInfo, throwing a NullReferenceException
        //when a v1.0 algorithm defined OnTradeBar but not OnTick.
        if (oldTicksMethodInfo != null) methodInvokers.Add(ticksType, oldTicksMethodInfo.DelegateForCallMethod());
    }
    else
    {
        backwardsCompatibilityMode = false;
        if (newTradeBarsMethodInfo != null) methodInvokers.Add(tradebarsType, newTradeBarsMethodInfo.DelegateForCallMethod());
        if (newTicksMethodInfo != null) methodInvokers.Add(ticksType, newTicksMethodInfo.DelegateForCallMethod());
    }

    //Go through the subscription types and create invokers to trigger the event handlers for each custom type:
    foreach (var config in feed.Subscriptions)
    {
        //If type is a tradebar, combine tradebars and ticks into unified array:
        if (config.Type.Name != "TradeBar" && config.Type.Name != "Tick")
        {
            //Get the matching method for this event handler - e.g. public void OnData(Quandl data) { .. }
            var genericMethod = (algorithm.GetType()).GetMethod("OnData", new[] { config.Type });

            //Is we already have this Type-handler then don't add it to invokers again.
            if (methodInvokers.ContainsKey(config.Type)) continue;

            //If we couldnt find the event handler, let the user know we can't fire that event.
            if (genericMethod == null)
            {
                _runtimeError = new Exception("Data event handler not found, please create a function matching this template: public void OnData(" + config.Type.Name + " data) {  }");
                _algorithmState = AlgorithmStatus.RuntimeError;
                return;
            }
            methodInvokers.Add(config.Type, genericMethod.DelegateForCallMethod());
        }
    }

    //Loop over the queues: get a data collection, then pass them all into relevent methods in the algorithm.
    Log.Debug("AlgorithmManager.Run(): Algorithm initialized, launching time loop.");
    foreach (var newData in DataStream.GetData(feed, setup.StartingDate))
    {
        //Check this backtest is still running:
        if (_algorithmState != AlgorithmStatus.Running) break;

        //Go over each time stamp we've collected, pass it into the algorithm in order:
        foreach (var time in newData.Keys)
        {
            //Set the time frontier:
            _frontier = time;

            //Execute with TimeLimit Monitor:
            if (Isolator.IsCancellationRequested) return;

            //Refresh the realtime event monitor:
            realtime.SetTime(time);

            //Fire EOD if the time packet we just processed is greater
            if (backtestMode && _previousTime.Date != time.Date)
            {
                //Sample the portfolio value over time for chart.
                results.SampleEquity(_previousTime, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4));
                if (startingPerformance == 0)
                {
                    results.SamplePerformance(_previousTime.Date, 0);
                }
                else
                {
                    results.SamplePerformance(_previousTime.Date, Math.Round((algorithm.Portfolio.TotalPortfolioValue - startingPerformance) * 100 / startingPerformance, 10));
                }
                startingPerformance = algorithm.Portfolio.TotalPortfolioValue;
            }

            //Check if the user's signalled Quit: loop over data until day changes.
            if (algorithm.GetQuit())
            {
                _algorithmState = AlgorithmStatus.Quit;
                break;
            }

            //Pass in the new time first:
            algorithm.SetDateTime(time);

            //Trigger the data events: Invoke the types we have data for:
            var oldBars = new Dictionary<string, TradeBar>();
            var oldTicks = new Dictionary<string, List<Tick>>();
            var newBars = new TradeBars(time);
            var newTicks = new Ticks(time);

            //Invoke all non-tradebars, non-ticks methods:
            // --> i == Subscription Configuration Index, so we don't need to compare types.
            foreach (var i in newData[time].Keys)
            {
                //Data point and config of this point:
                var dataPoints = newData[time][i];
                var config = feed.Subscriptions[i];

                //Create TradeBars Unified Data --> OR --> invoke generic data event. One loop.
                foreach (var dataPoint in dataPoints)
                {
                    //Update the securities properties: first before calling user code to avoid issues with data
                    algorithm.Securities.Update(time, dataPoint);

                    //Update registered consolidators for this symbol index
                    for (var j = 0; j < config.Consolidators.Count; j++)
                    {
                        config.Consolidators[j].Update(dataPoint);
                    }

                    switch (config.Type.Name)
                    {
                        case "TradeBar":
                            var bar = dataPoint as TradeBar;
                            try
                            {
                                if (bar != null)
                                {
                                    if (backwardsCompatibilityMode)
                                    {
                                        if (!oldBars.ContainsKey(bar.Symbol)) oldBars.Add(bar.Symbol, bar);
                                    }
                                    else
                                    {
                                        if (!newBars.ContainsKey(bar.Symbol)) newBars.Add(bar.Symbol, bar);
                                    }
                                }
                            }
                            catch (Exception err)
                            {
                                Log.Error(time.ToLongTimeString() + " >> " + bar.Time.ToLongTimeString() + " >> " + bar.Symbol + " >> " + bar.Value.ToString("C"));
                                Log.Error("AlgorithmManager.Run(): Failed to add TradeBar (" + bar.Symbol + ") Time: (" + time.ToLongTimeString() + ") Count:(" + newBars.Count + ") " + err.Message);
                            }
                            break;

                        case "Tick":
                            var tick = dataPoint as Tick;
                            if (tick != null)
                            {
                                if (backwardsCompatibilityMode)
                                {
                                    if (!oldTicks.ContainsKey(tick.Symbol))
                                    {
                                        oldTicks.Add(tick.Symbol, new List<Tick>());
                                    }
                                    oldTicks[tick.Symbol].Add(tick);
                                }
                                else
                                {
                                    if (!newTicks.ContainsKey(tick.Symbol))
                                    {
                                        newTicks.Add(tick.Symbol, new List<Tick>());
                                    }
                                    newTicks[tick.Symbol].Add(tick);
                                }
                            }
                            break;

                        default:
                            //Send data into the generic algorithm event handlers
                            try
                            {
                                methodInvokers[config.Type](algorithm, dataPoint);
                            }
                            catch (Exception err)
                            {
                                _runtimeError = err;
                                _algorithmState = AlgorithmStatus.RuntimeError;
                                Log.Debug("AlgorithmManager.Run(): RuntimeError: Custom Data: " + err.Message + " STACK >>> " + err.StackTrace);
                                return;
                            }
                            break;
                    }
                }
            }

            //After we've fired all other events in this second, fire the pricing events:
            if (backwardsCompatibilityMode)
            {
                try
                {
                    if (oldTradeBarsMethodInfo != null && oldBars.Count > 0) methodInvokers[tradebarsType](algorithm, oldBars);
                    if (oldTicksMethodInfo != null && oldTicks.Count > 0) methodInvokers[ticksType](algorithm, oldTicks);
                }
                catch (Exception err)
                {
                    _runtimeError = err;
                    _algorithmState = AlgorithmStatus.RuntimeError;
                    Log.Debug("AlgorithmManager.Run(): RuntimeError: Backwards Compatibility Mode: " + err.Message + " STACK >>> " + err.StackTrace);
                    return;
                }
            }
            else
            {
                try
                {
                    if (newTradeBarsMethodInfo != null && newBars.Count > 0) methodInvokers[tradebarsType](algorithm, newBars);
                    if (newTicksMethodInfo != null && newTicks.Count > 0) methodInvokers[ticksType](algorithm, newTicks);
                }
                catch (Exception err)
                {
                    _runtimeError = err;
                    _algorithmState = AlgorithmStatus.RuntimeError;
                    Log.Debug("AlgorithmManager.Run(): RuntimeError: New Style Mode: " + err.Message + " STACK >>> " + err.StackTrace);
                    return;
                }
            }

            //If its the historical/paper trading models, wait until market orders have been "filled"
            // Manually trigger the event handler to prevent thread switch.
            transactions.ProcessSynchronousEvents();

            //Save the previous time for the sample calculations
            _previousTime = time;
        } // End of Time Loop

        // Process any required events of the results handler such as sampling assets, equity, or stock prices.
        results.ProcessSynchronousEvents();
    } // End of ForEach DataStream

    //Stream over:: Send the final packet and fire final events:
    Log.Trace("AlgorithmManager.Run(): Firing On End Of Algorithm...");
    try
    {
        algorithm.OnEndOfAlgorithm();
    }
    catch (Exception err)
    {
        _algorithmState = AlgorithmStatus.RuntimeError;
        _runtimeError = new Exception("Error running OnEndOfAlgorithm(): " + err.Message, err.InnerException);
        Log.Debug("AlgorithmManager.OnEndOfAlgorithm(): " + err.Message + " STACK >>> " + err.StackTrace);
        return;
    }

    // Process any required events of the results handler such as sampling assets, equity, or stock prices.
    results.ProcessSynchronousEvents(forceProcess: true);

    //Liquidate Holdings for Calculations:
    if (_algorithmState == AlgorithmStatus.Liquidated || !Engine.LiveMode)
    {
        Log.Trace("AlgorithmManager.Run(): Liquidating algorithm holdings...");
        algorithm.Liquidate();
        results.LogMessage("Algorithm Liquidated");
        results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Liquidated);
    }

    //Manually stopped the algorithm
    if (_algorithmState == AlgorithmStatus.Stopped)
    {
        Log.Trace("AlgorithmManager.Run(): Stopping algorithm...");
        results.LogMessage("Algorithm Stopped");
        results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Stopped);
    }

    //Backtest deleted.
    if (_algorithmState == AlgorithmStatus.Deleted)
    {
        Log.Trace("AlgorithmManager.Run(): Deleting algorithm...");
        results.DebugMessage("Algorithm Id:(" + job.AlgorithmId + ") Deleted by request.");
        results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Deleted);
    }

    //Algorithm finished, send regardless of commands:
    results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Completed);

    //Take final samples:
    results.SampleRange(algorithm.GetChartUpdates());
    results.SampleEquity(_frontier, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4));
    //Guard against divide-by-zero on a zero starting capital (mirrors the in-loop sampling above):
    if (startingPerformance == 0)
    {
        results.SamplePerformance(_frontier, 0);
    }
    else
    {
        results.SamplePerformance(_frontier, Math.Round((algorithm.Portfolio.TotalPortfolioValue - startingPerformance) * 100 / startingPerformance, 10));
    }
}
/// <summary>
/// Setup the algorithm cash, dates and data subscriptions as desired.
/// </summary>
/// <param name="algorithm">Algorithm instance</param>
/// <param name="brokerage">Brokerage instance</param>
/// <param name="baseJob">Algorithm job; must be a <see cref="BacktestNodePacket"/></param>
/// <returns>Boolean true on successfully initializing the algorithm</returns>
public bool Setup(IAlgorithm algorithm, out IBrokerage brokerage, AlgorithmNodePacket baseJob)
{
    var job = baseJob as BacktestNodePacket;
    if (job == null)
    {
        throw new ArgumentException("Expected BacktestNodePacket but received " + baseJob.GetType().Name);
    }

    // Must be set since its defined as an out parameter
    brokerage = new BacktestingBrokerage(algorithm);

    if (algorithm == null)
    {
        Errors.Add("Could not create instance of algorithm");
        return false;
    }

    //Make sure the algorithm start date is ok:
    if (job.PeriodStart == default(DateTime))
    {
        Errors.Add("Algorithm start date was never set");
        return false;
    }

    //Execute the initialize code:
    var initializeComplete = Isolator.ExecuteWithTimeLimit(TimeSpan.FromSeconds(10), () =>
    {
        try
        {
            //Algorithm is backtesting, not live:
            algorithm.SetLiveMode(false);
            //Set the backtest level asset ram allocation limits
            algorithm.SetAssetLimits(500, 100, 30);
            //Set the algorithm time before we even initialize:
            algorithm.SetDateTime(job.PeriodStart);
            //Initialise the algorithm, get the required data:
            algorithm.Initialize();
        }
        catch (Exception err)
        {
            Errors.Add("Failed to initialize algorithm: Initialize(): " + err.Message);
        }
    });

    //Before continuing, detect if this is ready:
    if (!initializeComplete) return false;

    //Calculate the max runtime for the strategy
    _maxRuntime = GetMaximumRuntime(job.PeriodStart, job.PeriodFinish, algorithm.SubscriptionManager.Count);

    //Get starting capital:
    _startingCaptial = algorithm.Portfolio.Cash;

    //Max Orders: 100 per day:
    _maxOrders = (int)(job.PeriodFinish - job.PeriodStart).TotalDays * 100;

    //Starting date of the algorithm:
    _startingDate = job.PeriodStart;

    //Put into log for debugging (fixed: the user id was previously spliced into this line with a broken string literal):
    Log.Trace("SetUp Backtesting: User: " + job.UserId + " ProjectId: " + job.ProjectId + " AlgoId: " + job.AlgorithmId);
    Log.Trace("Dates: Start: " + job.PeriodStart.ToShortDateString() + " End: " + job.PeriodFinish.ToShortDateString() + " Cash: " + _startingCaptial.ToString("C"));

    if (Errors.Count > 0)
    {
        initializeComplete = false;
    }
    return initializeComplete;
}
/// <summary>
/// Initializes the data feed for the specified job and algorithm.
/// Wires up the data queue exchanges, subscription collection, and a handler that reacts
/// to universes being added to or removed from the algorithm's universe manager.
/// </summary>
/// <param name="algorithm">The algorithm whose universes and securities drive subscriptions</param>
/// <param name="job">Job packet; must be a <see cref="LiveNodePacket"/> or this throws</param>
/// <param name="resultHandler">Result handler used for feed messaging</param>
/// <param name="mapFileProvider">Map file provider (not referenced in this method body)</param>
/// <param name="factorFileProvider">Factor file provider (not referenced in this method body)</param>
public void Initialize(IAlgorithm algorithm, AlgorithmNodePacket job, IResultHandler resultHandler, IMapFileProvider mapFileProvider, IFactorFileProvider factorFileProvider)
{
    // Live data feed only works with live job packets:
    if (!(job is LiveNodePacket))
    {
        throw new ArgumentException("The LiveTradingDataFeed requires a LiveNodePacket.");
    }

    // Capture core collaborators before any exchanges start:
    _cancellationTokenSource = new CancellationTokenSource();
    _algorithm = algorithm;
    _job = (LiveNodePacket) job;
    _resultHandler = resultHandler;
    _timeProvider = GetTimeProvider();
    _dataQueueHandler = GetDataQueueHandler();

    // Frontier time starts at "now" according to the configured time provider:
    _frontierTimeProvider = new ManualTimeProvider(_timeProvider.GetUtcNow());
    _customExchange = new BaseDataExchange("CustomDataExchange") {SleepInterval = 10};
    // sleep is controlled on this exchange via the GetNextTicksEnumerator
    _exchange = new BaseDataExchange("DataQueueExchange"){SleepInterval = 0};
    _exchange.AddEnumerator(DataQueueHandlerSymbol, GetNextTicksEnumerator());
    _subscriptions = new SubscriptionCollection();

    _bridge = new BusyBlockingCollection<TimeSlice>();
    _universeSelection = new UniverseSelection(this, algorithm, job.Controls);

    // run the exchanges on background tasks; they stop when the token source is cancelled
    Task.Run(() => _exchange.Start(_cancellationTokenSource.Token));
    Task.Run(() => _customExchange.Start(_cancellationTokenSource.Token));

    // this value will be modified via calls to AddSubscription/RemoveSubscription
    // (the Ref closure below captures ffres, so both read and write go through it)
    var ffres = Time.OneMinute;
    _fillForwardResolution = Ref.Create(() => ffres, v => ffres = v);

    // wire ourselves up to receive notifications when universes are added/removed
    // NOTE(review): 'start' is captured once here, so every subscription created by this
    // handler uses the feed's initialization time as its start — presumably intentional; confirm.
    var start = _timeProvider.GetUtcNow();
    algorithm.UniverseManager.CollectionChanged += (sender, args) =>
    {
        switch (args.Action)
        {
            case NotifyCollectionChangedAction.Add:
                foreach (var universe in args.NewItems.OfType<Universe>())
                {
                    var config = universe.Configuration;
                    var marketHoursDatabase = MarketHoursDatabase.FromDataFolder();
                    var exchangeHours = marketHoursDatabase.GetExchangeHours(config);

                    // Reuse an existing security for this symbol if the algorithm already has one:
                    Security security;
                    if (!_algorithm.Securities.TryGetValue(config.Symbol, out security))
                    {
                        // create a canonical security object
                        security = new Security(exchangeHours, config, _algorithm.Portfolio.CashBook[CashBook.AccountCurrency], SymbolProperties.GetDefault(CashBook.AccountCurrency));
                    }

                    AddSubscription(new SubscriptionRequest(true, universe, security, config, start, Time.EndOfTime));

                    // Not sure if this is needed but left here because of this:
                    // https://github.com/QuantConnect/Lean/commit/029d70bde6ca83a1eb0c667bb5cc4444bea05678
                    UpdateFillForwardResolution();
                }
                break;

            case NotifyCollectionChangedAction.Remove:
                foreach (var universe in args.OldItems.OfType<Universe>())
                {
                    RemoveSubscription(universe.Configuration);
                }
                break;

            default:
                throw new NotImplementedException("The specified action is not implemented: " + args.Action);
        }
    };
}
/// <summary>
/// No-op: this handler does not forward the final result packet anywhere.
/// All parameters are accepted to satisfy the interface and are ignored.
/// </summary>
/// <param name="job">Lean AlgorithmJob task (ignored)</param>
/// <param name="orders">Collection of orders from the algorithm (ignored)</param>
/// <param name="profitLoss">Collection of time-profit values for the algorithm (ignored)</param>
/// <param name="holdings">Current holdings state for the algorithm (ignored)</param>
/// <param name="statisticsResults">Statistics information for the algorithm (ignored)</param>
/// <param name="banner">Runtime statistics banner information (ignored)</param>
public void SendFinalResult(AlgorithmNodePacket job, Dictionary<int, Order> orders, Dictionary<DateTime, decimal> profitLoss, Dictionary<string, Holding> holdings, StatisticsResults statisticsResults, Dictionary<string, string> banner)
{
}