/// <summary>
/// Launch the algorithm manager to run this strategy
/// </summary>
/// <param name="job">Algorithm job</param>
/// <param name="algorithm">Algorithm instance</param>
/// <param name="feed">Datafeed object</param>
/// <param name="transactions">Transaction manager object</param>
/// <param name="results">Result handler object</param>
/// <param name="realtime">Realtime processing object</param>
/// <param name="commands">The command queue for relaying external commands to the algorithm</param>
/// <param name="token">Cancellation token</param>
/// <remarks>Modify with caution</remarks>
public void Run(AlgorithmNodePacket job, IAlgorithm algorithm, IDataFeed feed, ITransactionHandler transactions, IResultHandler results, IRealTimeHandler realtime, ICommandQueueHandler commands, CancellationToken token)
{
    //Initialize:
    _dataPointCount = 0;
    _algorithm = algorithm;
    var portfolioValue = algorithm.Portfolio.TotalPortfolioValue;
    var backtestMode = (job.Type == PacketType.BacktestNode);
    var methodInvokers = new Dictionary<Type, MethodInvoker>();
    var marginCallFrequency = TimeSpan.FromMinutes(5);
    var nextMarginCallTime = DateTime.MinValue;
    var settlementScanFrequency = TimeSpan.FromMinutes(30);
    var nextSettlementScanTime = DateTime.MinValue;
    var delistingTickets = new List<OrderTicket>();

    //Initialize Properties:
    _algorithmId = job.AlgorithmId;
    _algorithm.Status = AlgorithmStatus.Running;
    _previousTime = algorithm.StartDate.Date;

    //Create the method accessors to push generic types into algorithm: Find all OnData events:

    // Algorithm 2.0 data accessors
    var hasOnDataTradeBars = AddMethodInvoker<TradeBars>(algorithm, methodInvokers);
    var hasOnDataTicks = AddMethodInvoker<Ticks>(algorithm, methodInvokers);

    // dividend and split events
    var hasOnDataDividends = AddMethodInvoker<Dividends>(algorithm, methodInvokers);
    var hasOnDataSplits = AddMethodInvoker<Splits>(algorithm, methodInvokers);
    var hasOnDataDelistings = AddMethodInvoker<Delistings>(algorithm, methodInvokers);
    var hasOnDataSymbolChangedEvents = AddMethodInvoker<SymbolChangedEvents>(algorithm, methodInvokers);

    // Algorithm 3.0 data accessors
    var hasOnDataSlice = algorithm.GetType().GetMethods()
        .Where(x => x.Name == "OnData" && x.GetParameters().Length == 1 && x.GetParameters()[0].ParameterType == typeof(Slice))
        .FirstOrDefault(x => x.DeclaringType == algorithm.GetType()) != null;

    //Go through the subscription types and create invokers to trigger the event handlers for each custom type:
    foreach (var config in algorithm.SubscriptionManager.Subscriptions)
    {
        //If type is a tradebar, combine tradebars and ticks into unified array:
        if (config.Type.Name != "TradeBar" && config.Type.Name != "Tick" && !config.IsInternalFeed)
        {
            //Get the matching method for this event handler - e.g. public void OnData(Quandl data) { .. }
            var genericMethod = (algorithm.GetType()).GetMethod("OnData", new[] { config.Type });

            //If we already have this Type-handler then don't add it to invokers again.
            if (methodInvokers.ContainsKey(config.Type)) { continue; }

            //If we couldn't find the event handler, let the user know we can't fire that event.
            if (genericMethod == null && !hasOnDataSlice)
            {
                algorithm.RunTimeError = new Exception("Data event handler not found, please create a function matching this template: public void OnData(" + config.Type.Name + " data) { }");
                _algorithm.Status = AlgorithmStatus.RuntimeError;
                return;
            }
            if (genericMethod != null)
            {
                methodInvokers.Add(config.Type, genericMethod.DelegateForCallMethod());
            }
        }
    }

    //Loop over the queues: get a data collection, then pass them all into relevant methods in the algorithm.
    Log.Trace("AlgorithmManager.Run(): Begin DataStream - Start: " + algorithm.StartDate + " Stop: " + algorithm.EndDate);

    foreach (var timeSlice in Stream(job, algorithm, feed, results, token))
    {
        // reset our timer on each loop
        _currentTimeStepTime = DateTime.UtcNow;

        //Check this backtest is still running:
        if (_algorithm.Status != AlgorithmStatus.Running)
        {
            Log.Error(string.Format("AlgorithmManager.Run(): Algorithm state changed to {0} at {1}", _algorithm.Status, timeSlice.Time));
            break;
        }

        //Execute with TimeLimit Monitor:
        if (token.IsCancellationRequested)
        {
            Log.Error("AlgorithmManager.Run(): Cancellation requested at " + timeSlice.Time);
            return;
        }

        // before doing anything, check our command queue
        foreach (var command in commands.GetCommands())
        {
            if (command == null) { continue; }

            Log.Trace("AlgorithmManager.Run(): Executing {0}", command);

            CommandResultPacket result;
            try
            {
                result = command.Run(algorithm);
            }
            catch (Exception err)
            {
                Log.Error(err);
                algorithm.Error(string.Format("{0} Error: {1}", command.GetType().Name, err.Message));
                result = new CommandResultPacket(command, false);
            }

            // send the result of the command off to the result handler
            results.Messages.Enqueue(result);
        }

        var time = timeSlice.Time;
        _dataPointCount += timeSlice.DataPointCount;

        //If we're in backtest mode we need to capture the daily performance. We do this here directly
        //before updating the algorithm state with the new data from this time step, otherwise we'll
        //produce incorrect samples (they'll take into account this time step's new price values)
        if (backtestMode)
        {
            //On day-change sample equity and daily performance for statistics calculations
            if (_previousTime.Date != time.Date)
            {
                SampleBenchmark(algorithm, results, _previousTime.Date);

                //Sample the portfolio value over time for chart.
                results.SampleEquity(_previousTime, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4));

                //Check for divide by zero
                if (portfolioValue == 0m)
                {
                    results.SamplePerformance(_previousTime.Date, 0);
                }
                else
                {
                    results.SamplePerformance(_previousTime.Date, Math.Round((algorithm.Portfolio.TotalPortfolioValue - portfolioValue) * 100 / portfolioValue, 10));
                }
                portfolioValue = algorithm.Portfolio.TotalPortfolioValue;
            }
        }
        else
        {
            // live mode continuously samples the benchmark
            SampleBenchmark(algorithm, results, time);
        }

        //Update algorithm state after capturing performance from previous day

        //Set the algorithm and real time handler's time
        algorithm.SetDateTime(time);

        if (timeSlice.Slice.SymbolChangedEvents.Count != 0)
        {
            if (hasOnDataSymbolChangedEvents)
            {
                methodInvokers[typeof(SymbolChangedEvents)](algorithm, timeSlice.Slice.SymbolChangedEvents);
            }
            foreach (var symbol in timeSlice.Slice.SymbolChangedEvents.Keys)
            {
                // cancel all orders for the old symbol
                foreach (var ticket in transactions.GetOrderTickets(x => x.Status.IsOpen() && x.Symbol == symbol))
                {
                    ticket.Cancel("Open order cancelled on symbol changed event");
                }
            }
        }

        if (timeSlice.SecurityChanges != SecurityChanges.None)
        {
            foreach (var security in timeSlice.SecurityChanges.AddedSecurities)
            {
                if (!algorithm.Securities.ContainsKey(security.Symbol))
                {
                    // add the new security
                    algorithm.Securities.Add(security);
                }
            }
        }

        //On each time step push the real time prices to the cashbook so we can have updated conversion rates
        foreach (var kvp in timeSlice.CashBookUpdateData)
        {
            kvp.Key.Update(kvp.Value);
        }

        //Update the securities properties: first before calling user code to avoid issues with data
        foreach (var kvp in timeSlice.SecuritiesUpdateData)
        {
            kvp.Key.SetMarketPrice(kvp.Value);

            // Send market price updates to the TradeBuilder
            if (kvp.Value != null)
            {
                algorithm.TradeBuilder.SetMarketPrice(kvp.Key.Symbol, kvp.Value.Price);
            }
        }

        // fire real time events after we've updated based on the new data
        realtime.SetTime(timeSlice.Time);

        // process fill models on the updated data before entering algorithm, applies to all non-market orders
        transactions.ProcessSynchronousEvents();

        if (delistingTickets.Count != 0)
        {
            for (int i = 0; i < delistingTickets.Count; i++)
            {
                var ticket = delistingTickets[i];
                if (ticket.Status == OrderStatus.Filled)
                {
                    algorithm.Securities.Remove(ticket.Symbol);
                    delistingTickets.RemoveAt(i--);
                    Log.Trace("AlgorithmManager.Run(): Delisted Security removed: " + ticket.Symbol.ToString());
                }
            }
        }

        //Check if the user's signalled Quit: loop over data until day changes.
        if (algorithm.Status == AlgorithmStatus.Stopped)
        {
            Log.Trace("AlgorithmManager.Run(): Algorithm quit requested.");
            break;
        }
        if (algorithm.RunTimeError != null)
        {
            _algorithm.Status = AlgorithmStatus.RuntimeError;
            Log.Trace(string.Format("AlgorithmManager.Run(): Algorithm encountered a runtime error at {0}. Error: {1}", timeSlice.Time, algorithm.RunTimeError));
            break;
        }

        // perform margin calls, in live mode we can also use realtime to emit these
        if (time >= nextMarginCallTime || (_liveMode && nextMarginCallTime > DateTime.UtcNow))
        {
            // determine if there are possible margin call orders to be executed
            bool issueMarginCallWarning;
            var marginCallOrders = algorithm.Portfolio.ScanForMarginCall(out issueMarginCallWarning);
            if (marginCallOrders.Count != 0)
            {
                var executingMarginCall = false;
                try
                {
                    // tell the algorithm we're about to issue the margin call
                    algorithm.OnMarginCall(marginCallOrders);

                    executingMarginCall = true;

                    // execute the margin call orders
                    var executedTickets = algorithm.Portfolio.MarginCallModel.ExecuteMarginCall(marginCallOrders);
                    foreach (var ticket in executedTickets)
                    {
                        algorithm.Error(string.Format("{0} - Executed MarginCallOrder: {1} - Quantity: {2} @ {3}", algorithm.Time, ticket.Symbol, ticket.Quantity, ticket.AverageFillPrice));
                    }
                }
                catch (Exception err)
                {
                    algorithm.RunTimeError = err;
                    _algorithm.Status = AlgorithmStatus.RuntimeError;
                    var locator = executingMarginCall ? "Portfolio.MarginCallModel.ExecuteMarginCall" : "OnMarginCall";
                    Log.Error(string.Format("AlgorithmManager.Run(): RuntimeError: {0}: ", locator) + err);
                    return;
                }
            }
            // we didn't perform a margin call, but got the warning flag back, so issue the warning to the algorithm
            else if (issueMarginCallWarning)
            {
                try
                {
                    algorithm.OnMarginCallWarning();
                }
                catch (Exception err)
                {
                    algorithm.RunTimeError = err;
                    _algorithm.Status = AlgorithmStatus.RuntimeError;
                    Log.Error("AlgorithmManager.Run(): RuntimeError: OnMarginCallWarning: " + err);
                    return;
                }
            }

            nextMarginCallTime = time + marginCallFrequency;
        }

        // perform check for settlement of unsettled funds
        if (time >= nextSettlementScanTime || (_liveMode && nextSettlementScanTime > DateTime.UtcNow))
        {
            algorithm.Portfolio.ScanForCashSettlement(algorithm.UtcTime);

            nextSettlementScanTime = time + settlementScanFrequency;
        }

        // before we call any events, let the algorithm know about universe changes
        if (timeSlice.SecurityChanges != SecurityChanges.None)
        {
            try
            {
                algorithm.OnSecuritiesChanged(timeSlice.SecurityChanges);
            }
            catch (Exception err)
            {
                algorithm.RunTimeError = err;
                _algorithm.Status = AlgorithmStatus.RuntimeError;
                Log.Error("AlgorithmManager.Run(): RuntimeError: OnSecuritiesChanged event: " + err);
                return;
            }
        }

        // apply dividends
        foreach (var dividend in timeSlice.Slice.Dividends.Values)
        {
            Log.Trace("AlgorithmManager.Run(): Applying Dividend for " + dividend.Symbol.ToString(), true);
            algorithm.Portfolio.ApplyDividend(dividend);
        }

        // apply splits
        foreach (var split in timeSlice.Slice.Splits.Values)
        {
            try
            {
                Log.Trace("AlgorithmManager.Run(): Applying Split for " + split.Symbol.ToString(), true);
                algorithm.Portfolio.ApplySplit(split);

                // apply the split to open orders as well in raw mode, all other modes are split adjusted
                if (_liveMode || algorithm.Securities[split.Symbol].SubscriptionDataConfig.DataNormalizationMode == DataNormalizationMode.Raw)
                {
                    // in live mode we always want to have our order match the order at the brokerage, so apply the split to the orders
                    var openOrders = transactions.GetOrderTickets(ticket => ticket.Status.IsOpen() && ticket.Symbol == split.Symbol);
                    algorithm.BrokerageModel.ApplySplit(openOrders.ToList(), split);
                }
            }
            catch (Exception err)
            {
                algorithm.RunTimeError = err;
                _algorithm.Status = AlgorithmStatus.RuntimeError;
                Log.Error("AlgorithmManager.Run(): RuntimeError: Split event: " + err);
                return;
            }
        }

        //Update registered consolidators for this symbol index
        try
        {
            foreach (var kvp in timeSlice.ConsolidatorUpdateData)
            {
                var consolidators = kvp.Key.Consolidators;
                foreach (var dataPoint in kvp.Value)
                {
                    foreach (var consolidator in consolidators)
                    {
                        consolidator.Update(dataPoint);
                    }
                }
            }
        }
        catch (Exception err)
        {
            algorithm.RunTimeError = err;
            _algorithm.Status = AlgorithmStatus.RuntimeError;
            Log.Error("AlgorithmManager.Run(): RuntimeError: Consolidators update: " + err);
            return;
        }

        // fire custom event handlers
        foreach (var kvp in timeSlice.CustomData)
        {
            MethodInvoker methodInvoker;
            var type = kvp.Key.SubscriptionDataConfig.Type;
            if (!methodInvokers.TryGetValue(type, out methodInvoker))
            {
                continue;
            }
            try
            {
                foreach (var dataPoint in kvp.Value)
                {
                    if (type.IsInstanceOfType(dataPoint))
                    {
                        methodInvoker(algorithm, dataPoint);
                    }
                }
            }
            catch (Exception err)
            {
                algorithm.RunTimeError = err;
                _algorithm.Status = AlgorithmStatus.RuntimeError;
                Log.Error("AlgorithmManager.Run(): RuntimeError: Custom Data: " + err);
                return;
            }
        }

        try
        {
            // fire off the dividend and split events before pricing events
            if (hasOnDataDividends && timeSlice.Slice.Dividends.Count != 0)
            {
                methodInvokers[typeof(Dividends)](algorithm, timeSlice.Slice.Dividends);
            }
            if (hasOnDataSplits && timeSlice.Slice.Splits.Count != 0)
            {
                methodInvokers[typeof(Splits)](algorithm, timeSlice.Slice.Splits);
            }
            if (hasOnDataDelistings && timeSlice.Slice.Delistings.Count != 0)
            {
                methodInvokers[typeof(Delistings)](algorithm, timeSlice.Slice.Delistings);
            }
        }
        catch (Exception err)
        {
            algorithm.RunTimeError = err;
            _algorithm.Status = AlgorithmStatus.RuntimeError;
            Log.Error("AlgorithmManager.Run(): RuntimeError: Dividends/Splits/Delistings: " + err);
            return;
        }

        // run the delisting logic after firing delisting events
        HandleDelistedSymbols(algorithm, timeSlice.Slice.Delistings, delistingTickets);

        //After we've fired all other events in this second, fire the pricing events:
        try
        {
            if (hasOnDataTradeBars && timeSlice.Slice.Bars.Count > 0) { methodInvokers[typeof(TradeBars)](algorithm, timeSlice.Slice.Bars); }
            if (hasOnDataTicks && timeSlice.Slice.Ticks.Count > 0) { methodInvokers[typeof(Ticks)](algorithm, timeSlice.Slice.Ticks); }
        }
        catch (Exception err)
        {
            algorithm.RunTimeError = err;
            _algorithm.Status = AlgorithmStatus.RuntimeError;
            Log.Error("AlgorithmManager.Run(): RuntimeError: New Style Mode: " + err);
            return;
        }

        try
        {
            if (timeSlice.Slice.HasData)
            {
                // EVENT HANDLER v3.0 -- all data in a single event
                algorithm.OnData(timeSlice.Slice);
            }
        }
        catch (Exception err)
        {
            algorithm.RunTimeError = err;
            _algorithm.Status = AlgorithmStatus.RuntimeError;
            Log.Error("AlgorithmManager.Run(): RuntimeError: Slice: " + err);
            return;
        }

        //If it's the historical/paper trading models, wait until market orders have been "filled"
        // Manually trigger the event handler to prevent thread switch.
        transactions.ProcessSynchronousEvents();

        //Save the previous time for the sample calculations
        _previousTime = time;

        // Process any required events of the results handler such as sampling assets, equity, or stock prices.
        results.ProcessSynchronousEvents();
    } // End of ForEach feed.Bridge.GetConsumingEnumerable

    // stop timing the loops
    _currentTimeStepTime = DateTime.MinValue;

    //Stream over:: Send the final packet and fire final events:
    Log.Trace("AlgorithmManager.Run(): Firing On End Of Algorithm...");
    try
    {
        algorithm.OnEndOfAlgorithm();
    }
    catch (Exception err)
    {
        _algorithm.Status = AlgorithmStatus.RuntimeError;
        algorithm.RunTimeError = new Exception("Error running OnEndOfAlgorithm(): " + err.Message, err.InnerException);
        Log.Error("AlgorithmManager.OnEndOfAlgorithm(): " + err);
        return;
    }

    // Process any required events of the results handler such as sampling assets, equity, or stock prices.
    results.ProcessSynchronousEvents(forceProcess: true);

    //Liquidate Holdings for Calculations:
    if (_algorithm.Status == AlgorithmStatus.Liquidated && _liveMode)
    {
        Log.Trace("AlgorithmManager.Run(): Liquidating algorithm holdings...");
        algorithm.Liquidate();
        results.LogMessage("Algorithm Liquidated");
        results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Liquidated);
    }

    //Manually stopped the algorithm
    if (_algorithm.Status == AlgorithmStatus.Stopped)
    {
        Log.Trace("AlgorithmManager.Run(): Stopping algorithm...");
        results.LogMessage("Algorithm Stopped");
        results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Stopped);
    }

    //Backtest deleted.
    if (_algorithm.Status == AlgorithmStatus.Deleted)
    {
        Log.Trace("AlgorithmManager.Run(): Deleting algorithm...");
        results.DebugMessage("Algorithm Id:(" + job.AlgorithmId + ") Deleted by request.");
        results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Deleted);
    }

    //Algorithm finished, send regardless of commands:
    results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Completed);

    //Take final samples:
    results.SampleRange(algorithm.GetChartUpdates());
    results.SampleEquity(_previousTime, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4));
    SampleBenchmark(algorithm, results, _previousTime);
    results.SamplePerformance(_previousTime, Math.Round((algorithm.Portfolio.TotalPortfolioValue - portfolioValue) * 100 / portfolioValue, 10));
} // End of Run();
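// ------------------------------------------------------------------------------------------------
// Illustrative sketch, not part of the original source: roughly how an engine-side caller might
// wire this overload up. The "LaunchAlgorithm" wrapper name is an assumption made for illustration;
// only the Run(...) signature above comes from the source.
// ------------------------------------------------------------------------------------------------
private void LaunchAlgorithm(AlgorithmNodePacket job, IAlgorithm algorithm, IDataFeed feed,
    ITransactionHandler transactions, IResultHandler results, IRealTimeHandler realtime,
    ICommandQueueHandler commands)
{
    using (var cts = new CancellationTokenSource())
    {
        // the token gives the engine/isolator a way to abort the time loop from outside
        Run(job, algorithm, feed, transactions, results, realtime, commands, cts.Token);
    }
}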
/********************************************************
* CLASS METHODS
*********************************************************/
/// <summary>
/// Launch the algorithm manager to run this strategy
/// </summary>
/// <param name="job">Algorithm job</param>
/// <param name="algorithm">Algorithm instance</param>
/// <param name="feed">Datafeed object</param>
/// <param name="transactions">Transaction manager object</param>
/// <param name="results">Result handler object</param>
/// <param name="setup">Setup handler object</param>
/// <param name="realtime">Realtime processing object</param>
/// <remarks>Modify with caution</remarks>
public static void Run(AlgorithmNodePacket job, IAlgorithm algorithm, IDataFeed feed, ITransactionHandler transactions, IResultHandler results, ISetupHandler setup, IRealTimeHandler realtime)
{
    //Initialize:
    var backwardsCompatibilityMode = false;
    var tradebarsType = typeof(TradeBars);
    var ticksType = typeof(Ticks);
    var startingPerformance = setup.StartingCapital;
    var backtestMode = (job.Type == PacketType.BacktestNode);
    var methodInvokers = new Dictionary<Type, MethodInvoker>();

    //Initialize Properties:
    _frontier = setup.StartingDate;
    _runtimeError = null;
    _algorithmId = job.AlgorithmId;
    _algorithmState = AlgorithmStatus.Running;
    _previousTime = setup.StartingDate.Date;

    //Create the method accessors to push generic types into algorithm: Find all OnData events:

    //Algorithm 1.0 Data Accessors.
    //If the users defined these methods, add them in manually. This allows keeping backwards compatibility to algorithm 1.0.
    var oldTradeBarsMethodInfo = (algorithm.GetType()).GetMethod("OnTradeBar", new[] { typeof(Dictionary<string, TradeBar>) });
    var oldTicksMethodInfo = (algorithm.GetType()).GetMethod("OnTick", new[] { typeof(Dictionary<string, List<Tick>>) });

    //Algorithm 2.0 Data Generics Accessors.
    //New hidden access to tradebars with custom type.
    var newTradeBarsMethodInfo = (algorithm.GetType()).GetMethod("OnData", new[] { tradebarsType });
    var newTicksMethodInfo = (algorithm.GetType()).GetMethod("OnData", new[] { ticksType });

    if (newTradeBarsMethodInfo == null && newTicksMethodInfo == null)
    {
        backwardsCompatibilityMode = true;
        if (oldTradeBarsMethodInfo != null) methodInvokers.Add(tradebarsType, oldTradeBarsMethodInfo.DelegateForCallMethod());
        if (oldTicksMethodInfo != null) methodInvokers.Add(ticksType, oldTicksMethodInfo.DelegateForCallMethod());
    }
    else
    {
        backwardsCompatibilityMode = false;
        if (newTradeBarsMethodInfo != null) methodInvokers.Add(tradebarsType, newTradeBarsMethodInfo.DelegateForCallMethod());
        if (newTicksMethodInfo != null) methodInvokers.Add(ticksType, newTicksMethodInfo.DelegateForCallMethod());
    }

    //Go through the subscription types and create invokers to trigger the event handlers for each custom type:
    foreach (var config in feed.Subscriptions)
    {
        //If type is a tradebar, combine tradebars and ticks into unified array:
        if (config.Type.Name != "TradeBar" && config.Type.Name != "Tick")
        {
            //Get the matching method for this event handler - e.g. public void OnData(Quandl data) { .. }
            var genericMethod = (algorithm.GetType()).GetMethod("OnData", new[] { config.Type });

            //If we already have this Type-handler then don't add it to invokers again.
            if (methodInvokers.ContainsKey(config.Type)) continue;

            //If we couldn't find the event handler, let the user know we can't fire that event.
if (genericMethod == null) { _runtimeError = new Exception("Data event handler not found, please create a function matching this template: public void OnData(" + config.Type.Name + " data) { }"); _algorithmState = AlgorithmStatus.RuntimeError; return; } methodInvokers.Add(config.Type, genericMethod.DelegateForCallMethod()); } } //Loop over the queues: get a data collection, then pass them all into relevent methods in the algorithm. Log.Debug("AlgorithmManager.Run(): Algorithm initialized, launching time loop."); foreach (var newData in DataStream.GetData(feed, setup.StartingDate)) { //Check this backtest is still running: if (_algorithmState != AlgorithmStatus.Running) break; //Go over each time stamp we've collected, pass it into the algorithm in order: foreach (var time in newData.Keys) { //Set the time frontier: _frontier = time; //Execute with TimeLimit Monitor: if (Isolator.IsCancellationRequested) return; //Refresh the realtime event monitor: realtime.SetTime(time); //Fire EOD if the time packet we just processed is greater if (backtestMode && _previousTime.Date != time.Date) { //Sample the portfolio value over time for chart. results.SampleEquity(_previousTime, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4)); if (startingPerformance == 0) { results.SamplePerformance(_previousTime.Date, 0); } else { results.SamplePerformance(_previousTime.Date, Math.Round((algorithm.Portfolio.TotalPortfolioValue - startingPerformance) * 100 / startingPerformance, 10)); } startingPerformance = algorithm.Portfolio.TotalPortfolioValue; } //Check if the user's signalled Quit: loop over data until day changes. if (algorithm.GetQuit()) { _algorithmState = AlgorithmStatus.Quit; break; } //Pass in the new time first: algorithm.SetDateTime(time); //Trigger the data events: Invoke the types we have data for: var oldBars = new Dictionary<string, TradeBar>(); var oldTicks = new Dictionary<string, List<Tick>>(); var newBars = new TradeBars(time); var newTicks = new Ticks(time); //Invoke all non-tradebars, non-ticks methods: // --> i == Subscription Configuration Index, so we don't need to compare types. foreach (var i in newData[time].Keys) { //Data point and config of this point: var dataPoints = newData[time][i]; var config = feed.Subscriptions[i]; //Create TradeBars Unified Data --> OR --> invoke generic data event. One loop. 
foreach (var dataPoint in dataPoints) { //Update the securities properties: first before calling user code to avoid issues with data algorithm.Securities.Update(time, dataPoint); //Update registered consolidators for this symbol index for (var j = 0; j < config.Consolidators.Count; j++) { config.Consolidators[j].Update(dataPoint); } switch (config.Type.Name) { case "TradeBar": var bar = dataPoint as TradeBar; try { if (bar != null) { if (backwardsCompatibilityMode) { if (!oldBars.ContainsKey(bar.Symbol)) oldBars.Add(bar.Symbol, bar); } else { if (!newBars.ContainsKey(bar.Symbol)) newBars.Add(bar.Symbol, bar); } } } catch (Exception err) { Log.Error(time.ToLongTimeString() + " >> " + bar.Time.ToLongTimeString() + " >> " + bar.Symbol + " >> " + bar.Value.ToString("C")); Log.Error("AlgorithmManager.Run(): Failed to add TradeBar (" + bar.Symbol + ") Time: (" + time.ToLongTimeString() + ") Count:(" + newBars.Count + ") " + err.Message); } break; case "Tick": var tick = dataPoint as Tick; if (tick != null) { if (backwardsCompatibilityMode) { if (!oldTicks.ContainsKey(tick.Symbol)) { oldTicks.Add(tick.Symbol, new List<Tick>()); } oldTicks[tick.Symbol].Add(tick); } else { if (!newTicks.ContainsKey(tick.Symbol)) { newTicks.Add(tick.Symbol, new List<Tick>()); } newTicks[tick.Symbol].Add(tick); } } break; default: //Send data into the generic algorithm event handlers try { methodInvokers[config.Type](algorithm, dataPoint); } catch (Exception err) { _runtimeError = err; _algorithmState = AlgorithmStatus.RuntimeError; Log.Debug("AlgorithmManager.Run(): RuntimeError: Custom Data: " + err.Message + " STACK >>> " + err.StackTrace); return; } break; } } } //After we've fired all other events in this second, fire the pricing events: if (backwardsCompatibilityMode) { //Log.Debug("AlgorithmManager.Run(): Invoking v1.0 Event Handlers..."); try { if (oldTradeBarsMethodInfo != null && oldBars.Count > 0) methodInvokers[tradebarsType](algorithm, oldBars); if (oldTicksMethodInfo != null && oldTicks.Count > 0) methodInvokers[ticksType](algorithm, oldTicks); } catch (Exception err) { _runtimeError = err; _algorithmState = AlgorithmStatus.RuntimeError; Log.Debug("AlgorithmManager.Run(): RuntimeError: Backwards Compatibility Mode: " + err.Message + " STACK >>> " + err.StackTrace); return; } } else { //Log.Debug("AlgorithmManager.Run(): Invoking v2.0 Event Handlers..."); try { if (newTradeBarsMethodInfo != null && newBars.Count > 0) methodInvokers[tradebarsType](algorithm, newBars); if (newTicksMethodInfo != null && newTicks.Count > 0) methodInvokers[ticksType](algorithm, newTicks); } catch (Exception err) { _runtimeError = err; _algorithmState = AlgorithmStatus.RuntimeError; Log.Debug("AlgorithmManager.Run(): RuntimeError: New Style Mode: " + err.Message + " STACK >>> " + err.StackTrace); return; } } //If its the historical/paper trading models, wait until market orders have been "filled" // Manually trigger the event handler to prevent thread switch. transactions.ProcessSynchronousEvents(); //Save the previous time for the sample calculations _previousTime = time; } // End of Time Loop // Process any required events of the results handler such as sampling assets, equity, or stock prices. 
results.ProcessSynchronousEvents(); } // End of ForEach DataStream //Stream over:: Send the final packet and fire final events: Log.Trace("AlgorithmManager.Run(): Firing On End Of Algorithm..."); try { algorithm.OnEndOfAlgorithm(); } catch (Exception err) { _algorithmState = AlgorithmStatus.RuntimeError; _runtimeError = new Exception("Error running OnEndOfAlgorithm(): " + err.Message, err.InnerException); Log.Debug("AlgorithmManager.OnEndOfAlgorithm(): " + err.Message + " STACK >>> " + err.StackTrace); return; } // Process any required events of the results handler such as sampling assets, equity, or stock prices. results.ProcessSynchronousEvents(forceProcess: true); //Liquidate Holdings for Calculations: if (_algorithmState == AlgorithmStatus.Liquidated || !Engine.LiveMode) { Log.Trace("AlgorithmManager.Run(): Liquidating algorithm holdings..."); algorithm.Liquidate(); results.LogMessage("Algorithm Liquidated"); results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Liquidated); } //Manually stopped the algorithm if (_algorithmState == AlgorithmStatus.Stopped) { Log.Trace("AlgorithmManager.Run(): Stopping algorithm..."); results.LogMessage("Algorithm Stopped"); results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Stopped); } //Backtest deleted. if (_algorithmState == AlgorithmStatus.Deleted) { Log.Trace("AlgorithmManager.Run(): Deleting algorithm..."); results.DebugMessage("Algorithm Id:(" + job.AlgorithmId + ") Deleted by request."); results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Deleted); } //Algorithm finished, send regardless of commands: results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Completed); //Take final samples: results.SampleRange(algorithm.GetChartUpdates()); results.SampleEquity(_frontier, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4)); results.SamplePerformance(_frontier, Math.Round((algorithm.Portfolio.TotalPortfolioValue - startingPerformance) * 100 / startingPerformance, 10)); }
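// ------------------------------------------------------------------------------------------------
// Illustrative sketch, not part of the original source: the reflection lookups above choose between
// "Algorithm 1.0" and "Algorithm 2.0" event handlers purely by method signature. A real algorithm
// would normally define one style or the other, not both; the class name "ExampleLegacyAlgorithm"
// and the QCAlgorithm base class are assumptions made for illustration.
// ------------------------------------------------------------------------------------------------
class ExampleLegacyAlgorithm : QCAlgorithm
{
    // Algorithm 1.0 style -- found via GetMethod("OnTradeBar", ...) / GetMethod("OnTick", ...)
    public void OnTradeBar(Dictionary<string, TradeBar> data) { /* bars keyed by symbol string */ }
    public void OnTick(Dictionary<string, List<Tick>> data) { /* tick lists keyed by symbol string */ }

    // Algorithm 2.0 style -- found via GetMethod("OnData", new[] { typeof(TradeBars) }) etc.;
    // declaring either of these turns backwardsCompatibilityMode off in the code above.
    public void OnData(TradeBars data) { /* unified TradeBars dictionary */ }
    public void OnData(Ticks data) { /* unified Ticks dictionary */ }
}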
/********************************************************
* CLASS METHODS
*********************************************************/
/// <summary>
/// Launch the algorithm manager to run this strategy
/// </summary>
/// <param name="job">Algorithm job</param>
/// <param name="algorithm">Algorithm instance</param>
/// <param name="feed">Datafeed object</param>
/// <param name="transactions">Transaction manager object</param>
/// <param name="results">Result handler object</param>
/// <param name="setup">Setup handler object</param>
/// <param name="realtime">Realtime processing object</param>
/// <remarks>Modify with caution</remarks>
public static void Run(AlgorithmNodePacket job, IAlgorithm algorithm, IDataFeed feed, ITransactionHandler transactions, IResultHandler results, ISetupHandler setup, IRealTimeHandler realtime)
{
    //Initialize:
    var backwardsCompatibilityMode = false;
    var tradebarsType = typeof(TradeBars);
    var ticksType = typeof(Ticks);
    var startingPerformance = setup.StartingCapital;
    var backtestMode = (job.Type == PacketType.BacktestNode);
    var methodInvokers = new Dictionary<Type, MethodInvoker>();

    //Initialize Properties:
    _frontier = setup.StartingDate;
    _runtimeError = null;
    _algorithmId = job.AlgorithmId;
    _algorithmState = AlgorithmStatus.Running;
    _previousTime = setup.StartingDate.Date;

    //Create the method accessors to push generic types into algorithm: Find all OnData events:

    //Algorithm 1.0 Data Accessors.
    //If the users defined these methods, add them in manually. This allows keeping backwards compatibility to algorithm 1.0.
    var oldTradeBarsMethodInfo = (algorithm.GetType()).GetMethod("OnTradeBar", new[] { typeof(Dictionary<string, TradeBar>) });
    var oldTicksMethodInfo = (algorithm.GetType()).GetMethod("OnTick", new[] { typeof(Dictionary<string, List<Tick>>) });

    //Algorithm 2.0 Data Generics Accessors.
    //New hidden access to tradebars with custom type.
    var newTradeBarsMethodInfo = (algorithm.GetType()).GetMethod("OnData", new[] { tradebarsType });
    var newTicksMethodInfo = (algorithm.GetType()).GetMethod("OnData", new[] { ticksType });

    if (newTradeBarsMethodInfo == null && newTicksMethodInfo == null)
    {
        backwardsCompatibilityMode = true;
        if (oldTradeBarsMethodInfo != null)
        {
            methodInvokers.Add(tradebarsType, oldTradeBarsMethodInfo.DelegateForCallMethod());
        }
        if (oldTicksMethodInfo != null)
        {
            methodInvokers.Add(ticksType, oldTicksMethodInfo.DelegateForCallMethod());
        }
    }
    else
    {
        backwardsCompatibilityMode = false;
        if (newTradeBarsMethodInfo != null)
        {
            methodInvokers.Add(tradebarsType, newTradeBarsMethodInfo.DelegateForCallMethod());
        }
        if (newTicksMethodInfo != null)
        {
            methodInvokers.Add(ticksType, newTicksMethodInfo.DelegateForCallMethod());
        }
    }

    //Go through the subscription types and create invokers to trigger the event handlers for each custom type:
    foreach (var config in feed.Subscriptions)
    {
        //If type is a tradebar, combine tradebars and ticks into unified array:
        if (config.Type.Name != "TradeBar" && config.Type.Name != "Tick")
        {
            //Get the matching method for this event handler - e.g. public void OnData(Quandl data) { .. }
            var genericMethod = (algorithm.GetType()).GetMethod("OnData", new[] { config.Type });

            //If we already have this Type-handler then don't add it to invokers again.
            if (methodInvokers.ContainsKey(config.Type))
            {
                continue;
            }

            //If we couldn't find the event handler, let the user know we can't fire that event.
if (genericMethod == null) { _runtimeError = new Exception("Data event handler not found, please create a function matching this template: public void OnData(" + config.Type.Name + " data) { }"); _algorithmState = AlgorithmStatus.RuntimeError; return; } methodInvokers.Add(config.Type, genericMethod.DelegateForCallMethod()); } } //Loop over the queues: get a data collection, then pass them all into relevent methods in the algorithm. Log.Debug("AlgorithmManager.Run(): Algorithm initialized, launching time loop."); foreach (var newData in DataStream.GetData(feed, setup.StartingDate)) { //Check this backtest is still running: if (_algorithmState != AlgorithmStatus.Running) { break; } //Go over each time stamp we've collected, pass it into the algorithm in order: foreach (var time in newData.Keys) { //Set the time frontier: _frontier = time; //Execute with TimeLimit Monitor: if (Isolator.IsCancellationRequested) { return; } //Refresh the realtime event monitor: realtime.SetTime(time); //Fire EOD if the time packet we just processed is greater if (backtestMode && _previousTime.Date != time.Date) { //Sample the portfolio value over time for chart. results.SampleEquity(_previousTime, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4)); if (startingPerformance == 0) { results.SamplePerformance(_previousTime.Date, 0); } else { results.SamplePerformance(_previousTime.Date, Math.Round((algorithm.Portfolio.TotalPortfolioValue - startingPerformance) * 100 / startingPerformance, 10)); } startingPerformance = algorithm.Portfolio.TotalPortfolioValue; } //Check if the user's signalled Quit: loop over data until day changes. if (algorithm.GetQuit()) { _algorithmState = AlgorithmStatus.Quit; break; } //Pass in the new time first: algorithm.SetDateTime(time); //Trigger the data events: Invoke the types we have data for: var oldBars = new Dictionary <string, TradeBar>(); var oldTicks = new Dictionary <string, List <Tick> >(); var newBars = new TradeBars(time); var newTicks = new Ticks(time); //Invoke all non-tradebars, non-ticks methods: // --> i == Subscription Configuration Index, so we don't need to compare types. foreach (var i in newData[time].Keys) { //Data point and config of this point: var dataPoints = newData[time][i]; var config = feed.Subscriptions[i]; //Create TradeBars Unified Data --> OR --> invoke generic data event. One loop. 
foreach (var dataPoint in dataPoints) { //Update the securities properties: first before calling user code to avoid issues with data algorithm.Securities.Update(time, dataPoint); //Update registered consolidators for this symbol index for (var j = 0; j < config.Consolidators.Count; j++) { config.Consolidators[j].Update(dataPoint); } switch (config.Type.Name) { case "TradeBar": var bar = dataPoint as TradeBar; try { if (bar != null) { if (backwardsCompatibilityMode) { if (!oldBars.ContainsKey(bar.Symbol)) { oldBars.Add(bar.Symbol, bar); } } else { if (!newBars.ContainsKey(bar.Symbol)) { newBars.Add(bar.Symbol, bar); } } } } catch (Exception err) { Log.Error(time.ToLongTimeString() + " >> " + bar.Time.ToLongTimeString() + " >> " + bar.Symbol + " >> " + bar.Value.ToString("C")); Log.Error("AlgorithmManager.Run(): Failed to add TradeBar (" + bar.Symbol + ") Time: (" + time.ToLongTimeString() + ") Count:(" + newBars.Count + ") " + err.Message); } break; case "Tick": var tick = dataPoint as Tick; if (tick != null) { if (backwardsCompatibilityMode) { if (!oldTicks.ContainsKey(tick.Symbol)) { oldTicks.Add(tick.Symbol, new List <Tick>()); } oldTicks[tick.Symbol].Add(tick); } else { if (!newTicks.ContainsKey(tick.Symbol)) { newTicks.Add(tick.Symbol, new List <Tick>()); } newTicks[tick.Symbol].Add(tick); } } break; default: //Send data into the generic algorithm event handlers try { methodInvokers[config.Type](algorithm, dataPoint); } catch (Exception err) { _runtimeError = err; _algorithmState = AlgorithmStatus.RuntimeError; Log.Error("AlgorithmManager.Run(): RuntimeError: Custom Data: " + err.Message + " STACK >>> " + err.StackTrace); return; } break; } } } //After we've fired all other events in this second, fire the pricing events: if (backwardsCompatibilityMode) { //Log.Debug("AlgorithmManager.Run(): Invoking v1.0 Event Handlers..."); try { if (oldTradeBarsMethodInfo != null && oldBars.Count > 0) { methodInvokers[tradebarsType](algorithm, oldBars); } if (oldTicksMethodInfo != null && oldTicks.Count > 0) { methodInvokers[ticksType](algorithm, oldTicks); } } catch (Exception err) { _runtimeError = err; _algorithmState = AlgorithmStatus.RuntimeError; Log.Error("AlgorithmManager.Run(): RuntimeError: Backwards Compatibility Mode: " + err.Message + " STACK >>> " + err.StackTrace); return; } } else { //Log.Debug("AlgorithmManager.Run(): Invoking v2.0 Event Handlers..."); try { if (newTradeBarsMethodInfo != null && newBars.Count > 0) { methodInvokers[tradebarsType](algorithm, newBars); } if (newTicksMethodInfo != null && newTicks.Count > 0) { methodInvokers[ticksType](algorithm, newTicks); } } catch (Exception err) { _runtimeError = err; _algorithmState = AlgorithmStatus.RuntimeError; Log.Error("AlgorithmManager.Run(): RuntimeError: New Style Mode: " + err.Message + " STACK >>> " + err.StackTrace); return; } } //If its the historical/paper trading models, wait until market orders have been "filled" // Manually trigger the event handler to prevent thread switch. transactions.ProcessSynchronousEvents(); //Save the previous time for the sample calculations _previousTime = time; } // End of Time Loop // Process any required events of the results handler such as sampling assets, equity, or stock prices. 
results.ProcessSynchronousEvents(); } // End of ForEach DataStream //Stream over:: Send the final packet and fire final events: Log.Trace("AlgorithmManager.Run(): Firing On End Of Algorithm..."); try { algorithm.OnEndOfAlgorithm(); } catch (Exception err) { _runtimeError = new Exception("Error running OnEndOfAlgorithm(): " + err.Message, err.InnerException); _algorithmState = AlgorithmStatus.RuntimeError; return; } // Process any required events of the results handler such as sampling assets, equity, or stock prices. results.ProcessSynchronousEvents(); //Liquidate Holdings for Calculations: if (_algorithmState == AlgorithmStatus.Liquidated || !Engine.LiveMode) { Log.Trace("AlgorithmManager.Run(): Liquidating algorithm holdings..."); algorithm.Liquidate(); results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Liquidated); } //Manually stopped the algorithm if (_algorithmState == AlgorithmStatus.Stopped) { Log.Trace("AlgorithmManager.Run(): Stopping algorithm..."); results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Stopped); } //Backtest deleted. if (_algorithmState == AlgorithmStatus.Deleted) { Log.Trace("AlgorithmManager.Run(): Deleting algorithm..."); results.DebugMessage("Algorithm Id:(" + job.AlgorithmId + ") Deleted by request."); results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Deleted); } //Algorithm finished, send regardless of commands: results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Completed); //Take final samples: results.SampleRange(algorithm.GetChartUpdates()); results.SampleEquity(_frontier, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4)); results.SamplePerformance(_frontier, Math.Round((algorithm.Portfolio.TotalPortfolioValue - startingPerformance) * 100 / startingPerformance, 10)); } // End of Run();
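// ------------------------------------------------------------------------------------------------
// Illustrative sketch, not part of the original source: for any subscription whose type is not
// TradeBar or Tick, the setup loop above requires a handler matching the template quoted in the
// runtime error message. "Quandl" is the example type named in the source comments; the class name
// below is an assumption made for illustration.
// ------------------------------------------------------------------------------------------------
class ExampleCustomDataAlgorithm : QCAlgorithm
{
    // invoked through methodInvokers[typeof(Quandl)] once per matching custom data point
    public void OnData(Quandl data)
    {
        var lastValue = data.Value; // BaseData.Value carries the primary value of the custom point
    }
}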
/// <summary>
/// Launch the algorithm manager to run this strategy
/// </summary>
/// <param name="job">Algorithm job</param>
/// <param name="algorithm">Algorithm instance</param>
/// <param name="feed">Datafeed object</param>
/// <param name="transactions">Transaction manager object</param>
/// <param name="results">Result handler object</param>
/// <param name="realtime">Realtime processing object</param>
/// <param name="token">Cancellation token</param>
/// <remarks>Modify with caution</remarks>
public void Run(AlgorithmNodePacket job, IAlgorithm algorithm, IDataFeed feed, ITransactionHandler transactions, IResultHandler results, IRealTimeHandler realtime, CancellationToken token)
{
    //Initialize:
    _dataPointCount = 0;
    var startingPortfolioValue = algorithm.Portfolio.TotalPortfolioValue;
    var backtestMode = (job.Type == PacketType.BacktestNode);
    var methodInvokers = new Dictionary<Type, MethodInvoker>();
    var marginCallFrequency = TimeSpan.FromMinutes(5);
    var nextMarginCallTime = DateTime.MinValue;
    var delistingTickets = new List<OrderTicket>();

    //Initialize Properties:
    _algorithmId = job.AlgorithmId;
    _algorithmState = AlgorithmStatus.Running;
    _previousTime = algorithm.StartDate.Date;

    //Create the method accessors to push generic types into algorithm: Find all OnData events:

    // Algorithm 2.0 data accessors
    var hasOnDataTradeBars = AddMethodInvoker<TradeBars>(algorithm, methodInvokers);
    var hasOnDataTicks = AddMethodInvoker<Ticks>(algorithm, methodInvokers);

    // dividend and split events
    var hasOnDataDividends = AddMethodInvoker<Dividends>(algorithm, methodInvokers);
    var hasOnDataSplits = AddMethodInvoker<Splits>(algorithm, methodInvokers);
    var hasOnDataDelistings = AddMethodInvoker<Delistings>(algorithm, methodInvokers);

    // Algorithm 3.0 data accessors
    var hasOnDataSlice = algorithm.GetType().GetMethods()
        .Where(x => x.Name == "OnData" && x.GetParameters().Length == 1 && x.GetParameters()[0].ParameterType == typeof(Slice))
        .FirstOrDefault(x => x.DeclaringType == algorithm.GetType()) != null;

    //Go through the subscription types and create invokers to trigger the event handlers for each custom type:
    foreach (var config in feed.Subscriptions)
    {
        //If type is a tradebar, combine tradebars and ticks into unified array:
        if (config.Type.Name != "TradeBar" && config.Type.Name != "Tick")
        {
            //Get the matching method for this event handler - e.g. public void OnData(Quandl data) { .. }
            var genericMethod = (algorithm.GetType()).GetMethod("OnData", new[] { config.Type });

            //If we already have this Type-handler then don't add it to invokers again.
            if (methodInvokers.ContainsKey(config.Type)) { continue; }

            //If we couldn't find the event handler, let the user know we can't fire that event.
            if (genericMethod == null && !hasOnDataSlice)
            {
                algorithm.RunTimeError = new Exception("Data event handler not found, please create a function matching this template: public void OnData(" + config.Type.Name + " data) { }");
                _algorithmState = AlgorithmStatus.RuntimeError;
                return;
            }
            if (genericMethod != null)
            {
                methodInvokers.Add(config.Type, genericMethod.DelegateForCallMethod());
            }
        }
    }

    //Loop over the queues: get a data collection, then pass them all into relevant methods in the algorithm.
Log.Trace("AlgorithmManager.Run(): Begin DataStream - Start: " + algorithm.StartDate + " Stop: " + algorithm.EndDate); foreach (var timeSlice in feed.Bridge.GetConsumingEnumerable(token)) { // reset our timer on each loop _currentTimeStepTime = DateTime.UtcNow; //Check this backtest is still running: if (_algorithmState != AlgorithmStatus.Running) { Log.Error(string.Format("AlgorithmManager.Run(): Algorthm state changed to {0} at {1}", _algorithmState, timeSlice.Time)); break; } //Execute with TimeLimit Monitor: if (token.IsCancellationRequested) { Log.Error("AlgorithmManager.Run(): CancellationRequestion at " + timeSlice.Time); return; } var time = timeSlice.Time; var newData = timeSlice.Data; //If we're in backtest mode we need to capture the daily performance. We do this here directly //before updating the algorithm state with the new data from this time step, otherwise we'll //produce incorrect samples (they'll take into account this time step's new price values) if (backtestMode) { //On day-change sample equity and daily performance for statistics calculations if (_previousTime.Date != time.Date) { //Sample the portfolio value over time for chart. results.SampleEquity(_previousTime, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4)); //Check for divide by zero if (startingPortfolioValue == 0m) { results.SamplePerformance(_previousTime.Date, 0); } else { results.SamplePerformance(_previousTime.Date, Math.Round((algorithm.Portfolio.TotalPortfolioValue - startingPortfolioValue) * 100 / startingPortfolioValue, 10)); } startingPortfolioValue = algorithm.Portfolio.TotalPortfolioValue; } } //Update algorithm state after capturing performance from previous day //Set the algorithm and real time handler's time algorithm.SetDateTime(time); realtime.SetTime(algorithm.Time); //On each time step push the real time prices to the cashbook so we can have updated conversion rates algorithm.Portfolio.CashBook.Update(newData); //Update the securities properties: first before calling user code to avoid issues with data algorithm.Securities.Update(time, newData); // process fill models on the updated data before entering algorithm, applies to all non-market orders transactions.ProcessSynchronousEvents(); if (delistingTickets.Count != 0) { for (int i = 0; i < delistingTickets.Count; i++) { var ticket = delistingTickets[i]; if (ticket.Status == OrderStatus.Filled) { algorithm.Securities.Remove(ticket.Symbol); delistingTickets.RemoveAt(i--); Log.Trace("AlgorithmManager.Run(): Security removed: " + ticket.Symbol); } } } //Check if the user's signalled Quit: loop over data until day changes. if (algorithm.GetQuit()) { _algorithmState = AlgorithmStatus.Quit; Log.Trace("AlgorithmManager.Run(): Algorithm quit requested."); break; } if (algorithm.RunTimeError != null) { _algorithmState = AlgorithmStatus.RuntimeError; Log.Trace(string.Format("AlgorithmManager.Run(): Algorithm encountered a runtime error at {0}. 
Error: {1}", timeSlice.Time, algorithm.RunTimeError)); break; } // perform margin calls, in live mode we can also use realtime to emit these if (time >= nextMarginCallTime || (_liveMode && nextMarginCallTime > DateTime.Now)) { // determine if there are possible margin call orders to be executed bool issueMarginCallWarning; var marginCallOrders = algorithm.Portfolio.ScanForMarginCall(out issueMarginCallWarning); if (marginCallOrders.Count != 0) { try { // tell the algorithm we're about to issue the margin call algorithm.OnMarginCall(marginCallOrders); } catch (Exception err) { algorithm.RunTimeError = err; _algorithmState = AlgorithmStatus.RuntimeError; Log.Error("AlgorithmManager.Run(): RuntimeError: OnMarginCall: " + err.Message + " STACK >>> " + err.StackTrace); return; } // execute the margin call orders var executedTickets = algorithm.Portfolio.MarginCallModel.ExecuteMarginCall(marginCallOrders); foreach (var ticket in executedTickets) { algorithm.Error(string.Format("{0} - Executed MarginCallOrder: {1} - Quantity: {2} @ {3}", algorithm.Time, ticket.Symbol, ticket.Quantity, ticket.OrderEvents.Last().FillPrice)); } } // we didn't perform a margin call, but got the warning flag back, so issue the warning to the algorithm else if (issueMarginCallWarning) { try { algorithm.OnMarginCallWarning(); } catch (Exception err) { algorithm.RunTimeError = err; _algorithmState = AlgorithmStatus.RuntimeError; Log.Error("AlgorithmManager.Run(): RuntimeError: OnMarginCallWarning: " + err.Message + " STACK >>> " + err.StackTrace); } } nextMarginCallTime = time + marginCallFrequency; } //Trigger the data events: Invoke the types we have data for: var newBars = new TradeBars(algorithm.Time); var newTicks = new Ticks(algorithm.Time); var newDividends = new Dividends(algorithm.Time); var newSplits = new Splits(algorithm.Time); var newDelistings = new Delistings(algorithm.Time); //Invoke all non-tradebars, non-ticks methods and build up the TradeBars and Ticks dictionaries // --> i == Subscription Configuration Index, so we don't need to compare types. foreach (var i in newData.Keys) { //Data point and config of this point: var dataPoints = newData[i]; var config = feed.Subscriptions[i]; //Keep track of how many data points we've processed _dataPointCount += dataPoints.Count; //We don't want to pump data that we added just for currency conversions if (config.IsInternalFeed) { continue; } //Create TradeBars Unified Data --> OR --> invoke generic data event. One loop. 
// Aggregate Dividends and Splits -- invoke portfolio application methods foreach (var dataPoint in dataPoints) { var dividend = dataPoint as Dividend; if (dividend != null) { Log.Trace("AlgorithmManager.Run(): Applying Dividend for " + dividend.Symbol); // if this is a dividend apply to portfolio algorithm.Portfolio.ApplyDividend(dividend); if (hasOnDataDividends) { // and add to our data dictionary to pump into OnData(Dividends data) newDividends.Add(dividend); } continue; } var split = dataPoint as Split; if (split != null) { Log.Trace("AlgorithmManager.Run(): Applying Split for " + split.Symbol); // if this is a split apply to portfolio algorithm.Portfolio.ApplySplit(split); if (hasOnDataSplits) { // and add to our data dictionary to pump into OnData(Splits data) newSplits.Add(split); } continue; } var delisting = dataPoint as Delisting; if (delisting != null) { if (hasOnDataDelistings) { // add to out data dictonary to pump into OnData(Delistings data) newDelistings.Add(delisting); } } //Update registered consolidators for this symbol index try { for (var j = 0; j < config.Consolidators.Count; j++) { config.Consolidators[j].Update(dataPoint); } } catch (Exception err) { algorithm.RunTimeError = err; _algorithmState = AlgorithmStatus.RuntimeError; Log.Error("AlgorithmManager.Run(): RuntimeError: Consolidators update: " + err.Message); return; } // TRADEBAR -- add to our dictionary if (dataPoint.DataType == MarketDataType.TradeBar) { var bar = dataPoint as TradeBar; if (bar != null) { newBars[bar.Symbol] = bar; continue; } } // TICK -- add to our dictionary if (dataPoint.DataType == MarketDataType.Tick) { var tick = dataPoint as Tick; if (tick != null) { List <Tick> ticks; if (!newTicks.TryGetValue(tick.Symbol, out ticks)) { ticks = new List <Tick>(3); newTicks.Add(tick.Symbol, ticks); } ticks.Add(tick); continue; } } // if it was nothing else then it must be custom data // CUSTOM DATA -- invoke on data method //Send data into the generic algorithm event handlers try { MethodInvoker methodInvoker; if (methodInvokers.TryGetValue(config.Type, out methodInvoker)) { methodInvoker(algorithm, dataPoint); } } catch (Exception err) { algorithm.RunTimeError = err; _algorithmState = AlgorithmStatus.RuntimeError; Log.Error("AlgorithmManager.Run(): RuntimeError: Custom Data: " + err.Message + " STACK >>> " + err.StackTrace); return; } } } try { // fire off the dividend and split events before pricing events if (hasOnDataDividends && newDividends.Count != 0) { methodInvokers[typeof(Dividends)](algorithm, newDividends); } if (hasOnDataSplits && newSplits.Count != 0) { methodInvokers[typeof(Splits)](algorithm, newSplits); } if (hasOnDataDelistings && newDelistings.Count != 0) { methodInvokers[typeof(Delistings)](algorithm, newDelistings); } } catch (Exception err) { algorithm.RunTimeError = err; _algorithmState = AlgorithmStatus.RuntimeError; Log.Error("AlgorithmManager.Run(): RuntimeError: Dividends/Splits/Delistings: " + err.Message + " STACK >>> " + err.StackTrace); return; } // run the delisting logic after firing delisting events HandleDelistedSymbols(algorithm, newDelistings, delistingTickets); //After we've fired all other events in this second, fire the pricing events: try { if (hasOnDataTradeBars && newBars.Count > 0) { methodInvokers[typeof(TradeBars)](algorithm, newBars); } if (hasOnDataTicks && newTicks.Count > 0) { methodInvokers[typeof(Ticks)](algorithm, newTicks); } } catch (Exception err) { algorithm.RunTimeError = err; _algorithmState = AlgorithmStatus.RuntimeError; 
Log.Error("AlgorithmManager.Run(): RuntimeError: New Style Mode: " + err.Message + " STACK >>> " + err.StackTrace); return; } // EVENT HANDLER v3.0 -- all data in a single event var slice = new Slice(algorithm.Time, newData.Values.SelectMany(x => x), newBars.Count == 0 ? null : newBars, newTicks.Count == 0 ? null : newTicks, newSplits.Count == 0 ? null : newSplits, newDividends.Count == 0 ? null : newDividends, newDelistings.Count == 0 ? null : newDelistings ); try { algorithm.OnData(slice); } catch (Exception err) { algorithm.RunTimeError = err; _algorithmState = AlgorithmStatus.RuntimeError; Log.Error("AlgorithmManager.Run(): RuntimeError: Slice: " + err.Message + " STACK >>> " + err.StackTrace); return; } //If its the historical/paper trading models, wait until market orders have been "filled" // Manually trigger the event handler to prevent thread switch. transactions.ProcessSynchronousEvents(); //Save the previous time for the sample calculations _previousTime = time; // Process any required events of the results handler such as sampling assets, equity, or stock prices. results.ProcessSynchronousEvents(); } // End of ForEach feed.Bridge.GetConsumingEnumerable // stop timing the loops _currentTimeStepTime = DateTime.MinValue; //Stream over:: Send the final packet and fire final events: Log.Trace("AlgorithmManager.Run(): Firing On End Of Algorithm..."); try { algorithm.OnEndOfAlgorithm(); } catch (Exception err) { _algorithmState = AlgorithmStatus.RuntimeError; algorithm.RunTimeError = new Exception("Error running OnEndOfAlgorithm(): " + err.Message, err.InnerException); Log.Error("AlgorithmManager.OnEndOfAlgorithm(): " + err.Message + " STACK >>> " + err.StackTrace); return; } // Process any required events of the results handler such as sampling assets, equity, or stock prices. results.ProcessSynchronousEvents(forceProcess: true); //Liquidate Holdings for Calculations: if (_algorithmState == AlgorithmStatus.Liquidated && _liveMode) { Log.Trace("AlgorithmManager.Run(): Liquidating algorithm holdings..."); algorithm.Liquidate(); results.LogMessage("Algorithm Liquidated"); results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Liquidated); } //Manually stopped the algorithm if (_algorithmState == AlgorithmStatus.Stopped) { Log.Trace("AlgorithmManager.Run(): Stopping algorithm..."); results.LogMessage("Algorithm Stopped"); results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Stopped); } //Backtest deleted. if (_algorithmState == AlgorithmStatus.Deleted) { Log.Trace("AlgorithmManager.Run(): Deleting algorithm..."); results.DebugMessage("Algorithm Id:(" + job.AlgorithmId + ") Deleted by request."); results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Deleted); } //Algorithm finished, send regardless of commands: results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Completed); //Take final samples: results.SampleRange(algorithm.GetChartUpdates()); results.SampleEquity(_previousTime, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4)); results.SamplePerformance(_previousTime, Math.Round((algorithm.Portfolio.TotalPortfolioValue - startingPortfolioValue) * 100 / startingPortfolioValue, 10)); } // End of Run();
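// ------------------------------------------------------------------------------------------------
// Illustrative sketch, not part of the original source: the Slice assembled above funnels every
// data type into a single "v3.0" event, so an algorithm can rely on just this one handler. The
// class name and the "SPY" symbol are assumptions made for illustration.
// ------------------------------------------------------------------------------------------------
class ExampleSliceAlgorithm : QCAlgorithm
{
    public override void OnData(Slice data)
    {
        // bars, ticks, splits, dividends and delistings all arrive on the same Slice object
        if (data.Bars.ContainsKey("SPY"))
        {
            var close = data.Bars["SPY"].Close;
        }
    }
}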
/******************************************************** * CLASS METHODS *********************************************************/ /// <summary> /// Launch the algorithm manager to run this strategy /// </summary> /// <param name="job">Algorithm job</param> /// <param name="algorithm">Algorithm instance</param> /// <param name="feed">Datafeed object</param> /// <param name="transactions">Transaction manager object</param> /// <param name="results">Result handler object</param> /// <param name="setup">Setup handler object</param> /// <param name="realtime">Realtime processing object</param> /// <remarks>Modify with caution</remarks> public static void Run(AlgorithmNodePacket job, IAlgorithm algorithm, IDataFeed feed, ITransactionHandler transactions, IResultHandler results, ISetupHandler setup, IRealTimeHandler realtime) { //Initialize: _dataPointCount = 0; var startingPortfolioValue = setup.StartingCapital; var backtestMode = (job.Type == PacketType.BacktestNode); var methodInvokers = new Dictionary <Type, MethodInvoker>(); var marginCallFrequency = TimeSpan.FromMinutes(5); var nextMarginCallTime = DateTime.MinValue; //Initialize Properties: _frontier = setup.StartingDate; _algorithmId = job.AlgorithmId; _algorithmState = AlgorithmStatus.Running; _previousTime = setup.StartingDate.Date; //Create the method accessors to push generic types into algorithm: Find all OnData events: // Algorithm 1.0 data accessors var hasOnTradeBar = AddMethodInvoker <Dictionary <string, TradeBar> >(algorithm, methodInvokers, "OnTradeBar"); var hasOnTick = AddMethodInvoker <Dictionary <string, List <Tick> > >(algorithm, methodInvokers, "OnTick"); // Algorithm 2.0 data accessors var hasOnDataTradeBars = AddMethodInvoker <TradeBars>(algorithm, methodInvokers); var hasOnDataTicks = AddMethodInvoker <Ticks>(algorithm, methodInvokers); // determine what mode we're in var backwardsCompatibilityMode = !hasOnDataTradeBars && !hasOnDataTicks; // dividend and split events var hasOnDataDividends = AddMethodInvoker <Dividends>(algorithm, methodInvokers); var hasOnDataSplits = AddMethodInvoker <Splits>(algorithm, methodInvokers); //Go through the subscription types and create invokers to trigger the event handlers for each custom type: foreach (var config in feed.Subscriptions) { //If type is a tradebar, combine tradebars and ticks into unified array: if (config.Type.Name != "TradeBar" && config.Type.Name != "Tick") { //Get the matching method for this event handler - e.g. public void OnData(Quandl data) { .. } var genericMethod = (algorithm.GetType()).GetMethod("OnData", new[] { config.Type }); //If we already have this Type-handler then don't add it to invokers again. if (methodInvokers.ContainsKey(config.Type)) { continue; } //If we couldnt find the event handler, let the user know we can't fire that event. if (genericMethod == null) { algorithm.RunTimeError = new Exception("Data event handler not found, please create a function matching this template: public void OnData(" + config.Type.Name + " data) { }"); _algorithmState = AlgorithmStatus.RuntimeError; return; } methodInvokers.Add(config.Type, genericMethod.DelegateForCallMethod()); } } //Loop over the queues: get a data collection, then pass them all into relevent methods in the algorithm. 
Log.Debug("AlgorithmManager.Run(): Algorithm initialized, launching time loop."); foreach (var newData in DataStream.GetData(feed, setup.StartingDate)) { //Check this backtest is still running: if (_algorithmState != AlgorithmStatus.Running) { break; } //Go over each time stamp we've collected, pass it into the algorithm in order: foreach (var time in newData.Keys) { //Set the time frontier: _frontier = time; //Execute with TimeLimit Monitor: if (Isolator.IsCancellationRequested) { return; } //If we're in backtest mode we need to capture the daily performance. We do this here directly //before updating the algorithm state with the new data from this time step, otherwise we'll //produce incorrect samples (they'll take into account this time step's new price values) if (backtestMode) { //Refresh the realtime event monitor: //in backtest mode use the algorithms clock as realtime. realtime.SetTime(time); //On day-change sample equity and daily performance for statistics calculations if (_previousTime.Date != time.Date) { //Sample the portfolio value over time for chart. results.SampleEquity(_previousTime, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4)); //Check for divide by zero if (startingPortfolioValue == 0m) { results.SamplePerformance(_previousTime.Date, 0); } else { results.SamplePerformance(_previousTime.Date, Math.Round((algorithm.Portfolio.TotalPortfolioValue - startingPortfolioValue) * 100 / startingPortfolioValue, 10)); } startingPortfolioValue = algorithm.Portfolio.TotalPortfolioValue; } } //Update algorithm state after capturing performance from previous day //On each time step push the real time prices to the cashbook so we can have updated conversion rates algorithm.Portfolio.CashBook.Update(newData[time]); //Update the securities properties: first before calling user code to avoid issues with data algorithm.Securities.Update(time, newData[time]); // perform margin calls if (time >= nextMarginCallTime) { // determine if there are possible margin call orders to be executed var marginCallOrders = algorithm.Portfolio.ScanForMarginCall(); if (marginCallOrders.Count != 0) { // execute the margin call orders var executedOrders = algorithm.Portfolio.MarginCallModel.ExecuteMarginCall(marginCallOrders); foreach (var order in executedOrders) { algorithm.Error(string.Format("Executed MarginCallOrder: {0} - Quantity: {1} @ {2}", order.Symbol, order.Quantity, order.Price)); } } nextMarginCallTime = time + marginCallFrequency; } //Check if the user's signalled Quit: loop over data until day changes. if (algorithm.GetQuit()) { _algorithmState = AlgorithmStatus.Quit; break; } //Pass in the new time first: algorithm.SetDateTime(time); //Trigger the data events: Invoke the types we have data for: var oldBars = new Dictionary <string, TradeBar>(); var oldTicks = new Dictionary <string, List <Tick> >(); var newBars = new TradeBars(time); var newTicks = new Ticks(time); var newDividends = new Dividends(time); var newSplits = new Splits(time); //Invoke all non-tradebars, non-ticks methods and build up the TradeBars and Ticks dictionaries // --> i == Subscription Configuration Index, so we don't need to compare types. 
foreach (var i in newData[time].Keys) { //Data point and config of this point: var dataPoints = newData[time][i]; var config = feed.Subscriptions[i]; //Keep track of how many data points we've processed _dataPointCount += dataPoints.Count; //We don't want to pump data that we added just for currency conversions if (config.IsInternalFeed) { continue; } //Create TradeBars Unified Data --> OR --> invoke generic data event. One loop. // Aggregate Dividends and Splits -- invoke portfolio application methods foreach (var dataPoint in dataPoints) { var dividend = dataPoint as Dividend; if (dividend != null) { // if this is a dividend apply to portfolio algorithm.Portfolio.ApplyDividend(dividend); if (hasOnDataDividends) { // and add to our data dictionary to pump into OnData(Dividends data) newDividends.Add(dividend); } continue; } var split = dataPoint as Split; if (split != null) { // if this is a split apply to portfolio algorithm.Portfolio.ApplySplit(split); if (hasOnDataSplits) { // and add to our data dictionary to pump into OnData(Splits data) newSplits.Add(split); } continue; } //Update registered consolidators for this symbol index try { for (var j = 0; j < config.Consolidators.Count; j++) { config.Consolidators[j].Update(dataPoint); } } catch (Exception err) { algorithm.RunTimeError = err; _algorithmState = AlgorithmStatus.RuntimeError; Log.Error("AlgorithmManager.Run(): RuntimeError: Consolidators update: " + err.Message); return; } // TRADEBAR -- add to our dictionary var bar = dataPoint as TradeBar; if (bar != null) { try { if (backwardsCompatibilityMode) { oldBars[bar.Symbol] = bar; } else { newBars[bar.Symbol] = bar; } } catch (Exception err) { Log.Error(time.ToLongTimeString() + " >> " + bar.Time.ToLongTimeString() + " >> " + bar.Symbol + " >> " + bar.Value.ToString("C")); Log.Error("AlgorithmManager.Run(): Failed to add TradeBar (" + bar.Symbol + ") Time: (" + time.ToLongTimeString() + ") Count:(" + newBars.Count + ") " + err.Message); } continue; } // TICK -- add to our dictionary var tick = dataPoint as Tick; if (tick != null) { if (backwardsCompatibilityMode) { List <Tick> ticks; if (!oldTicks.TryGetValue(tick.Symbol, out ticks)) { ticks = new List <Tick>(3); oldTicks.Add(tick.Symbol, ticks); } ticks.Add(tick); } else { List <Tick> ticks; if (!newTicks.TryGetValue(tick.Symbol, out ticks)) { ticks = new List <Tick>(3); newTicks.Add(tick.Symbol, ticks); } ticks.Add(tick); } continue; } // if it was nothing else then it must be custom data // CUSTOM DATA -- invoke on data method //Send data into the generic algorithm event handlers try { methodInvokers[config.Type](algorithm, dataPoint); } catch (Exception err) { algorithm.RunTimeError = err; _algorithmState = AlgorithmStatus.RuntimeError; Log.Debug("AlgorithmManager.Run(): RuntimeError: Custom Data: " + err.Message + " STACK >>> " + err.StackTrace); return; } } } try { // fire off the dividend and split events before pricing events if (hasOnDataDividends && newDividends.Count != 0) { methodInvokers[typeof(Dividends)](algorithm, newDividends); } if (hasOnDataSplits && newSplits.Count != 0) { methodInvokers[typeof(Splits)](algorithm, newSplits); } } catch (Exception err) { algorithm.RunTimeError = err; _algorithmState = AlgorithmStatus.RuntimeError; Log.Debug("AlgorithmManager.Run(): RuntimeError: Dividends/Splits: " + err.Message + " STACK >>> " + err.StackTrace); return; } //After we've fired all other events in this second, fire the pricing events: if (backwardsCompatibilityMode) { //Log.Debug("AlgorithmManager.Run(): Invoking 
v1.0 Event Handlers..."); try { if (hasOnTradeBar && oldBars.Count > 0) { methodInvokers[typeof(TradeBars)](algorithm, oldBars); } if (hasOnTick && oldTicks.Count > 0) { methodInvokers[typeof(Ticks)](algorithm, oldTicks); } } catch (Exception err) { algorithm.RunTimeError = err; _algorithmState = AlgorithmStatus.RuntimeError; Log.Debug("AlgorithmManager.Run(): RuntimeError: Backwards Compatibility Mode: " + err.Message + " STACK >>> " + err.StackTrace); return; } } else { //Log.Debug("AlgorithmManager.Run(): Invoking v2.0 Event Handlers..."); try { if (hasOnDataTradeBars && newBars.Count > 0) { methodInvokers[typeof(TradeBars)](algorithm, newBars); } if (hasOnDataTicks && newTicks.Count > 0) { methodInvokers[typeof(Ticks)](algorithm, newTicks); } } catch (Exception err) { algorithm.RunTimeError = err; _algorithmState = AlgorithmStatus.RuntimeError; Log.Debug("AlgorithmManager.Run(): RuntimeError: New Style Mode: " + err.Message + " STACK >>> " + err.StackTrace); return; } } //If its the historical/paper trading models, wait until market orders have been "filled" // Manually trigger the event handler to prevent thread switch. transactions.ProcessSynchronousEvents(); //Save the previous time for the sample calculations _previousTime = time; // Process any required events of the results handler such as sampling assets, equity, or stock prices. results.ProcessSynchronousEvents(); } // End of Time Loop } // End of ForEach DataStream //Stream over:: Send the final packet and fire final events: Log.Trace("AlgorithmManager.Run(): Firing On End Of Algorithm..."); try { algorithm.OnEndOfAlgorithm(); } catch (Exception err) { _algorithmState = AlgorithmStatus.RuntimeError; algorithm.RunTimeError = new Exception("Error running OnEndOfAlgorithm(): " + err.Message, err.InnerException); Log.Debug("AlgorithmManager.OnEndOfAlgorithm(): " + err.Message + " STACK >>> " + err.StackTrace); return; } // Process any required events of the results handler such as sampling assets, equity, or stock prices. results.ProcessSynchronousEvents(forceProcess: true); //Liquidate Holdings for Calculations: if (_algorithmState == AlgorithmStatus.Liquidated || !Engine.LiveMode) { Log.Trace("AlgorithmManager.Run(): Liquidating algorithm holdings..."); algorithm.Liquidate(); results.LogMessage("Algorithm Liquidated"); results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Liquidated); } //Manually stopped the algorithm if (_algorithmState == AlgorithmStatus.Stopped) { Log.Trace("AlgorithmManager.Run(): Stopping algorithm..."); results.LogMessage("Algorithm Stopped"); results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Stopped); } //Backtest deleted. if (_algorithmState == AlgorithmStatus.Deleted) { Log.Trace("AlgorithmManager.Run(): Deleting algorithm..."); results.DebugMessage("Algorithm Id:(" + job.AlgorithmId + ") Deleted by request."); results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Deleted); } //Algorithm finished, send regardless of commands: results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Completed); //Take final samples: results.SampleRange(algorithm.GetChartUpdates()); results.SampleEquity(_frontier, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4)); results.SamplePerformance(_frontier, Math.Round((algorithm.Portfolio.TotalPortfolioValue - startingPortfolioValue) * 100 / startingPortfolioValue, 10)); } // End of Run();
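// ----------------------------------------------------------------------------------------------
// Illustrative sketch (not part of the engine): each Run() overload wires typed handlers through
// AddMethodInvoker<T>, which is not shown in this excerpt. A minimal helper consistent with the
// call sites above would reflect the handler once and cache a Fasterflect MethodInvoker delegate
// (DelegateForCallMethod is the Fasterflect extension already used above), returning whether the
// algorithm actually declares that overload.
// ----------------------------------------------------------------------------------------------
private static bool AddMethodInvoker<T>(IAlgorithm algorithm, Dictionary<Type, MethodInvoker> methodInvokers, string methodName = "OnData")
{
    // look for e.g. public void OnData(TradeBars data) { ... } on the user's algorithm type
    var handler = algorithm.GetType().GetMethod(methodName, new[] { typeof(T) });
    if (handler == null)
    {
        // the algorithm doesn't implement this handler, so the caller skips invoking it
        return false;
    }

    // cache a compiled delegate so the hot data loop avoids repeated reflection
    methodInvokers.Add(typeof(T), handler.DelegateForCallMethod());
    return true;
}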
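// ----------------------------------------------------------------------------------------------
// Illustrative sketch (not part of the engine): the static Run() above falls back to
// backwardsCompatibilityMode when the algorithm defines neither OnData(TradeBars) nor
// OnData(Ticks). The v1.0 handlers it registers by name ("OnTradeBar"/"OnTick") take plain
// dictionaries, so a legacy-style algorithm would be shaped roughly like this (class name and
// symbol are assumptions):
// ----------------------------------------------------------------------------------------------
public class ExampleLegacyAlgorithm : QCAlgorithm
{
    public override void Initialize()
    {
        AddSecurity(SecurityType.Equity, "SPY", Resolution.Minute);
    }

    // v1.0 handler registered via AddMethodInvoker<Dictionary<string, TradeBar>>(..., "OnTradeBar")
    public void OnTradeBar(Dictionary<string, TradeBar> data)
    {
        TradeBar bar;
        if (data.TryGetValue("SPY", out bar) && !Portfolio.Invested)
        {
            Order("SPY", 100);
        }
    }

    // v1.0 handler registered via AddMethodInvoker<Dictionary<string, List<Tick>>>(..., "OnTick")
    public void OnTick(Dictionary<string, List<Tick>> data)
    {
        // ticks for this time step, grouped per symbol
    }
}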
/// <summary> /// Launch the algorithm manager to run this strategy /// </summary> /// <param name="job">Algorithm job</param> /// <param name="algorithm">Algorithm instance</param> /// <param name="synchronizer">Instance which implements <see cref="ISynchronizer"/>. Used to stream the data</param> /// <param name="transactions">Transaction manager object</param> /// <param name="results">Result handler object</param> /// <param name="realtime">Realtime processing object</param> /// <param name="leanManager">ILeanManager implementation that is updated periodically with the IAlgorithm instance</param> /// <param name="alphas">Alpha handler used to process algorithm generated insights</param> /// <param name="token">Cancellation token</param> /// <remarks>Modify with caution</remarks> public void Run(AlgorithmNodePacket job, IAlgorithm algorithm, ISynchronizer synchronizer, ITransactionHandler transactions, IResultHandler results, IRealTimeHandler realtime, ILeanManager leanManager, IAlphaHandler alphas, CancellationToken token) { //Initialize: DataPoints = 0; _algorithm = algorithm; var backtestMode = (job.Type == PacketType.BacktestNode); var methodInvokers = new Dictionary <Type, MethodInvoker>(); var marginCallFrequency = TimeSpan.FromMinutes(5); var nextMarginCallTime = DateTime.MinValue; var settlementScanFrequency = TimeSpan.FromMinutes(30); var nextSettlementScanTime = DateTime.MinValue; var time = algorithm.StartDate.Date; var pendingDelistings = new List <Delisting>(); var splitWarnings = new List <Split>(); //Initialize Properties: AlgorithmId = job.AlgorithmId; //Create the method accessors to push generic types into algorithm: Find all OnData events: // Algorithm 2.0 data accessors var hasOnDataTradeBars = AddMethodInvoker <TradeBars>(algorithm, methodInvokers); var hasOnDataQuoteBars = AddMethodInvoker <QuoteBars>(algorithm, methodInvokers); var hasOnDataOptionChains = AddMethodInvoker <OptionChains>(algorithm, methodInvokers); var hasOnDataTicks = AddMethodInvoker <Ticks>(algorithm, methodInvokers); // dividend and split events var hasOnDataDividends = AddMethodInvoker <Dividends>(algorithm, methodInvokers); var hasOnDataSplits = AddMethodInvoker <Splits>(algorithm, methodInvokers); var hasOnDataDelistings = AddMethodInvoker <Delistings>(algorithm, methodInvokers); var hasOnDataSymbolChangedEvents = AddMethodInvoker <SymbolChangedEvents>(algorithm, methodInvokers); //Go through the subscription types and create invokers to trigger the event handlers for each custom type: foreach (var config in algorithm.SubscriptionManager.Subscriptions) { //If type is a custom feed, check for a dedicated event handler if (config.IsCustomData) { //Get the matching method for this event handler - e.g. public void OnData(Quandl data) { .. } var genericMethod = (algorithm.GetType()).GetMethod("OnData", new[] { config.Type }); //If we already have this Type-handler then don't add it to invokers again. if (methodInvokers.ContainsKey(config.Type)) { continue; } if (genericMethod != null) { methodInvokers.Add(config.Type, genericMethod.DelegateForCallMethod()); } } } // Schedule a daily event for sampling at midnight every night algorithm.Schedule.On("Daily Sampling", algorithm.Schedule.DateRules.EveryDay(), algorithm.Schedule.TimeRules.Midnight, () => { results.Sample(algorithm.UtcTime); }); //Loop over the queues: get a data collection, then pass them all into relevent methods in the algorithm. 
Log.Trace($"AlgorithmManager.Run(): Begin DataStream - Start: {algorithm.StartDate} Stop: {algorithm.EndDate} Time: {algorithm.Time} Warmup: {algorithm.IsWarmingUp}"); foreach (var timeSlice in Stream(algorithm, synchronizer, results, token)) { // reset our timer on each loop TimeLimit.StartNewTimeStep(); //Check this backtest is still running: if (_algorithm.Status != AlgorithmStatus.Running && _algorithm.RunTimeError == null) { Log.Error($"AlgorithmManager.Run(): Algorithm state changed to {_algorithm.Status} at {timeSlice.Time.ToStringInvariant()}"); break; } //Execute with TimeLimit Monitor: if (token.IsCancellationRequested) { Log.Error($"AlgorithmManager.Run(): CancellationRequestion at {timeSlice.Time.ToStringInvariant()}"); return; } // Update the ILeanManager leanManager.Update(); time = timeSlice.Time; DataPoints += timeSlice.DataPointCount; if (backtestMode && algorithm.Portfolio.TotalPortfolioValue <= 0) { var logMessage = "AlgorithmManager.Run(): Portfolio value is less than or equal to zero, stopping algorithm."; Log.Error(logMessage); results.SystemDebugMessage(logMessage); break; } // If backtesting/warmup, we need to check if there are realtime events in the past // which didn't fire because at the scheduled times there was no data (i.e. markets closed) // and fire them with the correct date/time. realtime.ScanPastEvents(time); //Set the algorithm and real time handler's time algorithm.SetDateTime(time); // the time pulse are just to advance algorithm time, lets shortcut the loop here if (timeSlice.IsTimePulse) { continue; } // Update the current slice before firing scheduled events or any other task algorithm.SetCurrentSlice(timeSlice.Slice); if (timeSlice.Slice.SymbolChangedEvents.Count != 0) { if (hasOnDataSymbolChangedEvents) { methodInvokers[typeof(SymbolChangedEvents)](algorithm, timeSlice.Slice.SymbolChangedEvents); } foreach (var symbol in timeSlice.Slice.SymbolChangedEvents.Keys) { // cancel all orders for the old symbol foreach (var ticket in transactions.GetOpenOrderTickets(x => x.Symbol == symbol)) { ticket.Cancel("Open order cancelled on symbol changed event"); } } } if (timeSlice.SecurityChanges != SecurityChanges.None) { foreach (var security in timeSlice.SecurityChanges.AddedSecurities) { security.IsTradable = true; // uses TryAdd, so don't need to worry about duplicates here algorithm.Securities.Add(security); } var activeSecurities = algorithm.UniverseManager.ActiveSecurities; foreach (var security in timeSlice.SecurityChanges.RemovedSecurities) { if (!activeSecurities.ContainsKey(security.Symbol)) { security.IsTradable = false; } } leanManager.OnSecuritiesChanged(timeSlice.SecurityChanges); realtime.OnSecuritiesChanged(timeSlice.SecurityChanges); results.OnSecuritiesChanged(timeSlice.SecurityChanges); } //Update the securities properties: first before calling user code to avoid issues with data foreach (var update in timeSlice.SecuritiesUpdateData) { var security = update.Target; security.Update(update.Data, update.DataType, update.ContainsFillForwardData); if (!update.IsInternalConfig) { // Send market price updates to the TradeBuilder algorithm.TradeBuilder.SetMarketPrice(security.Symbol, security.Price); } } //Update the securities properties with any universe data if (timeSlice.UniverseData.Count > 0) { foreach (var kvp in timeSlice.UniverseData) { foreach (var data in kvp.Value.Data) { Security security; if (algorithm.Securities.TryGetValue(data.Symbol, out security)) { security.Cache.StoreData(new[] { data }, data.GetType()); } } } } // poke each 
cash object to update from the recent security data foreach (var cash in algorithm.Portfolio.CashBook.Values.Where(x => x.CurrencyConversion != null)) { cash.Update(); } // security prices got updated algorithm.Portfolio.InvalidateTotalPortfolioValue(); // process fill models on the updated data before entering algorithm, applies to all non-market orders transactions.ProcessSynchronousEvents(); // fire real time events after we've updated based on the new data realtime.SetTime(timeSlice.Time); // process split warnings for options ProcessSplitSymbols(algorithm, splitWarnings, pendingDelistings); //Check if the user's signalled Quit: loop over data until day changes. if (_algorithm.Status != AlgorithmStatus.Running && _algorithm.RunTimeError == null) { Log.Error($"AlgorithmManager.Run(): Algorithm state changed to {_algorithm.Status} at {timeSlice.Time.ToStringInvariant()}"); break; } if (algorithm.RunTimeError != null) { Log.Error($"AlgorithmManager.Run(): Stopping, encountered a runtime error at {algorithm.UtcTime} UTC."); return; } // perform margin calls, in live mode we can also use realtime to emit these if (time >= nextMarginCallTime || (_liveMode && nextMarginCallTime > DateTime.UtcNow)) { // determine if there are possible margin call orders to be executed bool issueMarginCallWarning; var marginCallOrders = algorithm.Portfolio.MarginCallModel.GetMarginCallOrders(out issueMarginCallWarning); if (marginCallOrders.Count != 0) { var executingMarginCall = false; try { // tell the algorithm we're about to issue the margin call algorithm.OnMarginCall(marginCallOrders); executingMarginCall = true; // execute the margin call orders var executedTickets = algorithm.Portfolio.MarginCallModel.ExecuteMarginCall(marginCallOrders); foreach (var ticket in executedTickets) { algorithm.Error($"{algorithm.Time.ToStringInvariant()} - Executed MarginCallOrder: {ticket.Symbol} - " + $"Quantity: {ticket.Quantity.ToStringInvariant()} @ {ticket.AverageFillPrice.ToStringInvariant()}" ); } } catch (Exception err) { algorithm.SetRuntimeError(err, executingMarginCall ? 
"Portfolio.MarginCallModel.ExecuteMarginCall" : "OnMarginCall"); return; } } // we didn't perform a margin call, but got the warning flag back, so issue the warning to the algorithm else if (issueMarginCallWarning) { try { algorithm.OnMarginCallWarning(); } catch (Exception err) { algorithm.SetRuntimeError(err, "OnMarginCallWarning"); return; } } nextMarginCallTime = time + marginCallFrequency; } // perform check for settlement of unsettled funds if (time >= nextSettlementScanTime || (_liveMode && nextSettlementScanTime > DateTime.UtcNow)) { algorithm.Portfolio.ScanForCashSettlement(algorithm.UtcTime); nextSettlementScanTime = time + settlementScanFrequency; } // before we call any events, let the algorithm know about universe changes if (timeSlice.SecurityChanges != SecurityChanges.None) { try { var algorithmSecurityChanges = new SecurityChanges(timeSlice.SecurityChanges) { // by default for user code we want to filter out custom securities FilterCustomSecurities = true, // by default for user code we want to filter out internal securities FilterInternalSecurities = true }; algorithm.OnSecuritiesChanged(algorithmSecurityChanges); algorithm.OnFrameworkSecuritiesChanged(algorithmSecurityChanges); } catch (Exception err) { algorithm.SetRuntimeError(err, "OnSecuritiesChanged"); return; } } // apply dividends foreach (var dividend in timeSlice.Slice.Dividends.Values) { Log.Debug($"AlgorithmManager.Run(): {algorithm.Time}: Applying Dividend: {dividend}"); Security security = null; if (_liveMode && algorithm.Securities.TryGetValue(dividend.Symbol, out security)) { Log.Trace($"AlgorithmManager.Run(): {algorithm.Time}: Pre-Dividend: {dividend}. " + $"Security Holdings: {security.Holdings.Quantity} Account Currency Holdings: " + $"{algorithm.Portfolio.CashBook[algorithm.AccountCurrency].Amount}"); } var mode = algorithm.SubscriptionManager.SubscriptionDataConfigService .GetSubscriptionDataConfigs(dividend.Symbol) .DataNormalizationMode(); // apply the dividend event to the portfolio algorithm.Portfolio.ApplyDividend(dividend, _liveMode, mode); if (_liveMode && security != null) { Log.Trace($"AlgorithmManager.Run(): {algorithm.Time}: Post-Dividend: {dividend}. Security " + $"Holdings: {security.Holdings.Quantity} Account Currency Holdings: " + $"{algorithm.Portfolio.CashBook[algorithm.AccountCurrency].Amount}"); } } // apply splits foreach (var split in timeSlice.Slice.Splits.Values) { try { // only process split occurred events (ignore warnings) if (split.Type != SplitType.SplitOccurred) { continue; } Log.Debug($"AlgorithmManager.Run(): {algorithm.Time}: Applying Split for {split.Symbol}"); Security security = null; if (_liveMode && algorithm.Securities.TryGetValue(split.Symbol, out security)) { Log.Trace($"AlgorithmManager.Run(): {algorithm.Time}: Pre-Split for {split}. Security Price: {security.Price} Holdings: {security.Holdings.Quantity}"); } var mode = algorithm.SubscriptionManager.SubscriptionDataConfigService .GetSubscriptionDataConfigs(split.Symbol) .DataNormalizationMode(); // apply the split event to the portfolio algorithm.Portfolio.ApplySplit(split, _liveMode, mode); if (_liveMode && security != null) { Log.Trace($"AlgorithmManager.Run(): {algorithm.Time}: Post-Split for {split}. 
Security Price: {security.Price} Holdings: {security.Holdings.Quantity}"); } // apply the split to open orders as well in raw mode, all other modes are split adjusted if (_liveMode || mode == DataNormalizationMode.Raw) { // in live mode we always want to have our order match the order at the brokerage, so apply the split to the orders var openOrders = transactions.GetOpenOrderTickets(ticket => ticket.Symbol == split.Symbol); algorithm.BrokerageModel.ApplySplit(openOrders.ToList(), split); } } catch (Exception err) { algorithm.SetRuntimeError(err, "Split event"); return; } } //Update registered consolidators for this symbol index try { if (timeSlice.ConsolidatorUpdateData.Count > 0) { var timeKeeper = algorithm.TimeKeeper; foreach (var update in timeSlice.ConsolidatorUpdateData) { var localTime = timeKeeper.GetLocalTimeKeeper(update.Target.ExchangeTimeZone).LocalTime; var consolidators = update.Target.Consolidators; foreach (var consolidator in consolidators) { foreach (var dataPoint in update.Data) { // only push data into consolidators on the native, subscribed to resolution if (EndTimeIsInNativeResolution(update.Target, dataPoint.EndTime)) { consolidator.Update(dataPoint); } } // scan for time after we've pumped all the data through for this consolidator consolidator.Scan(localTime); } } } } catch (Exception err) { algorithm.SetRuntimeError(err, "Consolidators update"); return; } // fire custom event handlers foreach (var update in timeSlice.CustomData) { MethodInvoker methodInvoker; if (!methodInvokers.TryGetValue(update.DataType, out methodInvoker)) { continue; } try { foreach (var dataPoint in update.Data) { if (update.DataType.IsInstanceOfType(dataPoint)) { methodInvoker(algorithm, dataPoint); } } } catch (Exception err) { algorithm.SetRuntimeError(err, "Custom Data"); return; } } try { // fire off the dividend and split events before pricing events if (hasOnDataDividends && timeSlice.Slice.Dividends.Count != 0) { methodInvokers[typeof(Dividends)](algorithm, timeSlice.Slice.Dividends); } if (hasOnDataSplits && timeSlice.Slice.Splits.Count != 0) { methodInvokers[typeof(Splits)](algorithm, timeSlice.Slice.Splits); } if (hasOnDataDelistings && timeSlice.Slice.Delistings.Count != 0) { methodInvokers[typeof(Delistings)](algorithm, timeSlice.Slice.Delistings); } } catch (Exception err) { algorithm.SetRuntimeError(err, "Dividends/Splits/Delistings"); return; } // Only track pending delistings in non-live mode. 
if (!algorithm.LiveMode) { // Keep this up to date even though we don't process delistings here anymore foreach (var delisting in timeSlice.Slice.Delistings.Values) { if (delisting.Type == DelistingType.Warning) { // Store our delistings warnings because they are still used by ProcessSplitSymbols above pendingDelistings.Add(delisting); } else { // If we have an actual delisting event, remove it from pending delistings var index = pendingDelistings.FindIndex(x => x.Symbol == delisting.Symbol); if (index != -1) { pendingDelistings.RemoveAt(index); } } } } // run split logic after firing split events HandleSplitSymbols(timeSlice.Slice.Splits, splitWarnings); //After we've fired all other events in this second, fire the pricing events: try { if (hasOnDataTradeBars && timeSlice.Slice.Bars.Count > 0) { methodInvokers[typeof(TradeBars)](algorithm, timeSlice.Slice.Bars); } if (hasOnDataQuoteBars && timeSlice.Slice.QuoteBars.Count > 0) { methodInvokers[typeof(QuoteBars)](algorithm, timeSlice.Slice.QuoteBars); } if (hasOnDataOptionChains && timeSlice.Slice.OptionChains.Count > 0) { methodInvokers[typeof(OptionChains)](algorithm, timeSlice.Slice.OptionChains); } if (hasOnDataTicks && timeSlice.Slice.Ticks.Count > 0) { methodInvokers[typeof(Ticks)](algorithm, timeSlice.Slice.Ticks); } } catch (Exception err) { algorithm.SetRuntimeError(err, "methodInvokers"); return; } try { if (timeSlice.Slice.HasData) { // EVENT HANDLER v3.0 -- all data in a single event algorithm.OnData(timeSlice.Slice); } // always turn the crank on this method to ensure universe selection models function properly on day changes w/out data algorithm.OnFrameworkData(timeSlice.Slice); } catch (Exception err) { algorithm.SetRuntimeError(err, "OnData"); return; } //If its the historical/paper trading models, wait until market orders have been "filled" // Manually trigger the event handler to prevent thread switch. transactions.ProcessSynchronousEvents(); // sample alpha charts now that we've updated time/price information and after transactions // are processed so that insights closed because of new order based insights get updated alphas.ProcessSynchronousEvents(); // send the alpha statistics to the result handler for storage/transmit with the result packets results.SetAlphaRuntimeStatistics(alphas.RuntimeStatistics); // Process any required events of the results handler such as sampling assets, equity, or stock prices. results.ProcessSynchronousEvents(); // poke the algorithm at the end of each time step algorithm.OnEndOfTimeStep(); } // End of ForEach feed.Bridge.GetConsumingEnumerable // stop timing the loops TimeLimit.StopEnforcingTimeLimit(); //Stream over:: Send the final packet and fire final events: Log.Trace("AlgorithmManager.Run(): Firing On End Of Algorithm..."); try { algorithm.OnEndOfAlgorithm(); } catch (Exception err) { algorithm.SetRuntimeError(err, "OnEndOfAlgorithm"); return; } // final processing now that the algorithm has completed alphas.ProcessSynchronousEvents(); // send the final alpha statistics to the result handler for storage/transmit with the result packets results.SetAlphaRuntimeStatistics(alphas.RuntimeStatistics); // Process any required events of the results handler such as sampling assets, equity, or stock prices. 
results.ProcessSynchronousEvents(forceProcess: true); //Liquidate Holdings for Calculations: if (_algorithm.Status == AlgorithmStatus.Liquidated && _liveMode) { Log.Trace("AlgorithmManager.Run(): Liquidating algorithm holdings..."); algorithm.Liquidate(); results.LogMessage("Algorithm Liquidated"); results.SendStatusUpdate(AlgorithmStatus.Liquidated); } //Manually stopped the algorithm if (_algorithm.Status == AlgorithmStatus.Stopped) { Log.Trace("AlgorithmManager.Run(): Stopping algorithm..."); results.LogMessage("Algorithm Stopped"); results.SendStatusUpdate(AlgorithmStatus.Stopped); } //Backtest deleted. if (_algorithm.Status == AlgorithmStatus.Deleted) { Log.Trace("AlgorithmManager.Run(): Deleting algorithm..."); results.DebugMessage("Algorithm Id:(" + job.AlgorithmId + ") Deleted by request."); results.SendStatusUpdate(AlgorithmStatus.Deleted); } //Algorithm finished, send regardless of commands: results.SendStatusUpdate(AlgorithmStatus.Completed); SetStatus(AlgorithmStatus.Completed); //Take final samples: results.Sample(time); } // End of Run();
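// ----------------------------------------------------------------------------------------------
// Illustrative sketch (not part of the engine): seen from user code, one pass of the time loop
// above fires, roughly in order, the margin call hooks when margin is breached, then
// OnSecuritiesChanged for universe changes, the dividend/split/delisting handlers, the typed
// pricing handlers and OnData(Slice), finishing with OnEndOfAlgorithm once the stream ends.
// A hedged example overriding those hooks (class name, dates and symbol are assumptions):
// ----------------------------------------------------------------------------------------------
public class ExampleLifecycleAlgorithm : QCAlgorithm
{
    public override void Initialize()
    {
        SetStartDate(2020, 1, 1);
        SetCash(100000);
        AddEquity("SPY", Resolution.Hour);
    }

    public override void OnSecuritiesChanged(SecurityChanges changes)
    {
        // fired by the loop above right after universe changes are applied
        foreach (var added in changes.AddedSecurities)
        {
            Debug("Now tradable: " + added.Symbol);
        }
    }

    public override void OnMarginCallWarning()
    {
        // fired when the margin call model raises the warning flag without issuing orders
        Debug("Margin call warning received");
    }

    public override void OnMarginCall(List<SubmitOrderRequest> requests)
    {
        // fired just before the engine executes the margin call orders; the list can be modified
    }

    public override void OnData(Slice slice)
    {
        if (!Portfolio.Invested && slice.Bars.ContainsKey("SPY"))
        {
            SetHoldings("SPY", 0.5);
        }
    }

    public override void OnEndOfAlgorithm()
    {
        // fired once after the data stream completes, before the final samples are taken
        Log("Final portfolio value: " + Portfolio.TotalPortfolioValue);
    }
}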
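// ----------------------------------------------------------------------------------------------
// Illustrative sketch (not part of the engine): the modern Run() overload above registers its
// midnight "Daily Sampling" event through the same scheduling API exposed to user algorithms.
// A hedged example of that API from the user side (event name, dates, symbol and the ten-minute
// offset are assumptions):
// ----------------------------------------------------------------------------------------------
public class ExampleScheduledAlgorithm : QCAlgorithm
{
    public override void Initialize()
    {
        SetStartDate(2020, 1, 1);
        AddEquity("SPY", Resolution.Minute);

        // mirrors the engine's registration above: fire once per day at midnight
        Schedule.On("User Daily Event", Schedule.DateRules.EveryDay(), Schedule.TimeRules.Midnight,
            () => Debug("New day: " + Time));

        // a more typical user schedule: ten minutes after SPY's market open
        Schedule.On(Schedule.DateRules.EveryDay("SPY"), Schedule.TimeRules.AfterMarketOpen("SPY", 10),
            () => Debug("Ten minutes after the open"));
    }
}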