public void Initialize(AlgorithmNodePacket job,
     IMessagingHandler messagingHandler,
     IApi api,
     IDataFeed dataFeed,
     ISetupHandler setupHandler,
     ITransactionHandler transactionHandler)
 {
     _job = job;
 }
 /********************************************************
 * PUBLIC CONSTRUCTOR
 *********************************************************/
 /// <summary>
 /// Initialize the realtime event handler with all information required for triggering daily events.
 /// </summary>
 public LiveTradingRealTimeHandler(IAlgorithm algorithm, IDataFeed feed, IResultHandler results, IBrokerage brokerage, AlgorithmNodePacket job)
 {
     //Initialize:
     _algorithm = algorithm;
     _events = new List<RealTimeEvent>();
     _today = new Dictionary<SecurityType, MarketToday>();
     _feed = feed;
     _results = results;
 }
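For context, a minimal self-contained sketch of the daily-event pattern such a handler drives is shown below. The DailyEvent type and its members are illustrative assumptions, not part of LEAN's API: the handler scans a clock and fires each registered callback at most once per day.

public sealed class DailyEvent
{
    private readonly TimeSpan _timeOfDay;
    private readonly Action _callback;
    private DateTime _lastFired = DateTime.MinValue;

    public DailyEvent(TimeSpan timeOfDay, Action callback)
    {
        _timeOfDay = timeOfDay;
        _callback = callback;
    }

    // Called on every clock scan; fires the callback at most once per day
    // once the configured time of day has been reached.
    public void Scan(DateTime now)
    {
        if (now.Date != _lastFired.Date && now.TimeOfDay >= _timeOfDay)
        {
            _lastFired = now;
            _callback();
        }
    }
}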
Example #3
        public Simulator(ILimitOrderBook limitOrderBook, IDataFeed dataCommunicationsModule, ITradeInterface orderCommunicationsModule)
        {
            _limitOrderBook = limitOrderBook;
            _dataCommunicationsModule = dataCommunicationsModule;
            _orderCommunicationsModule = orderCommunicationsModule;

            _orderCommunicationsModule.OnOrder += _orderCommunicationsModule_OnOrder;
            _orderCommunicationsModule.OnOrderCancellation += _orderCommunicationsModule_OnOrderCancellation;
        }
        public SingleThreadMarket(IDataFeed dataFeed, IOrderManager orderManager)
        {
            _dataFeed = dataFeed;
            _orderManager = orderManager;

            _dataFeed.Second += time =>
            {
                lock (_syncRoot)
                {
                    if (Second != null) Second(time);
                }
            };
        }
Example #5
            public OhlcGenerator(IDataFeed dataFeed, PriceSelector selector, TimeSpan duration, TimeSpan? offset = null)
            {
                OhlcBar bar = null;
                selector.PriceTick += price =>
                {
                    if (!price.HasValue)
                    {
                        return;
                    }

                    var priceValue = price.Value;

                    if (bar == null)
                    {
                        bar = new OhlcBar
                        {
                            Open = priceValue,
                            High = priceValue,
                            Low = priceValue,
                            Close = priceValue
                        };
                        if (Open != null) Open(priceValue);
                    }
                    bar.Close = priceValue;
                    bar.High = priceValue > bar.High ? priceValue : bar.High;
                    bar.Low = priceValue < bar.Low ? priceValue : bar.Low;
                };

                Candle lastCandle = null;
                NewCandle += ohlcCandle =>
                {
                    lastCandle = ohlcCandle;
                };

                DateTime? nextPeriodStart = null;
                dataFeed.Second += time =>
                {
                    if (!nextPeriodStart.HasValue)
                    {
                        nextPeriodStart = Misc.GetNextPeriodStart(time, duration, offset);
                    }

                    if (time < nextPeriodStart) return;

                    var thisCandle = bar != null ? bar.GetCandle() : lastCandle;
                    if (NewCandle != null) NewCandle(thisCandle);
                    lastCandle = thisCandle;
                    bar = null;
                    nextPeriodStart += duration;
                };
            }
 /// <summary>
 /// Initializes a new instance of the <see cref="LeanEngineAlgorithmHandlers"/> class from the specified handlers
 /// </summary>
 /// <param name="results">The result handler for communicating results from the algorithm</param>
 /// <param name="setup">The setup handler used to initialize algorithm state</param>
 /// <param name="dataFeed">The data feed handler used to pump data to the algorithm</param>
 /// <param name="transactions">The transaction handler used to process orders from the algorithm</param>
 /// <param name="realTime">The real time handler used to process real time events</param>
 /// <param name="historyProvider">The history provider used to process historical data requests</param>
 /// <param name="commandQueue">The command queue handler used to receive external commands for the algorithm</param>
 /// <param name="mapFileProvider">The map file provider used to retrieve map files for the data feed</param>
 public LeanEngineAlgorithmHandlers(IResultHandler results,
     ISetupHandler setup,
     IDataFeed dataFeed,
     ITransactionHandler transactions,
     IRealTimeHandler realTime,
     IHistoryProvider historyProvider,
     ICommandQueueHandler commandQueue,
     IMapFileProvider mapFileProvider
     )
 {
     if (results == null)
     {
         throw new ArgumentNullException("results");
     }
     if (setup == null)
     {
         throw new ArgumentNullException("setup");
     }
     if (dataFeed == null)
     {
         throw new ArgumentNullException("dataFeed");
     }
     if (transactions == null)
     {
         throw new ArgumentNullException("transactions");
     }
     if (realTime == null)
     {
         throw new ArgumentNullException("realTime");
     }
     if (historyProvider == null)
     {
          throw new ArgumentNullException("historyProvider");
     }
     if (commandQueue == null)
     {
         throw new ArgumentNullException("commandQueue");
     }
     if (mapFileProvider == null)
     {
         throw new ArgumentNullException("mapFileProvider");
     }
     _results = results;
     _setup = setup;
     _dataFeed = dataFeed;
     _transactions = transactions;
     _realTime = realTime;
     _historyProvider = historyProvider;
     _commandQueue = commandQueue;
     _mapFileProvider = mapFileProvider;
 }
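The eight identical null checks above could also be collapsed into a small guard helper. The sketch below only illustrates that pattern; the Guard type is hypothetical and not part of LEAN.

public static class Guard
{
    // Throws ArgumentNullException for a null argument, otherwise returns the value
    // so the call can be inlined into a field assignment.
    public static T NotNull<T>(T value, string parameterName) where T : class
    {
        if (value == null)
        {
            throw new ArgumentNullException(parameterName);
        }
        return value;
    }
}

// Usage inside the constructor body:
//     _results = Guard.NotNull(results, "results");
//     _setup   = Guard.NotNull(setup, "setup");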
        public ExceptionHandlingMarket(IDataFeed dataFeed, IOrderManager orderManager, Action<Exception> handler)
        {
            _handler = handler;
            _dataFeed = dataFeed;
            _orderManager = orderManager;

            _dataFeed.Second += time =>
            {
                try
                {
                    if (Second != null) Second(time);
                }
                catch (Exception ex)
                {
                    _handler(ex);
                }
            };
        }
Example #8
        /********************************************************
        * CLASS PROPERTIES
        *********************************************************/
        /********************************************************
        * CLASS METHODS
        *********************************************************/
        /// <summary>
        /// Process over the datafeed cross thread bridges to generate an enumerable sorted collection of the data, ready for a consumer
        /// to use and already synchronized in time.
        /// </summary>
        /// <param name="feed">DataFeed object</param>
        /// <param name="frontierOrigin">Starting date for the data feed</param>
        /// <returns>Enumerable of time-sorted data dictionaries, keyed by timestamp and then by subscription index</returns>
        public static IEnumerable<SortedDictionary<DateTime, Dictionary<int, List<BaseData>>>> GetData(IDataFeed feed, DateTime frontierOrigin)
        {
            //Initialize:
            long earlyBirdTicks = 0;
            var increment = TimeSpan.FromSeconds(1);
            _subscriptions = feed.Subscriptions.Count;
            var frontier = frontierOrigin;
            var nextEmitTime = DateTime.MinValue;
            var newData = new SortedDictionary<DateTime, Dictionary<int, List<BaseData>>>();

            //Wait for datafeeds to be ready, wait for first data to arrive:
            while (feed.Bridge.Length != _subscriptions) Thread.Sleep(100);

            //Get all data in queues: return as a sorted dictionary:
            while (!feed.EndOfBridges)
            {
                //Reset items which are not fill forward:
                earlyBirdTicks = 0;
                newData = new SortedDictionary<DateTime, Dictionary<int, List<BaseData>>>();

                // spin wait until the feed catches up to our frontier
                WaitForDataOrEndOfBridges(feed, frontier);

                for (var i = 0; i < _subscriptions; i++)
                {
                    //If there's data, download a little of it: Put 100 items of each type into the queue maximum
                    while (feed.Bridge[i].Count > 0)
                    {
                        //Log.Debug("DataStream.GetData(): Bridge has data: Bridge Count:" + feed.Bridge[i].Count.ToString());

                        //Look at first item on list, leave it there until time passes this item.
                        List<BaseData> result;
                        if (!feed.Bridge[i].TryPeek(out result) || (result.Count > 0 && result[0].Time > frontier))
                        {
                            if (result != null)
                            {
                                //Log.Debug("DataStream.GetData(): Result != null: " + result[0].Time.ToShortTimeString());
                                if (earlyBirdTicks == 0 || earlyBirdTicks > result[0].Time.Ticks) earlyBirdTicks = result[0].Time.Ticks;
                            }
                            break;
                        }

                        //Pull a grouped time list out of the bridge
                        List<BaseData> dataPoints;
                        if (feed.Bridge[i].TryDequeue(out dataPoints))
                        {
                            //Log.Debug("DataStream.GetData(): Bridge has data: DataPoints Count: " + dataPoints.Count);
                            foreach (var point in dataPoints)
                            {
                                //Add the new data point to the list of generic points in this timestamp.
                                if (!newData.ContainsKey(point.Time)) newData.Add(point.Time, new Dictionary<int, List<BaseData>>());
                                if (!newData[point.Time].ContainsKey(i)) newData[point.Time].Add(i, new List<BaseData>());
                                //Add the data point:
                                newData[point.Time][i].Add(point);
                                //Log.Debug("DataStream.GetData(): Added Datapoint: Time:" + point.Time.ToShortTimeString() + " Symbol: " + point.Symbol);
                            }
                        }
                        else
                        {
                            //Should never fail:
                            Log.Error("DataStream.GetData(): Failed to dequeue bridge item");
                        }
                    }
                }

                //Update the frontier and start again.
                if (earlyBirdTicks > 0)
                {
                    //Seek forward in time to next data event from stream: there's nothing here for us to do now: why loop over empty seconds?
                    frontier = (new DateTime(earlyBirdTicks));
                }
                else
                {
                    frontier += increment;
                }

                //Submit the next data array even if there's no data; emit every second to allow event handling (liquidate/stop/etc...)
                if (newData.Count > 0 || (Engine.LiveMode && DateTime.Now > nextEmitTime))
                {
                    nextEmitTime = DateTime.Now + TimeSpan.FromSeconds(1);
                    yield return newData;
                }
            }
            Log.Trace("DataStream.GetData(): All Streams Completed.");
        }
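A consumer of GetData simply iterates the yielded dictionaries in time order. The sketch below shows the shape of that loop; it assumes a feed and an algorithm start date are already available and is not taken from the engine itself.

        // Hypothetical consumption sketch for the enumerable returned by GetData.
        foreach (var timeSlice in DataStream.GetData(feed, algorithm.StartDate))
        {
            foreach (var timestamped in timeSlice)              // key: the frontier DateTime
            {
                foreach (var subscription in timestamped.Value) // key: subscription index
                {
                    foreach (var dataPoint in subscription.Value)
                    {
                        // hand each BaseData point to the algorithm, consolidators, etc.
                    }
                }
            }
        }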
Example #9
        /// <summary>
        /// Creates a new instance of the DataManager
        /// </summary>
        public DataManager(
            IDataFeed dataFeed,
            UniverseSelection universeSelection,
            IAlgorithm algorithm,
            ITimeKeeper timeKeeper,
            MarketHoursDatabase marketHoursDatabase)
        {
            _dataFeed         = dataFeed;
            UniverseSelection = universeSelection;
            UniverseSelection.SetDataManager(this);
            _algorithmSettings   = algorithm.Settings;
            AvailableDataTypes   = SubscriptionManager.DefaultDataTypes();
            _timeKeeper          = timeKeeper;
            _marketHoursDatabase = marketHoursDatabase;
            _liveMode            = algorithm.LiveMode;

            var liveStart = DateTime.UtcNow;

            // wire ourselves up to receive notifications when universes are added/removed
            algorithm.UniverseManager.CollectionChanged += (sender, args) =>
            {
                switch (args.Action)
                {
                case NotifyCollectionChangedAction.Add:
                    foreach (var universe in args.NewItems.OfType <Universe>())
                    {
                        var config = universe.Configuration;
                        var start  = algorithm.LiveMode ? liveStart : algorithm.UtcTime;

                        var end = algorithm.LiveMode ? Time.EndOfTime
                                : algorithm.EndDate.ConvertToUtc(algorithm.TimeZone);

                        Security security;
                        if (!algorithm.Securities.TryGetValue(config.Symbol, out security))
                        {
                            // create a canonical security object if it doesn't exist
                            security = new Security(
                                _marketHoursDatabase.GetExchangeHours(config),
                                config,
                                algorithm.Portfolio.CashBook[CashBook.AccountCurrency],
                                SymbolProperties.GetDefault(CashBook.AccountCurrency),
                                algorithm.Portfolio.CashBook
                                );
                        }
                        AddSubscription(
                            new SubscriptionRequest(true,
                                                    universe,
                                                    security,
                                                    config,
                                                    start,
                                                    end));
                    }
                    break;

                case NotifyCollectionChangedAction.Remove:
                    foreach (var universe in args.OldItems.OfType <Universe>())
                    {
                        RemoveSubscription(universe.Configuration, universe);
                    }
                    break;

                default:
                    throw new NotImplementedException("The specified action is not implemented: " + args.Action);
                }
            };
        }
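The universe wiring above is the standard INotifyCollectionChanged pattern. A stripped-down, self-contained version of the same handler shape, with a plain ObservableCollection standing in for algorithm.UniverseManager, looks like this (requires C# 9+ top-level statements):

using System;
using System.Collections.ObjectModel;
using System.Collections.Specialized;
using System.Linq;

var universes = new ObservableCollection<string>();
universes.CollectionChanged += (sender, args) =>
{
    switch (args.Action)
    {
        case NotifyCollectionChangedAction.Add:
            foreach (var name in args.NewItems.Cast<string>())
                Console.WriteLine("Add subscription for " + name);
            break;

        case NotifyCollectionChangedAction.Remove:
            foreach (var name in args.OldItems.Cast<string>())
                Console.WriteLine("Remove subscription for " + name);
            break;
    }
};

universes.Add("coarse-usa");    // hits the Add branch
universes.Remove("coarse-usa"); // hits the Remove branch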
Example #10
 /// <summary>
 /// Initialize the result handler with this result packet.
 /// </summary>
 /// <param name="job">Algorithm job packet for this result handler</param>
 /// <param name="messagingHandler">The handler responsible for communicating messages to listeners</param>
 /// <param name="api">The api instance used for handling logs</param>
 /// <param name="dataFeed"></param>
 /// <param name="setupHandler"></param>
 /// <param name="transactionHandler"></param>
 public void Initialize(AlgorithmNodePacket job, IMessagingHandler messagingHandler, IApi api, IDataFeed dataFeed, ISetupHandler setupHandler, ITransactionHandler transactionHandler)
 {
     _api = api;
     _messagingHandler   = messagingHandler;
     _transactionHandler = transactionHandler;
     _job = job as BacktestNodePacket;
     if (_job == null)
     {
         throw new Exception("BacktestingResultHandler.Constructor(): Submitted Job type invalid.");
     }
     _compileId  = _job.CompileId;
     _backtestId = _job.BacktestId;
 }
Example #11
        /// <summary>
        /// Select the realtime event handler set in the job.
        /// </summary>
        private static IRealTimeHandler GetRealTimeHandler(IAlgorithm algorithm, IBrokerage brokerage, IDataFeed feed, IResultHandler results, AlgorithmNodePacket job)
        {
            var rth = default(IRealTimeHandler);

            switch (job.RealTimeEndpoint)
            {
            //Don't fire based on system time but virtualized backtesting time.
            case RealTimeEndpoint.Backtesting:
                Log.Trace("Engine.GetRealTimeHandler(): Selected Backtesting RealTimeEvent Handler");
                rth = new BacktestingRealTimeHandler(algorithm, job);
                break;

            // Fire events based on real system clock time.
            case RealTimeEndpoint.LiveTrading:
                Log.Trace("Engine.GetRealTimeHandler(): Selected LiveTrading RealTimeEvent Handler");
                rth = new LiveTradingRealTimeHandler(algorithm, feed, results);
                break;
            }
            return(rth);
        }
Example #12
        /// <summary>
        /// Launch the algorithm manager to run this strategy
        /// </summary>
        /// <param name="job">Algorithm job</param>
        /// <param name="algorithm">Algorithm instance</param>
        /// <param name="feed">Datafeed object</param>
        /// <param name="transactions">Transaction manager object</param>
        /// <param name="results">Result handler object</param>
        /// <param name="realtime">Realtime processing object</param>
        /// <param name="token">Cancellation token</param>
        /// <remarks>Modify with caution</remarks>
        public void Run(AlgorithmNodePacket job, IAlgorithm algorithm, IDataFeed feed, ITransactionHandler transactions, IResultHandler results, IRealTimeHandler realtime, CancellationToken token)
        {
            //Initialize:
            _dataPointCount = 0;
            var startingPortfolioValue = algorithm.Portfolio.TotalPortfolioValue;
            var backtestMode           = (job.Type == PacketType.BacktestNode);
            var methodInvokers         = new Dictionary <Type, MethodInvoker>();
            var marginCallFrequency    = TimeSpan.FromMinutes(5);
            var nextMarginCallTime     = DateTime.MinValue;

            //Initialize Properties:
            _algorithmId    = job.AlgorithmId;
            _algorithmState = AlgorithmStatus.Running;
            _previousTime   = algorithm.StartDate.Date;

            //Create the method accessors to push generic types into algorithm: Find all OnData events:

            // Algorithm 2.0 data accessors
            var hasOnDataTradeBars = AddMethodInvoker <TradeBars>(algorithm, methodInvokers);
            var hasOnDataTicks     = AddMethodInvoker <Ticks>(algorithm, methodInvokers);

            // dividend and split events
            var hasOnDataDividends = AddMethodInvoker <Dividends>(algorithm, methodInvokers);
            var hasOnDataSplits    = AddMethodInvoker <Splits>(algorithm, methodInvokers);

            // Algorithm 3.0 data accessors
            var hasOnDataSlice = algorithm.GetType().GetMethods()
                                 .Where(x => x.Name == "OnData" && x.GetParameters().Length == 1 && x.GetParameters()[0].ParameterType == typeof(Slice))
                                 .FirstOrDefault(x => x.DeclaringType == algorithm.GetType()) != null;

            //Go through the subscription types and create invokers to trigger the event handlers for each custom type:
            foreach (var config in feed.Subscriptions)
            {
                //If type is a tradebar, combine tradebars and ticks into unified array:
                if (config.Type.Name != "TradeBar" && config.Type.Name != "Tick")
                {
                    //Get the matching method for this event handler - e.g. public void OnData(Quandl data) { .. }
                    var genericMethod = (algorithm.GetType()).GetMethod("OnData", new[] { config.Type });

                    //If we already have this Type-handler then don't add it to invokers again.
                    if (methodInvokers.ContainsKey(config.Type))
                    {
                        continue;
                    }

                    //If we couldn't find the event handler, let the user know we can't fire that event.
                    if (genericMethod == null && !hasOnDataSlice)
                    {
                        algorithm.RunTimeError = new Exception("Data event handler not found, please create a function matching this template: public void OnData(" + config.Type.Name + " data) {  }");
                        _algorithmState        = AlgorithmStatus.RuntimeError;
                        return;
                    }
                    if (genericMethod != null)
                    {
                        methodInvokers.Add(config.Type, genericMethod.DelegateForCallMethod());
                    }
                }
            }

            //Loop over the queues: get a data collection, then pass them all into relevant methods in the algorithm.
            Log.Trace("AlgorithmManager.Run(): Begin DataStream - Start: " + algorithm.StartDate + " Stop: " + algorithm.EndDate);
            foreach (var timeSlice in feed.Bridge.GetConsumingEnumerable(token))
            {
                // reset our timer on each loop
                _currentTimeStepTime = DateTime.UtcNow;

                //Check this backtest is still running:
                if (_algorithmState != AlgorithmStatus.Running)
                {
                    Log.Error(string.Format("AlgorithmManager.Run(): Algorthm state changed to {0} at {1}", _algorithmState, timeSlice.Time));
                    break;
                }

                //Execute with TimeLimit Monitor:
                if (token.IsCancellationRequested)
                {
                    Log.Error("AlgorithmManager.Run(): CancellationRequestion at " + timeSlice.Time);
                    return;
                }

                var time    = timeSlice.Time;
                var newData = timeSlice.Data;

                //If we're in backtest mode we need to capture the daily performance. We do this here directly
                //before updating the algorithm state with the new data from this time step, otherwise we'll
                //produce incorrect samples (they'll take into account this time step's new price values)
                if (backtestMode)
                {
                    //On day-change sample equity and daily performance for statistics calculations
                    if (_previousTime.Date != time.Date)
                    {
                        //Sample the portfolio value over time for chart.
                        results.SampleEquity(_previousTime, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4));

                        //Check for divide by zero
                        if (startingPortfolioValue == 0m)
                        {
                            results.SamplePerformance(_previousTime.Date, 0);
                        }
                        else
                        {
                            results.SamplePerformance(_previousTime.Date, Math.Round((algorithm.Portfolio.TotalPortfolioValue - startingPortfolioValue) * 100 / startingPortfolioValue, 10));
                        }
                        startingPortfolioValue = algorithm.Portfolio.TotalPortfolioValue;
                    }
                }

                //Update algorithm state after capturing performance from previous day

                //Set the algorithm and real time handler's time
                algorithm.SetDateTime(time);
                realtime.SetTime(time);

                //On each time step push the real time prices to the cashbook so we can have updated conversion rates
                algorithm.Portfolio.CashBook.Update(newData);

                //Update the securities properties: first before calling user code to avoid issues with data
                algorithm.Securities.Update(time, newData);

                // process fill models on the updated data before entering algorithm, applies to all non-market orders
                transactions.ProcessSynchronousEvents();

                //Check if the user has signalled Quit: loop over data until the day changes.
                if (algorithm.GetQuit())
                {
                    _algorithmState = AlgorithmStatus.Quit;
                    Log.Trace("AlgorithmManager.Run(): Algorithm quit requested.");
                    break;
                }
                if (algorithm.RunTimeError != null)
                {
                    _algorithmState = AlgorithmStatus.RuntimeError;
                    Log.Trace(string.Format("AlgorithmManager.Run(): Algorithm encountered a runtime error at {0}. Error: {1}", timeSlice.Time, algorithm.RunTimeError));
                    break;
                }

                // perform margin calls, in live mode we can also use realtime to emit these
                if (time >= nextMarginCallTime || (_liveMode && nextMarginCallTime > DateTime.Now))
                {
                    // determine if there are possible margin call orders to be executed
                    bool issueMarginCallWarning;
                    var  marginCallOrders = algorithm.Portfolio.ScanForMarginCall(out issueMarginCallWarning);
                    if (marginCallOrders.Count != 0)
                    {
                        try
                        {
                            // tell the algorithm we're about to issue the margin call
                            algorithm.OnMarginCall(marginCallOrders);
                        }
                        catch (Exception err)
                        {
                            algorithm.RunTimeError = err;
                            _algorithmState        = AlgorithmStatus.RuntimeError;
                            Log.Error("AlgorithmManager.Run(): RuntimeError: OnMarginCall: " + err.Message + " STACK >>> " + err.StackTrace);
                            return;
                        }

                        // execute the margin call orders
                        var executedTickets = algorithm.Portfolio.MarginCallModel.ExecuteMarginCall(marginCallOrders);
                        foreach (var ticket in executedTickets)
                        {
                            algorithm.Error(string.Format("{0} - Executed MarginCallOrder: {1} - Quantity: {2} @ {3}", algorithm.Time, ticket.Symbol, ticket.Quantity, ticket.OrderEvents.Last().FillPrice));
                        }
                    }
                    // we didn't perform a margin call, but got the warning flag back, so issue the warning to the algorithm
                    else if (issueMarginCallWarning)
                    {
                        try
                        {
                            algorithm.OnMarginCallWarning();
                        }
                        catch (Exception err)
                        {
                            algorithm.RunTimeError = err;
                            _algorithmState        = AlgorithmStatus.RuntimeError;
                            Log.Error("AlgorithmManager.Run(): RuntimeError: OnMarginCallWarning: " + err.Message + " STACK >>> " + err.StackTrace);
                        }
                    }

                    nextMarginCallTime = time + marginCallFrequency;
                }

                //Trigger the data events: Invoke the types we have data for:
                var newBars      = new TradeBars(time);
                var newTicks     = new Ticks(time);
                var newDividends = new Dividends(time);
                var newSplits    = new Splits(time);

                //Invoke all non-tradebars, non-ticks methods and build up the TradeBars and Ticks dictionaries
                // --> i == Subscription Configuration Index, so we don't need to compare types.
                foreach (var i in newData.Keys)
                {
                    //Data point and config of this point:
                    var dataPoints = newData[i];
                    var config     = feed.Subscriptions[i];

                    //Keep track of how many data points we've processed
                    _dataPointCount += dataPoints.Count;

                    //We don't want to pump data that we added just for currency conversions
                    if (config.IsInternalFeed)
                    {
                        continue;
                    }

                    //Create TradeBars Unified Data --> OR --> invoke generic data event. One loop.
                    //  Aggregate Dividends and Splits -- invoke portfolio application methods
                    foreach (var dataPoint in dataPoints)
                    {
                        var dividend = dataPoint as Dividend;
                        if (dividend != null)
                        {
                            Log.Trace("AlgorithmManager.Run(): Applying Dividend for " + dividend.Symbol);
                            // if this is a dividend apply to portfolio
                            algorithm.Portfolio.ApplyDividend(dividend);
                            if (hasOnDataDividends)
                            {
                                // and add to our data dictionary to pump into OnData(Dividends data)
                                newDividends.Add(dividend);
                            }
                            continue;
                        }

                        var split = dataPoint as Split;
                        if (split != null)
                        {
                            Log.Trace("AlgorithmManager.Run(): Applying Split for " + split.Symbol);

                            // if this is a split apply to portfolio
                            algorithm.Portfolio.ApplySplit(split);
                            if (hasOnDataSplits)
                            {
                                // and add to our data dictionary to pump into OnData(Splits data)
                                newSplits.Add(split);
                            }
                            continue;
                        }

                        //Update registered consolidators for this symbol index
                        try
                        {
                            for (var j = 0; j < config.Consolidators.Count; j++)
                            {
                                config.Consolidators[j].Update(dataPoint);
                            }
                        }
                        catch (Exception err)
                        {
                            algorithm.RunTimeError = err;
                            _algorithmState        = AlgorithmStatus.RuntimeError;
                            Log.Error("AlgorithmManager.Run(): RuntimeError: Consolidators update: " + err.Message);
                            return;
                        }

                        // TRADEBAR -- add to our dictionary
                        if (dataPoint.DataType == MarketDataType.TradeBar)
                        {
                            var bar = dataPoint as TradeBar;
                            if (bar != null)
                            {
                                newBars[bar.Symbol] = bar;
                                continue;
                            }
                        }

                        // TICK -- add to our dictionary
                        if (dataPoint.DataType == MarketDataType.Tick)
                        {
                            var tick = dataPoint as Tick;
                            if (tick != null)
                            {
                                List <Tick> ticks;
                                if (!newTicks.TryGetValue(tick.Symbol, out ticks))
                                {
                                    ticks = new List <Tick>(3);
                                    newTicks.Add(tick.Symbol, ticks);
                                }
                                ticks.Add(tick);
                                continue;
                            }
                        }

                        // if it was nothing else then it must be custom data

                        // CUSTOM DATA -- invoke on data method
                        //Send data into the generic algorithm event handlers
                        try
                        {
                            MethodInvoker methodInvoker;
                            if (methodInvokers.TryGetValue(config.Type, out methodInvoker))
                            {
                                methodInvoker(algorithm, dataPoint);
                            }
                        }
                        catch (Exception err)
                        {
                            algorithm.RunTimeError = err;
                            _algorithmState        = AlgorithmStatus.RuntimeError;
                            Log.Error("AlgorithmManager.Run(): RuntimeError: Custom Data: " + err.Message + " STACK >>> " + err.StackTrace);
                            return;
                        }
                    }
                }

                try
                {
                    // fire off the dividend and split events before pricing events
                    if (hasOnDataDividends && newDividends.Count != 0)
                    {
                        methodInvokers[typeof(Dividends)](algorithm, newDividends);
                    }
                    if (hasOnDataSplits && newSplits.Count != 0)
                    {
                        methodInvokers[typeof(Splits)](algorithm, newSplits);
                    }
                }
                catch (Exception err)
                {
                    algorithm.RunTimeError = err;
                    _algorithmState        = AlgorithmStatus.RuntimeError;
                    Log.Error("AlgorithmManager.Run(): RuntimeError: Dividends/Splits: " + err.Message + " STACK >>> " + err.StackTrace);
                    return;
                }

                //After we've fired all other events in this second, fire the pricing events:
                try
                {
                    if (hasOnDataTradeBars && newBars.Count > 0)
                    {
                        methodInvokers[typeof(TradeBars)](algorithm, newBars);
                    }
                    if (hasOnDataTicks && newTicks.Count > 0)
                    {
                        methodInvokers[typeof(Ticks)](algorithm, newTicks);
                    }
                }
                catch (Exception err)
                {
                    algorithm.RunTimeError = err;
                    _algorithmState        = AlgorithmStatus.RuntimeError;
                    Log.Error("AlgorithmManager.Run(): RuntimeError: New Style Mode: " + err.Message + " STACK >>> " + err.StackTrace);
                    return;
                }

                // EVENT HANDLER v3.0 -- all data in a single event
                var slice = new Slice(time, newData.Values.SelectMany(x => x),
                                      newBars.Count == 0 ? null : newBars,
                                      newTicks.Count == 0 ? null : newTicks,
                                      newSplits.Count == 0 ? null : newSplits,
                                      newDividends.Count == 0 ? null : newDividends
                                      );

                algorithm.OnData(slice);

                //If it's the historical/paper trading models, wait until market orders have been "filled"
                // Manually trigger the event handler to prevent thread switch.
                transactions.ProcessSynchronousEvents();

                //Save the previous time for the sample calculations
                _previousTime = time;

                // Process any required events of the results handler such as sampling assets, equity, or stock prices.
                results.ProcessSynchronousEvents();
            } // End of ForEach feed.Bridge.GetConsumingEnumerable

            // stop timing the loops
            _currentTimeStepTime = DateTime.MinValue;

            //Stream over:: Send the final packet and fire final events:
            Log.Trace("AlgorithmManager.Run(): Firing On End Of Algorithm...");
            try
            {
                algorithm.OnEndOfAlgorithm();
            }
            catch (Exception err)
            {
                _algorithmState        = AlgorithmStatus.RuntimeError;
                algorithm.RunTimeError = new Exception("Error running OnEndOfAlgorithm(): " + err.Message, err.InnerException);
                Log.Error("AlgorithmManager.OnEndOfAlgorithm(): " + err.Message + " STACK >>> " + err.StackTrace);
                return;
            }

            // Process any required events of the results handler such as sampling assets, equity, or stock prices.
            results.ProcessSynchronousEvents(forceProcess: true);

            //Liquidate Holdings for Calculations:
            if (_algorithmState == AlgorithmStatus.Liquidated && _liveMode)
            {
                Log.Trace("AlgorithmManager.Run(): Liquidating algorithm holdings...");
                algorithm.Liquidate();
                results.LogMessage("Algorithm Liquidated");
                results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Liquidated);
            }

            //Manually stopped the algorithm
            if (_algorithmState == AlgorithmStatus.Stopped)
            {
                Log.Trace("AlgorithmManager.Run(): Stopping algorithm...");
                results.LogMessage("Algorithm Stopped");
                results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Stopped);
            }

            //Backtest deleted.
            if (_algorithmState == AlgorithmStatus.Deleted)
            {
                Log.Trace("AlgorithmManager.Run(): Deleting algorithm...");
                results.DebugMessage("Algorithm Id:(" + job.AlgorithmId + ") Deleted by request.");
                results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Deleted);
            }

            //Algorithm finished, send regardless of commands:
            results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Completed);

            //Take final samples:
            results.SampleRange(algorithm.GetChartUpdates());
            results.SampleEquity(_previousTime, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4));
            results.SamplePerformance(_previousTime, Math.Round((algorithm.Portfolio.TotalPortfolioValue - startingPortfolioValue) * 100 / startingPortfolioValue, 10));
        } // End of Run();
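A driver for Run typically pushes the call onto its own thread and hands it a cancellation token it can trip from the outside. The following is only a sketch of that call site; the manager and its collaborators (job, algorithm, feed, transactions, results, realtime) are assumed to exist already.

        using (var cts = new CancellationTokenSource(TimeSpan.FromHours(10)))
        {
            var worker = new Thread(() => manager.Run(job, algorithm, feed, transactions, results, realtime, cts.Token))
            {
                Name = "Algorithm Manager Thread"
            };
            worker.Start();
            worker.Join(); // returns once the feed bridge completes or the token is cancelled
        }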
Example #13
 /// <summary>
 /// Initializes a new instance of the <see cref="UniverseSelection"/> class
 /// </summary>
 /// <param name="dataFeed">The data feed to add/remove subscriptions from</param>
 /// <param name="algorithm">The algorithm to add securities to</param>
 public UniverseSelection(IDataFeed dataFeed, IAlgorithm algorithm)
 {
     _dataFeed  = dataFeed;
     _algorithm = algorithm;
 }
Example #14
        /********************************************************
        * CLASS METHODS
        *********************************************************/
        /// <summary>
        /// Primary Analysis Thread:
        /// </summary>
        public static void Main(string[] args)
        {
            //Initialize:
            var algorithmPath = "";
            string mode = "RELEASE";
            AlgorithmNodePacket job = null;
            var algorithm = default(IAlgorithm);
            var startTime = DateTime.Now;
            Log.LogHandler = Composer.Instance.GetExportedValueByTypeName<ILogHandler>(Config.Get("log-handler", "CompositeLogHandler"));

            #if DEBUG
                mode = "DEBUG";
            #endif

            //Name thread for the profiler:
            Thread.CurrentThread.Name = "Algorithm Analysis Thread";
            Log.Trace("Engine.Main(): LEAN ALGORITHMIC TRADING ENGINE v" + Constants.Version + " Mode: " + mode);
            Log.Trace("Engine.Main(): Started " + DateTime.Now.ToShortTimeString());
            Log.Trace("Engine.Main(): Memory " + OS.ApplicationMemoryUsed + "Mb-App  " + +OS.TotalPhysicalMemoryUsed + "Mb-Used  " + OS.TotalPhysicalMemory + "Mb-Total");

            //Import external libraries specific to physical server location (cloud/local)
            try
            {
                // grab the right export based on configuration
                Api = Composer.Instance.GetExportedValueByTypeName<IApi>(Config.Get("api-handler"));
                Notify = Composer.Instance.GetExportedValueByTypeName<IMessagingHandler>(Config.Get("messaging-handler"));
                JobQueue = Composer.Instance.GetExportedValueByTypeName<IJobQueueHandler>(Config.Get("job-queue-handler"));
            }
            catch (CompositionException compositionException)
            {
                Log.Error("Engine.Main(): Failed to load library: " + compositionException);
            }

            //Setup packeting, queue and controls system: These don't do much locally.
            Api.Initialize();
            Notify.Initialize();
            JobQueue.Initialize();

            //Start monitoring the backtest active status:
            var statusPingThread = new Thread(StateCheck.Ping.Run);
            statusPingThread.Start();

            try
            {
                //Reset algo manager internal variables preparing for a new algorithm.
                AlgorithmManager.ResetManager();

                //Reset thread holders.
                var initializeComplete = false;
                Thread threadFeed = null;
                Thread threadTransactions = null;
                Thread threadResults = null;
                Thread threadRealTime = null;

                do
                {
                    //-> Pull job from QuantConnect job queue, or, pull local build:
                    job = JobQueue.NextJob(out algorithmPath); // Blocking.

                    // if the job version doesn't match this instance version then we can't process it
                    // we also don't want to reprocess redelivered live jobs
                    if (job.Version != Constants.Version || (LiveMode && job.Redelivered))
                    {
                        Log.Error("Engine.Run(): Job Version: " + job.Version + "  Deployed Version: " + Constants.Version);

                        //Tiny chance there was an uncontrolled collapse of a server, resulting in an old user task circulating.
                        //In this event kill the old algorithm and leave a message so the user can later review.
                        JobQueue.AcknowledgeJob(job);
                        Api.SetAlgorithmStatus(job.AlgorithmId, AlgorithmStatus.RuntimeError, _collapseMessage);
                        Notify.SetChannel(job.Channel);
                        Notify.RuntimeError(job.AlgorithmId, _collapseMessage);
                        job = null;
                    }
                } while (job == null);

                //-> Initialize messaging system
                Notify.SetChannel(job.Channel);

                //-> Create SetupHandler to configure internal algorithm state:
                SetupHandler = GetSetupHandler(job.SetupEndpoint);

                //-> Set the result handler type for this algorithm job, and launch the associated result thread.
                ResultHandler = GetResultHandler(job);
                threadResults = new Thread(ResultHandler.Run, 0) {Name = "Result Thread"};
                threadResults.Start();

                try
                {
                    // Save algorithm to cache, load algorithm instance:
                    algorithm = SetupHandler.CreateAlgorithmInstance(algorithmPath);

                    //Initialize the internal state of algorithm and job: executes the algorithm.Initialize() method.
                    initializeComplete = SetupHandler.Setup(algorithm, out _brokerage, job);

                    //If there are any reasons it failed, pass these back to the IDE.
                    if (!initializeComplete || algorithm.ErrorMessages.Count > 0 || SetupHandler.Errors.Count > 0)
                    {
                        initializeComplete = false;
                        //Get all the error messages: internal in algorithm and external in setup handler.
                        var errorMessage = String.Join(",", algorithm.ErrorMessages);
                        errorMessage += String.Join(",", SetupHandler.Errors);
                        ResultHandler.RuntimeError(errorMessage);
                        Api.SetAlgorithmStatus(job.AlgorithmId, AlgorithmStatus.RuntimeError);
                    }
                }
                catch (Exception err)
                {
                    var runtimeMessage = "Algorithm.Initialize() Error: " + err.Message + " Stack Trace: " + err.StackTrace;
                    ResultHandler.RuntimeError(runtimeMessage, err.StackTrace);
                    Api.SetAlgorithmStatus(job.AlgorithmId, AlgorithmStatus.RuntimeError, runtimeMessage);
                }

                //-> Using the job + initialization: load the designated handlers:
                if (initializeComplete)
                {
                    //-> Reset the backtest stopwatch; we're now running the algorithm.
                    startTime = DateTime.Now;

                    //Set algorithm as locked; set it to live mode if we're trading live, and set it to locked for no further updates.
                    algorithm.SetAlgorithmId(job.AlgorithmId);
                    algorithm.SetLiveMode(LiveMode);
                    algorithm.SetLocked();

                    //Load the associated handlers for data, transaction and realtime events:
                    ResultHandler.SetAlgorithm(algorithm);
                    DataFeed            = GetDataFeedHandler(algorithm, job);
                    TransactionHandler  = GetTransactionHandler(algorithm, _brokerage, ResultHandler, job);
                    RealTimeHandler     = GetRealTimeHandler(algorithm, _brokerage, DataFeed, ResultHandler, job);

                    //Set the error handlers for the brokerage asynchronous errors.
                    SetupHandler.SetupErrorHandler(ResultHandler, _brokerage);

                    //Send status to user the algorithm is now executing.
                    ResultHandler.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Running);

                    //Launch the data, transaction and realtime handlers into dedicated threads
                    threadFeed = new Thread(DataFeed.Run) {Name = "DataFeed Thread"};
                    threadTransactions = new Thread(TransactionHandler.Run) {Name = "Transaction Thread"};
                    threadRealTime = new Thread(RealTimeHandler.Run) {Name = "RealTime Thread"};

                    //Launch the data feed, result sending, and transaction models/handlers in separate threads.
                    threadFeed.Start(); // Data feed pushing data packets into thread bridge;
                    threadTransactions.Start(); // Transaction modeller scanning new order requests
                    threadRealTime.Start(); // RealTime scan time for time based events:

                    // Result manager scanning message queue: (started earlier)
                    ResultHandler.DebugMessage(string.Format("Launching analysis for {0} with LEAN Engine v{1}", job.AlgorithmId, Constants.Version));

                    try
                    {
                        // Execute the Algorithm Code:
                        var complete = Isolator.ExecuteWithTimeLimit(SetupHandler.MaximumRuntime, AlgorithmManager.TimeLoopWithinLimits, () =>
                        {
                            try
                            {
                                //Run Algorithm Job:
                                // -> Using this Data Feed,
                                // -> Send Orders to this TransactionHandler,
                                // -> Send Results to ResultHandler.
                                AlgorithmManager.Run(job, algorithm, DataFeed, TransactionHandler, ResultHandler, SetupHandler, RealTimeHandler);
                            }
                            catch (Exception err)
                            {
                                //Debugging at this level is difficult, stack trace needed.
                                Log.Error("Engine.Run", err);
                            }

                            Log.Trace("Engine.Run(): Exiting Algorithm Manager");

                            }, job.UserPlan == UserPlan.Free ? 1024 : MaximumRamAllocation);

                        if (!complete)
                        {
                            Log.Error("Engine.Main(): Failed to complete in time: " + SetupHandler.MaximumRuntime.ToString("F"));
                            throw new Exception("Failed to complete algorithm within " + SetupHandler.MaximumRuntime.ToString("F") + " seconds. Please make it run faster.");
                        }

                        // Algorithm runtime error:
                        if (algorithm.RunTimeError != null)
                        {
                            throw algorithm.RunTimeError;
                        }
                    }
                    catch (Exception err)
                    {
                        //Error running the user algorithm: purge datafeed, send error messages, set algorithm status to failed.
                        Log.Error("Engine.Run(): Breaking out of parent try-catch: " + err.Message + " " + err.StackTrace);
                        if (DataFeed != null) DataFeed.Exit();
                        if (ResultHandler != null)
                        {
                            var message = "Runtime Error: " + err.Message;
                            Log.Trace("Engine.Run(): Sending runtime error to user...");
                            ResultHandler.LogMessage(message);
                            ResultHandler.RuntimeError(message, err.StackTrace);
                            Api.SetAlgorithmStatus(job.AlgorithmId, AlgorithmStatus.RuntimeError, message + " Stack Trace: " + err.StackTrace);
                        }
                    }

                    //Send result data back: this entire code block could be rewritten.
                    // todo: - Split up statistics class, its enormous.
                    // todo: - Make a dedicated Statistics.Benchmark class.
                    // todo: - Move all creation and transmission of statistics out of primary engine loop.
                    // todo: - Statistics.Generate(algorithm, resulthandler, transactionhandler);

                    try
                    {
                        var charts = new Dictionary<string, Chart>(ResultHandler.Charts);
                        var orders = new Dictionary<int, Order>(algorithm.Transactions.Orders);
                        var holdings = new Dictionary<string, Holding>();
                        var statistics = new Dictionary<string, string>();
                        var banner = new Dictionary<string, string>();

                        try
                        {
                            //Generates error when things don't exist (no charting logged, runtime errors in main algo execution)
                            const string strategyEquityKey = "Strategy Equity";
                            const string equityKey = "Equity";
                            const string dailyPerformanceKey = "Daily Performance";

                            // make sure we've taken samples for these series before just blindly requesting them
                            if (charts.ContainsKey(strategyEquityKey) &&
                                charts[strategyEquityKey].Series.ContainsKey(equityKey) &&
                                charts[strategyEquityKey].Series.ContainsKey(dailyPerformanceKey))
                            {
                                var equity = charts[strategyEquityKey].Series[equityKey].Values;
                                var performance = charts[strategyEquityKey].Series[dailyPerformanceKey].Values;
                                var profitLoss =
                                    new SortedDictionary<DateTime, decimal>(algorithm.Transactions.TransactionRecord);
                                statistics = Statistics.Statistics.Generate(equity, profitLoss, performance,
                                    SetupHandler.StartingPortfolioValue, algorithm.Portfolio.TotalFees, 252);
                            }
                        }
                        catch (Exception err)
                        {
                            Log.Error("Algorithm.Node.Engine(): Error generating statistics packet: " + err.Message);
                        }

                        //Diagnostics Completed, Send Result Packet:
                        var totalSeconds = (DateTime.Now - startTime).TotalSeconds;
                        ResultHandler.DebugMessage(string.Format("Algorithm Id:({0}) completed in {1} seconds at {2}k data points per second. Processing total of {3} data points.",
                            job.AlgorithmId, totalSeconds.ToString("F2"), ((AlgorithmManager.DataPoints / (double)1000) / totalSeconds).ToString("F0"), AlgorithmManager.DataPoints.ToString("N0")));

                        ResultHandler.SendFinalResult(job, orders, algorithm.Transactions.TransactionRecord, holdings, statistics, banner);
                    }
                    catch (Exception err)
                    {
                        Log.Error("Engine.Main(): Error sending analysis result: " + err.Message + "  ST >> " + err.StackTrace);
                    }

                    //Before we return, send terminate commands to close up the threads
                    TransactionHandler.Exit();
                    DataFeed.Exit();
                    RealTimeHandler.Exit();
                }

                //Close result handler:
                ResultHandler.Exit();
                StateCheck.Ping.Exit();

                //Wait for the threads to complete:
                var ts = Stopwatch.StartNew();
                while ((ResultHandler.IsActive || (TransactionHandler != null && TransactionHandler.IsActive) || (DataFeed != null && DataFeed.IsActive)) && ts.ElapsedMilliseconds < 30 * 1000)
                {
                    Thread.Sleep(100); Log.Trace("Waiting for threads to exit...");
                }

                //Terminate threads still in active state.
                if (threadFeed != null && threadFeed.IsAlive) threadFeed.Abort();
                if (threadTransactions != null && threadTransactions.IsAlive) threadTransactions.Abort();
                if (threadResults != null && threadResults.IsAlive) threadResults.Abort();
                if (statusPingThread != null && statusPingThread.IsAlive) statusPingThread.Abort();

                if (_brokerage != null)
                {
                    _brokerage.Disconnect();
                }
                if (SetupHandler != null)
                {
                    SetupHandler.Dispose();
                }
                Log.Trace("Engine.Main(): Analysis Completed and Results Posted.");
            }
            catch (Exception err)
            {
                Log.Error("Engine.Main(): Error running algorithm: " + err.Message + " >> " + err.StackTrace);
            }
            finally
            {
                //No matter what for live mode; make sure we've set algorithm status in the API for "not running" conditions:
                if (LiveMode && AlgorithmManager.State != AlgorithmStatus.Running && AlgorithmManager.State != AlgorithmStatus.RuntimeError)
                    Api.SetAlgorithmStatus(job.AlgorithmId, AlgorithmManager.State);

                //Delete the message from the job queue:
                JobQueue.AcknowledgeJob(job);
                Log.Trace("Engine.Main(): Packet removed from queue: " + job.AlgorithmId);

                //Attempt to clean up ram usage:
                GC.Collect();
            }

            //Final disposals.
            Api.Dispose();

            // Make the console window pause so we can read log output before exiting and killing the application completely
            if (IsLocal)
            {
                Log.Trace("Engine.Main(): Analysis Complete. Press any key to continue.");
                Console.Read();
            }
            Log.LogHandler.Dispose();
        }
Example n. 15
        private static void ConsumeBridge(IDataFeed feed, TimeSpan timeout, bool alwaysInvoke, Action<TimeSlice> handler, bool noOutput = false)
        {
            bool startedReceivingData = false;
            foreach (var timeSlice in feed)
            {
                if (!noOutput)
                {
                    Console.WriteLine("\r\n" + "Now (EDT): {0} TimeSlice.Time (EDT): {1}",
                        DateTime.UtcNow.ConvertFromUtc(TimeZones.NewYork).ToString("o"),
                        timeSlice.Time.ConvertFromUtc(TimeZones.NewYork).ToString("o")
                        );
                }

                if (!startedReceivingData && timeSlice.Slice.Count != 0)
                {
                    startedReceivingData = true;
                }
                if (startedReceivingData || alwaysInvoke)
                {
                    handler(timeSlice);
                }
            }
        }
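A hedged usage sketch for the helper above: assuming the surrounding test has already constructed and initialized an IDataFeed instance named feed (not shown here), it could be driven like this to count the slices handled once data starts flowing.

// 'feed' is an assumed, already-initialized IDataFeed from the surrounding test
var sliceCount = 0;
ConsumeBridge(feed, TimeSpan.FromSeconds(10), alwaysInvoke: false, handler: timeSlice => sliceCount++, noOutput: true);
Console.WriteLine("Handled " + sliceCount + " time slices after data started arriving.");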
Example n. 16
 /// <summary>
 /// Initializes a new instance of the <see cref="UniverseSelection"/> class
 /// </summary>
 /// <param name="dataFeed">The data feed to add/remove subscriptions from</param>
 /// <param name="algorithm">The algorithm to add securities to</param>
 /// <param name="isLiveMode">True for live mode, false for back test mode</param>
 public UniverseSelection(IDataFeed dataFeed, IAlgorithm algorithm, bool isLiveMode)
 {
     _dataFeed = dataFeed;
     _algorithm = algorithm;
     _mapFileResolversByMarket = new Dictionary<string, MapFileResolver>();
 }
Example n. 17
 public DataManagerStub(IDataFeed dataFeed, IAlgorithm algorithm, ITimeKeeper timeKeeper)
     : this(dataFeed, algorithm, timeKeeper, MarketHoursDatabase.FromDataFolder(), SymbolPropertiesDatabase.FromDataFolder())
 {
 }
Example n. 18
 private void CreateDataFeed(
     FuncDataQueueHandler funcDataQueueHandler = null)
 {
     _feed = new TestableLiveTradingDataFeed(funcDataQueueHandler ?? new FuncDataQueueHandler(x => Enumerable.Empty <BaseData>()));
 }
 /// <summary>
 /// Initializes a new instance of the <see cref="LeanEngineAlgorithmHandlers"/> class from the specified handlers
 /// </summary>
 /// <param name="results">The result handler for communicating results from the algorithm</param>
 /// <param name="setup">The setup handler used to initialize algorithm state</param>
 /// <param name="dataFeed">The data feed handler used to pump data to the algorithm</param>
 /// <param name="transactions">The transaction handler used to process orders from the algorithm</param>
 /// <param name="realTime">The real time handler used to process real time events</param>
 /// <param name="historyProvider">The history provider used to process historical data requests</param>
 /// <param name="commandQueue">The command queue handler used to receive external commands for the algorithm</param>
 /// <param name="mapFileProvider">The map file provider used to retrieve map files for the data feed</param>
 /// <param name="factorFileProvider">Map file provider used as a map file source for the data feed</param>
 /// <param name="dataFileProvider">file provider used to retrieve security data if it is not on the file system</param>
 public LeanEngineAlgorithmHandlers(IResultHandler results,
                                    ISetupHandler setup,
                                    IDataFeed dataFeed,
                                    ITransactionHandler transactions,
                                    IRealTimeHandler realTime,
                                    IHistoryProvider historyProvider,
                                    ICommandQueueHandler commandQueue,
                                    IMapFileProvider mapFileProvider,
                                    IFactorFileProvider factorFileProvider,
                                    IDataFileProvider dataFileProvider
                                    )
 {
     if (results == null)
     {
         throw new ArgumentNullException("results");
     }
     if (setup == null)
     {
         throw new ArgumentNullException("setup");
     }
     if (dataFeed == null)
     {
         throw new ArgumentNullException("dataFeed");
     }
     if (transactions == null)
     {
         throw new ArgumentNullException("transactions");
     }
     if (realTime == null)
     {
         throw new ArgumentNullException("realTime");
     }
      if (historyProvider == null)
      {
          throw new ArgumentNullException("historyProvider");
      }
     if (commandQueue == null)
     {
         throw new ArgumentNullException("commandQueue");
     }
     if (mapFileProvider == null)
     {
         throw new ArgumentNullException("mapFileProvider");
     }
     if (factorFileProvider == null)
     {
         throw new ArgumentNullException("factorFileProvider");
     }
     if (dataFileProvider == null)
     {
         throw new ArgumentNullException("dataFileProvider");
     }
     _results            = results;
     _setup              = setup;
     _dataFeed           = dataFeed;
     _transactions       = transactions;
     _realTime           = realTime;
     _historyProvider    = historyProvider;
     _commandQueue       = commandQueue;
     _mapFileProvider    = mapFileProvider;
     _factorFileProvider = factorFileProvider;
     _dataFileProvider   = dataFileProvider;
 }
        /// <summary>
        /// Initializes a new instance of the <see cref="LeanEngineAlgorithmHandlers"/> class from the specified handlers
        /// </summary>
        /// <param name="results">The result handler for communicating results from the algorithm</param>
        /// <param name="setup">The setup handler used to initialize algorithm state</param>
        /// <param name="dataFeed">The data feed handler used to pump data to the algorithm</param>
        /// <param name="transactions">The transaction handler used to process orders from the algorithm</param>
        /// <param name="realTime">The real time handler used to process real time events</param>
        /// <param name="mapFileProvider">The map file provider used to retrieve map files for the data feed</param>
        /// <param name="factorFileProvider">Map file provider used as a map file source for the data feed</param>
        /// <param name="dataProvider">file provider used to retrieve security data if it is not on the file system</param>
        /// <param name="alphas">The alpha handler used to process generated insights</param>
        /// <param name="objectStore">The object store used for persistence</param>
        /// <param name="dataPermissionsManager">The data permission manager to use</param>
        public LeanEngineAlgorithmHandlers(IResultHandler results,
                                           ISetupHandler setup,
                                           IDataFeed dataFeed,
                                           ITransactionHandler transactions,
                                           IRealTimeHandler realTime,
                                           IMapFileProvider mapFileProvider,
                                           IFactorFileProvider factorFileProvider,
                                           IDataProvider dataProvider,
                                           IAlphaHandler alphas,
                                           IObjectStore objectStore,
                                           IDataPermissionManager dataPermissionsManager
                                           )
        {
            if (results == null)
            {
                throw new ArgumentNullException(nameof(results));
            }
            if (setup == null)
            {
                throw new ArgumentNullException(nameof(setup));
            }
            if (dataFeed == null)
            {
                throw new ArgumentNullException(nameof(dataFeed));
            }
            if (transactions == null)
            {
                throw new ArgumentNullException(nameof(transactions));
            }
            if (realTime == null)
            {
                throw new ArgumentNullException(nameof(realTime));
            }
            if (mapFileProvider == null)
            {
                throw new ArgumentNullException(nameof(mapFileProvider));
            }
            if (factorFileProvider == null)
            {
                throw new ArgumentNullException(nameof(factorFileProvider));
            }
            if (dataProvider == null)
            {
                throw new ArgumentNullException(nameof(dataProvider));
            }
            if (alphas == null)
            {
                throw new ArgumentNullException(nameof(alphas));
            }
            if (objectStore == null)
            {
                throw new ArgumentNullException(nameof(objectStore));
            }
            if (dataPermissionsManager == null)
            {
                throw new ArgumentNullException(nameof(dataPermissionsManager));
            }

            Results                = results;
            Setup                  = setup;
            DataFeed               = dataFeed;
            Transactions           = transactions;
            RealTime               = realTime;
            MapFileProvider        = mapFileProvider;
            FactorFileProvider     = factorFileProvider;
            DataProvider           = dataProvider;
            Alphas                 = alphas;
            ObjectStore            = objectStore;
            DataPermissionsManager = dataPermissionsManager;
        }
Example n. 21
        /********************************************************
         * CLASS PROPERTIES
         *********************************************************/

        /********************************************************
         * CLASS METHODS
         *********************************************************/
        /// <summary>
        /// Processes the data feed's cross-thread bridges to generate an enumerable, time-sorted collection of data,
        /// synchronized in time and ready for a consumer to use.
        /// </summary>
        /// <param name="feed">DataFeed object</param>
        /// <param name="frontierOrigin">Starting date for the data feed</param>
        /// <returns>An enumerable of data grouped by time, then by subscription index</returns>
        public static IEnumerable <SortedDictionary <DateTime, Dictionary <int, List <BaseData> > > > GetData(IDataFeed feed, DateTime frontierOrigin)
        {
            //Initialize:
            long earlyBirdTicks = 0;
            var  increment      = TimeSpan.FromSeconds(1);

            _subscriptions = feed.Subscriptions.Count;
            var frontier     = frontierOrigin;
            var nextEmitTime = DateTime.MinValue;

            //Wait for datafeeds to be ready, wait for first data to arrive:
            while (feed.Bridge.Length != _subscriptions)
            {
                Thread.Sleep(100);
            }

            //Get all data in queues: return as a sorted dictionary:
            while (!feed.EndOfBridges)
            {
                //Reset items which are not fill forward:
                earlyBirdTicks = 0;
                var newData = new SortedDictionary <DateTime, Dictionary <int, List <BaseData> > >();

                // spin wait until the feed catches up to our frontier
                WaitForDataOrEndOfBridges(feed, frontier);

                for (var i = 0; i < _subscriptions; i++)
                {
                    //If there's data, download a little of it: Put 100 items of each type into the queue maximum
                    while (feed.Bridge[i].Count > 0)
                    {
                        //Log.Debug("DataStream.GetData(): Bridge has data: Bridge Count:" + feed.Bridge[i].Count.ToString());

                        //Look at first item on list, leave it there until time passes this item.
                        List <BaseData> result;
                        if (!feed.Bridge[i].TryPeek(out result) || (result.Count > 0 && result[0].Time > frontier))
                        {
                            if (result != null)
                            {
                                //Log.Debug("DataStream.GetData(): Result != null: " + result[0].Time.ToShortTimeString());
                                if (earlyBirdTicks == 0 || earlyBirdTicks > result[0].Time.Ticks)
                                {
                                    earlyBirdTicks = result[0].Time.Ticks;
                                }
                            }
                            break;
                        }

                        //Pull a grouped time list out of the bridge
                        List <BaseData> dataPoints;
                        if (feed.Bridge[i].TryDequeue(out dataPoints))
                        {
                            //Log.Debug("DataStream.GetData(): Bridge has data: DataPoints Count: " + dataPoints.Count);
                            foreach (var point in dataPoints)
                            {
                                //Add the new data point to the list of generic points in this timestamp.
                                if (!newData.ContainsKey(point.Time))
                                {
                                    newData.Add(point.Time, new Dictionary <int, List <BaseData> >());
                                }
                                if (!newData[point.Time].ContainsKey(i))
                                {
                                    newData[point.Time].Add(i, new List <BaseData>());
                                }
                                //Add the data point:
                                newData[point.Time][i].Add(point);
                                //Log.Debug("DataStream.GetData(): Added Datapoint: Time:" + point.Time.ToShortTimeString() + " Symbol: " + point.Symbol);
                            }
                        }
                        else
                        {
                            //Should never fail:
                            Log.Error("DataStream.GetData(): Failed to dequeue bridge item");
                        }
                    }
                }

                //Update the frontier and start again.
                if (earlyBirdTicks > 0)
                {
                    //Seek forward in time to next data event from stream: there's nothing here for us to do now: why loop over empty seconds?
                    frontier = new DateTime(earlyBirdTicks);
                }
                else
                {
                    frontier += increment;
                }

                if (newData.Count > 0)
                {
                    yield return(newData);
                }

                //Allow a loop pass-through emit every second to allow event handling (liquidate/stop/etc...)
                if (Engine.LiveMode && DateTime.Now > nextEmitTime)
                {
                    nextEmitTime = DateTime.Now + TimeSpan.FromSeconds(1);
                    yield return(new SortedDictionary <DateTime, Dictionary <int, List <BaseData> > >());
                }
            }
            Log.Trace("DataStream.GetData(): All Streams Completed.");
        }
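The loop above is a time-frontier merge: peek the head of each subscription bridge, consume everything stamped at or before the frontier, and when every head lies in the future, jump the frontier straight to the earliest of those times instead of stepping through empty seconds. A self-contained sketch of that idea using plain queues (the symbols and timestamps are made up for illustration, and this is not Lean's API):

using System;
using System.Collections.Generic;
using System.Linq;

// two "subscriptions", each a time-ordered queue of (time, symbol) points
var queues = new[]
{
    new Queue<(DateTime Time, string Symbol)>(new[]
    {
        (new DateTime(2014, 1, 1, 9, 30, 0), "SPY"),
        (new DateTime(2014, 1, 1, 9, 32, 0), "SPY")
    }),
    new Queue<(DateTime Time, string Symbol)>(new[]
    {
        (new DateTime(2014, 1, 1, 9, 31, 0), "AAPL")
    })
};

var frontier = new DateTime(2014, 1, 1, 9, 30, 0);
while (queues.Any(q => q.Count > 0))
{
    long earlyBirdTicks = 0;
    foreach (var queue in queues)
    {
        while (queue.Count > 0)
        {
            var head = queue.Peek();
            if (head.Time > frontier)
            {
                // head is in the future: remember the earliest future time and move on
                if (earlyBirdTicks == 0 || head.Time.Ticks < earlyBirdTicks) earlyBirdTicks = head.Time.Ticks;
                break;
            }
            Console.WriteLine(frontier.ToString("HH:mm") + " -> " + queue.Dequeue().Symbol);
        }
    }
    // jump the frontier to the next data event rather than looping over empty seconds
    if (earlyBirdTicks > 0) frontier = new DateTime(earlyBirdTicks);
}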
Example n. 22
        /********************************************************
         * CLASS METHODS
         *********************************************************/
        /// <summary>
        /// Launch the algorithm manager to run this strategy
        /// </summary>
        /// <param name="job">Algorithm job</param>
        /// <param name="algorithm">Algorithm instance</param>
        /// <param name="feed">Datafeed object</param>
        /// <param name="transactions">Transaction manager object</param>
        /// <param name="results">Result handler object</param>
        /// <param name="setup">Setup handler object</param>
        /// <param name="realtime">Realtime processing object</param>
        /// <remarks>Modify with caution</remarks>
        public static void Run(AlgorithmNodePacket job, IAlgorithm algorithm, IDataFeed feed, ITransactionHandler transactions, IResultHandler results, ISetupHandler setup, IRealTimeHandler realtime)
        {
            //Initialize:
            var backwardsCompatibilityMode = false;
            var tradebarsType          = typeof(TradeBars);
            var ticksType              = typeof(Ticks);
            var startingPortfolioValue = setup.StartingCapital;
            var backtestMode           = (job.Type == PacketType.BacktestNode);
            var methodInvokers         = new Dictionary <Type, MethodInvoker>();

            //Initialize Properties:
            _frontier       = setup.StartingDate;
            _runtimeError   = null;
            _algorithmId    = job.AlgorithmId;
            _algorithmState = AlgorithmStatus.Running;
            _previousTime   = setup.StartingDate.Date;

            //Create the method accessors to push generic types into algorithm: Find all OnData events:

            //Algorithm 1.0 Data Accessors.
            //If the users defined these methods, add them in manually. This allows keeping backwards compatibility to algorithm 1.0.
            var oldTradeBarsMethodInfo = (algorithm.GetType()).GetMethod("OnTradeBar", new[] { typeof(Dictionary <string, TradeBar>) });
            var oldTicksMethodInfo     = (algorithm.GetType()).GetMethod("OnTick", new[] { typeof(Dictionary <string, List <Tick> >) });

            //Algorithm 2.0 Data Generics Accessors.
            //New hidden access to tradebars with custom type.
            var newTradeBarsMethodInfo = (algorithm.GetType()).GetMethod("OnData", new[] { tradebarsType });
            var newTicksMethodInfo     = (algorithm.GetType()).GetMethod("OnData", new[] { ticksType });

            if (newTradeBarsMethodInfo == null && newTicksMethodInfo == null)
            {
                backwardsCompatibilityMode = true;
                if (oldTradeBarsMethodInfo != null)
                {
                    methodInvokers.Add(tradebarsType, oldTradeBarsMethodInfo.DelegateForCallMethod());
                }
                if (oldTicksMethodInfo != null)
                {
                    methodInvokers.Add(ticksType, oldTicksMethodInfo.DelegateForCallMethod());
                }
            }
            else
            {
                backwardsCompatibilityMode = false;
                if (newTradeBarsMethodInfo != null)
                {
                    methodInvokers.Add(tradebarsType, newTradeBarsMethodInfo.DelegateForCallMethod());
                }
                if (newTicksMethodInfo != null)
                {
                    methodInvokers.Add(ticksType, newTicksMethodInfo.DelegateForCallMethod());
                }
            }

            //Go through the subscription types and create invokers to trigger the event handlers for each custom type:
            foreach (var config in feed.Subscriptions)
            {
                //If type is a tradebar, combine tradebars and ticks into unified array:
                if (config.Type.Name != "TradeBar" && config.Type.Name != "Tick")
                {
                    //Get the matching method for this event handler - e.g. public void OnData(Quandl data) { .. }
                    var genericMethod = (algorithm.GetType()).GetMethod("OnData", new[] { config.Type });

                    //If we already have a handler for this type, don't add it to the invokers again.
                    if (methodInvokers.ContainsKey(config.Type))
                    {
                        continue;
                    }

                    //If we couldn't find the event handler, let the user know we can't fire that event.
                    if (genericMethod == null)
                    {
                        _runtimeError   = new Exception("Data event handler not found, please create a function matching this template: public void OnData(" + config.Type.Name + " data) {  }");
                        _algorithmState = AlgorithmStatus.RuntimeError;
                        return;
                    }
                    methodInvokers.Add(config.Type, genericMethod.DelegateForCallMethod());
                }
            }

            //Loop over the queues: get a data collection, then pass them all into the relevant methods in the algorithm.
            Log.Debug("AlgorithmManager.Run(): Algorithm initialized, launching time loop.");
            foreach (var newData in DataStream.GetData(feed, setup.StartingDate))
            {
                //Check this backtest is still running:
                if (_algorithmState != AlgorithmStatus.Running)
                {
                    break;
                }

                //Go over each time stamp we've collected, pass it into the algorithm in order:
                foreach (var time in newData.Keys)
                {
                    //Set the time frontier:
                    _frontier = time;

                    //Execute with TimeLimit Monitor:
                    if (Isolator.IsCancellationRequested)
                    {
                        return;
                    }

                    //Fire end-of-day processing when the time packet we just processed has moved past the previous date
                    if (backtestMode)
                    {
                        //Refresh the realtime event monitor:
                        //in backtest mode use the algorithms clock as realtime.
                        realtime.SetTime(time);

                        //On day-change sample equity and daily performance for statistics calculations
                        if (_previousTime.Date != time.Date)
                        {
                            //Sample the portfolio value over time for chart.
                            results.SampleEquity(_previousTime, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4));
                            if (startingPortfolioValue == 0)
                            {
                                results.SamplePerformance(_previousTime.Date, 0);
                            }
                            else
                            {
                                results.SamplePerformance(_previousTime.Date, Math.Round((algorithm.Portfolio.TotalPortfolioValue - startingPortfolioValue) * 100 / startingPortfolioValue, 10));
                            }
                            startingPortfolioValue = algorithm.Portfolio.TotalPortfolioValue;
                        }
                    }

                    //Check whether the user has signalled Quit: if so, stop processing data.
                    if (algorithm.GetQuit())
                    {
                        _algorithmState = AlgorithmStatus.Quit;
                        break;
                    }

                    //Pass in the new time first:
                    algorithm.SetDateTime(time);

                    //Trigger the data events: Invoke the types we have data for:
                    var oldBars  = new Dictionary <string, TradeBar>();
                    var oldTicks = new Dictionary <string, List <Tick> >();
                    var newBars  = new TradeBars(time);
                    var newTicks = new Ticks(time);

                    //Invoke all non-tradebars, non-ticks methods:
                    // --> i == Subscription Configuration Index, so we don't need to compare types.
                    foreach (var i in newData[time].Keys)
                    {
                        //Data point and config of this point:
                        var dataPoints = newData[time][i];
                        var config     = feed.Subscriptions[i];

                        //Create TradeBars Unified Data --> OR --> invoke generic data event. One loop.
                        foreach (var dataPoint in dataPoints)
                        {
                            //Update the securities properties: first before calling user code to avoid issues with data
                            algorithm.Securities.Update(time, dataPoint);

                            //Update registered consolidators for this symbol index
                            for (var j = 0; j < config.Consolidators.Count; j++)
                            {
                                config.Consolidators[j].Update(dataPoint);
                            }

                            switch (config.Type.Name)
                            {
                            case "TradeBar":
                                var bar = dataPoint as TradeBar;
                                try
                                {
                                    if (bar != null)
                                    {
                                        if (backwardsCompatibilityMode)
                                        {
                                            if (!oldBars.ContainsKey(bar.Symbol))
                                            {
                                                oldBars.Add(bar.Symbol, bar);
                                            }
                                        }
                                        else
                                        {
                                            if (!newBars.ContainsKey(bar.Symbol))
                                            {
                                                newBars.Add(bar.Symbol, bar);
                                            }
                                        }
                                    }
                                }
                                catch (Exception err)
                                {
                                    Log.Error(time.ToLongTimeString() + " >> " + bar.Time.ToLongTimeString() + " >> " + bar.Symbol + " >> " + bar.Value.ToString("C"));
                                    Log.Error("AlgorithmManager.Run(): Failed to add TradeBar (" + bar.Symbol + ") Time: (" + time.ToLongTimeString() + ") Count:(" + newBars.Count + ") " + err.Message);
                                }
                                break;

                            case "Tick":
                                var tick = dataPoint as Tick;
                                if (tick != null)
                                {
                                    if (backwardsCompatibilityMode)
                                    {
                                        if (!oldTicks.ContainsKey(tick.Symbol))
                                        {
                                            oldTicks.Add(tick.Symbol, new List <Tick>());
                                        }
                                        oldTicks[tick.Symbol].Add(tick);
                                    }
                                    else
                                    {
                                        if (!newTicks.ContainsKey(tick.Symbol))
                                        {
                                            newTicks.Add(tick.Symbol, new List <Tick>());
                                        }
                                        newTicks[tick.Symbol].Add(tick);
                                    }
                                }
                                break;

                            default:
                                //Send data into the generic algorithm event handlers
                                try
                                {
                                    methodInvokers[config.Type](algorithm, dataPoint);
                                }
                                catch (Exception err)
                                {
                                    _runtimeError   = err;
                                    _algorithmState = AlgorithmStatus.RuntimeError;
                                    Log.Debug("AlgorithmManager.Run(): RuntimeError: Custom Data: " + err.Message + " STACK >>> " + err.StackTrace);
                                    return;
                                }
                                break;
                            }
                        }
                    }

                    //After we've fired all other events in this second, fire the pricing events:
                    if (backwardsCompatibilityMode)
                    {
                        //Log.Debug("AlgorithmManager.Run(): Invoking v1.0 Event Handlers...");
                        try
                        {
                            if (oldTradeBarsMethodInfo != null && oldBars.Count > 0)
                            {
                                methodInvokers[tradebarsType](algorithm, oldBars);
                            }
                            if (oldTicksMethodInfo != null && oldTicks.Count > 0)
                            {
                                methodInvokers[ticksType](algorithm, oldTicks);
                            }
                        }
                        catch (Exception err)
                        {
                            _runtimeError   = err;
                            _algorithmState = AlgorithmStatus.RuntimeError;
                            Log.Debug("AlgorithmManager.Run(): RuntimeError: Backwards Compatibility Mode: " + err.Message + " STACK >>> " + err.StackTrace);
                            return;
                        }
                    }
                    else
                    {
                        //Log.Debug("AlgorithmManager.Run(): Invoking v2.0 Event Handlers...");
                        try
                        {
                            if (newTradeBarsMethodInfo != null && newBars.Count > 0)
                            {
                                methodInvokers[tradebarsType](algorithm, newBars);
                            }
                            if (newTicksMethodInfo != null && newTicks.Count > 0)
                            {
                                methodInvokers[ticksType](algorithm, newTicks);
                            }
                        }
                        catch (Exception err)
                        {
                            _runtimeError   = err;
                            _algorithmState = AlgorithmStatus.RuntimeError;
                            Log.Debug("AlgorithmManager.Run(): RuntimeError: New Style Mode: " + err.Message + " STACK >>> " + err.StackTrace);
                            return;
                        }
                    }

                    //If it's the historical/paper trading models, wait until market orders have been "filled"
                    // Manually trigger the event handler to prevent thread switch.
                    transactions.ProcessSynchronousEvents();

                    //Save the previous time for the sample calculations
                    _previousTime = time;
                } // End of Time Loop

                // Process any required events of the results handler such as sampling assets, equity, or stock prices.
                results.ProcessSynchronousEvents();
            } // End of ForEach DataStream

            //Stream over:: Send the final packet and fire final events:
            Log.Trace("AlgorithmManager.Run(): Firing On End Of Algorithm...");
            try
            {
                algorithm.OnEndOfAlgorithm();
            }
            catch (Exception err)
            {
                _algorithmState = AlgorithmStatus.RuntimeError;
                _runtimeError   = new Exception("Error running OnEndOfAlgorithm(): " + err.Message, err.InnerException);
                Log.Debug("AlgorithmManager.OnEndOfAlgorithm(): " + err.Message + " STACK >>> " + err.StackTrace);
                return;
            }

            // Process any required events of the results handler such as sampling assets, equity, or stock prices.
            results.ProcessSynchronousEvents(forceProcess: true);

            //Liquidate Holdings for Calculations:
            if (_algorithmState == AlgorithmStatus.Liquidated || !Engine.LiveMode)
            {
                Log.Trace("AlgorithmManager.Run(): Liquidating algorithm holdings...");
                algorithm.Liquidate();
                results.LogMessage("Algorithm Liquidated");
                results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Liquidated);
            }

            //Manually stopped the algorithm
            if (_algorithmState == AlgorithmStatus.Stopped)
            {
                Log.Trace("AlgorithmManager.Run(): Stopping algorithm...");
                results.LogMessage("Algorithm Stopped");
                results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Stopped);
            }

            //Backtest deleted.
            if (_algorithmState == AlgorithmStatus.Deleted)
            {
                Log.Trace("AlgorithmManager.Run(): Deleting algorithm...");
                results.DebugMessage("Algorithm Id:(" + job.AlgorithmId + ") Deleted by request.");
                results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Deleted);
            }

            //Algorithm finished, send regardless of commands:
            results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Completed);

            //Take final samples:
            results.SampleRange(algorithm.GetChartUpdates());
            results.SampleEquity(_frontier, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4));
            results.SamplePerformance(_frontier, Math.Round((algorithm.Portfolio.TotalPortfolioValue - startingPortfolioValue) * 100 / startingPortfolioValue, 10));
        } // End of Run();
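Run() above maps each subscribed data type to its matching OnData overload once, up front, and then dispatches every incoming point through that map (using fasterflect's DelegateForCallMethod to avoid per-call reflection cost). The standalone sketch below shows the same type-to-handler dispatch idea with plain MethodInfo.Invoke and made-up data types; it is an illustration of the pattern, not Lean's implementation.

using System;
using System.Collections.Generic;
using System.Reflection;

// toy stand-ins for custom data types and an algorithm exposing matching OnData overloads
class Quandl  { public decimal Value; }
class Weather { public decimal Temperature; }

class ToyAlgorithm
{
    public void OnData(Quandl data)  { Console.WriteLine("Quandl: "  + data.Value); }
    public void OnData(Weather data) { Console.WriteLine("Weather: " + data.Temperature); }
}

class DispatchSketch
{
    static void Main()
    {
        var algorithm = new ToyAlgorithm();
        var invokers = new Dictionary<Type, MethodInfo>();

        // build the type-to-handler map once, exactly as Run() does for each subscription type
        foreach (var type in new[] { typeof(Quandl), typeof(Weather) })
        {
            var method = algorithm.GetType().GetMethod("OnData", new[] { type });
            if (method == null) throw new Exception("Data event handler not found for " + type.Name);
            invokers[type] = method;
        }

        // dispatch each incoming point to the handler registered for its runtime type
        var points = new object[] { new Quandl { Value = 1.23m }, new Weather { Temperature = 21m } };
        foreach (var point in points)
        {
            invokers[point.GetType()].Invoke(algorithm, new[] { point });
        }
    }
}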
 /// <summary>
 /// Initialize the result handler with this result packet.
 /// </summary>
 /// <param name="job">Algorithm job packet for this result handler</param>
 /// <param name="messagingHandler"></param>
 /// <param name="api"></param>
 /// <param name="dataFeed"></param>
 /// <param name="setupHandler"></param>
 /// <param name="transactionHandler"></param>
 public void Initialize(AlgorithmNodePacket job, IMessagingHandler messagingHandler, IApi api, IDataFeed dataFeed, ISetupHandler setupHandler, ITransactionHandler transactionHandler)
 {
     _api = api;
     _dataFeed = dataFeed;
     _messagingHandler = messagingHandler;
     _setupHandler = setupHandler;
     _transactionHandler = transactionHandler;
     _job = job as LiveNodePacket;
     if (_job == null) throw new Exception("LiveResultHandler.Initialize(): Submitted Job type invalid.");
     _deployId = _job.DeployId;
     _compileId = _job.CompileId;
 }
Example n. 24
 private void ConsumeBridge(IAlgorithm algorithm, IDataFeed feed, TimeSpan timeout, Action <TimeSlice> handler)
 {
     ConsumeBridge(algorithm, feed, timeout, false, handler);
 }
Example n. 25
 private static void ConsumeBridge(IDataFeed feed, Action<TimeSlice> handler)
 {
     ConsumeBridge(feed, TimeSpan.FromSeconds(10), handler);
 }
Example n. 26
        private void SubscribeAlert(AlertSubscribeRequest aRequest)
        {
            var errorString = TradeScript.Validate(aRequest.Script);

            if (aRequest.Script == string.Empty)
            {
                errorString = "Script can not be empty";
            }

            if (errorString != string.Empty)
            {
                var response = new AlertSubscribeResponse
                {
                    Id        = aRequest.Id,
                    Error     = errorString,
                    AlertName = aRequest.AlertName,
                    Alert     = new Alert
                    {
                        Name   = aRequest.Name,
                        Symbol = aRequest.Symbol
                    },
                    User = aRequest.User
                };
                PushResponses(aRequest.User.ID, new ResponseMessage[] { response });
                return;
            }

            var       selection = new HistoryParameters(aRequest.Id, aRequest.Symbol, aRequest.Periodicity, aRequest.Interval, aRequest.BarsCount);
            IDataFeed aDataFeed = GetDataFeedByName(aRequest.Symbol.DataFeed);

            if (aDataFeed != null)
            {
                GetHistoryCtx aCtx = GetHistoryCtx(selection, aDataFeed.TimeZoneInfo);
                aCtx.Request  = new HistoryRequest(selection);
                aCtx.DataFeed = aDataFeed;
                ThreadPool.QueueUserWorkItem(o =>
                {
                    aDataFeed.GetHistory(aCtx, (ctx, bars) =>
                    {
                        Level1Subscribers subscribers = null;
                        var symbolItem = new SymbolItem()
                        {
                            DataFeed = aRequest.Symbol.DataFeed,
                            Exchange = aRequest.Symbol.Exchange,
                            Symbol   = aRequest.Symbol.Symbol,
                            Type     = aRequest.Symbol.Type
                        };
                        lock (m_Level1SubscribersBySymbols)
                        {
                            if (!m_Level1SubscribersBySymbols.TryGetValue(aRequest.Symbol, out subscribers))
                            {
                                subscribers                  = new Level1Subscribers();
                                subscribers.Subscribers      = new List <string>();
                                subscribers.AlertSubscribers = new List <AlertSubscription>();
                                m_Level1SubscribersBySymbols.Add(aRequest.Symbol, subscribers);
                            }
                        }
                        AlertSubscription alert = new AlertSubscription()
                        {
                            Id              = aRequest.Id,
                            AlertName       = aRequest.AlertName,
                            Symbol          = symbolItem,
                            Name            = aRequest.Name,
                            Periodicity     = aRequest.Periodicity,
                            Interval        = aRequest.Interval,
                            Script          = aRequest.Script,
                            UserSessionId   = aRequest.User.ID,
                            Login           = aRequest.User.Login,
                            CalculationType = aRequest.CalculationType
                        };
                        alert.InitAlert(bars);
                        lock (subscribers.AlertSubscribers)
                        {
                            subscribers.AlertSubscribers.Add(alert);
                        }
                        aDataFeed.Subscribe(aRequest.Symbol);
                    });
                });
            }
        }
Example n. 27
 public DataManagerStub(IDataFeed dataFeed, IAlgorithm algorithm)
     : this(dataFeed, algorithm, new TimeKeeper(DateTime.UtcNow, TimeZones.NewYork))
 {
 }
Example n. 28
        /********************************************************
         * CLASS METHODS
         *********************************************************/
        /// <summary>
         /// Processes the data feed's cross-thread bridges to generate an enumerable, time-sorted collection of data,
         /// synchronized in time and ready for a consumer to use.
         /// </summary>
         /// <param name="feed">DataFeed object</param>
         /// <param name="frontierOrigin">Starting date for the data feed</param>
         /// <returns>An enumerable of data grouped by subscription index</returns>
        public static IEnumerable <Dictionary <int, List <BaseData> > > GetData(IDataFeed feed, DateTime frontierOrigin)
        {
            //Initialize:
            long earlyBirdTicks = 0;

            _subscriptions = feed.Subscriptions.Count;
            AlorithmTime   = frontierOrigin;
            long algorithmTime = AlorithmTime.Ticks;
            var  frontier      = frontierOrigin;
            var  nextEmitTime  = DateTime.MinValue;
            var  periods       = feed.Subscriptions.Select(x => x.Resolution.ToTimeSpan()).ToArray();

            //Wait for datafeeds to be ready, wait for first data to arrive:
            while (feed.Bridge.Length != _subscriptions)
            {
                Thread.Sleep(100);
            }

            // clear data first when in live mode, start with fresh data
            if (Engine.LiveMode)
            {
                feed.PurgeData();
            }

            //Get all data in queues: return as a sorted dictionary:
            while (!feed.EndOfBridges)
            {
                //Reset items which are not fill forward:
                earlyBirdTicks = 0;
                var newData = new Dictionary <int, List <BaseData> >();

                // spin wait until the feed catches up to our frontier
                WaitForDataOrEndOfBridges(feed, frontier);

                for (var i = 0; i < _subscriptions; i++)
                {
                    //If there's data on the bridge, check to see if it's time to pull it off, if it's in the future
                    // we'll record the time as 'earlyBirdTicks' so we can fast forward the frontier time
                    while (feed.Bridge[i].Count > 0)
                    {
                        //Look at first item on list, leave it there until time passes this item.
                        List <BaseData> result;
                        if (!feed.Bridge[i].TryPeek(out result))
                        {
                            // if there's no item skip to the next subscription
                            break;
                        }
                        if (result.Count > 0 && result[0].EndTime > frontier)
                        {
                            // we have at least one item, check to see if it's ahead of the frontier,
                            // if so, keep track of how many ticks in the future it is
                            if (earlyBirdTicks == 0 || earlyBirdTicks > result[0].EndTime.Ticks)
                            {
                                earlyBirdTicks = result[0].EndTime.Ticks;
                            }
                            break;
                        }

                        //Pull a grouped time list out of the bridge
                        List <BaseData> dataPoints;
                        if (feed.Bridge[i].TryDequeue(out dataPoints))
                        {
                            // round the time down based on the requested resolution for fill forward data
                            // this is a temporary fix, long term fill forward logic should be moved into this class
                            foreach (var point in dataPoints)
                            {
                                if (point.IsFillForward)
                                {
                                    point.Time = point.Time.RoundDown(periods[i]);
                                }
                                if (algorithmTime < point.EndTime.Ticks)
                                {
                                    // set this to least advanced end point in time, pre rounding
                                    algorithmTime = point.EndTime.Ticks;
                                }
                            }
                            // add the list to the collection to be yielded
                            newData[i] = dataPoints;
                        }
                        else
                        {
                            //Should never fail:
                            Log.Error("DataStream.GetData(): Failed to dequeue bridge item");
                        }
                    }
                }

                if (newData.Count > 0)
                {
                    AlorithmTime = new DateTime(algorithmTime);
                    yield return(newData);
                }

                //Update the frontier and start again.
                if (earlyBirdTicks > 0)
                {
                    //Seek forward in time to next data event from stream: there's nothing here for us to do now: why loop over empty seconds
                    frontier = new DateTime(earlyBirdTicks);
                }
                else if (feed.EndOfBridges)
                {
                    // we're out of data or quit
                    break;
                }

                //Allow a loop pass-through emit every second to allow event handling (liquidate/stop/etc...)
                if (Engine.LiveMode && DateTime.Now > nextEmitTime)
                {
                    AlorithmTime = DateTime.Now.RoundDown(periods.Min());
                    nextEmitTime = DateTime.Now + TimeSpan.FromSeconds(1);
                    yield return(new Dictionary <int, List <BaseData> >());
                }
            }
            Log.Trace("DataStream.GetData(): All Streams Completed.");
        }
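The fill-forward fix above rounds each fill-forward point's time down to the subscription's resolution before it is handed to the consumer. RoundDown there is Lean's extension method; the local helper below is a stand-in sketch of the truncation it is assumed to perform (round a time down to the nearest multiple of an interval):

using System;

class RoundDownSketch
{
    // stand-in for the RoundDown extension: truncate a time to a multiple of the interval
    static DateTime RoundDown(DateTime time, TimeSpan interval)
    {
        return interval == TimeSpan.Zero ? time : time.AddTicks(-(time.Ticks % interval.Ticks));
    }

    static void Main()
    {
        var fillForwardTime = new DateTime(2014, 1, 2, 9, 31, 27);
        Console.WriteLine(RoundDown(fillForwardTime, TimeSpan.FromMinutes(1))); // 9:31:00
        Console.WriteLine(RoundDown(fillForwardTime, TimeSpan.FromHours(1)));   // 9:00:00
    }
}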
Example n. 29
        private IEnumerable <TimeSlice> Stream(AlgorithmNodePacket job, IAlgorithm algorithm, IDataFeed feed, IResultHandler results, CancellationToken cancellationToken)
        {
            bool setStartTime = false;
            var  timeZone     = algorithm.TimeZone;
            var  history      = algorithm.HistoryProvider;

            // fulfilling history requirements of volatility models in live mode
            if (algorithm.LiveMode)
            {
                ProcessVolatilityHistoryRequirements(algorithm);
            }

            // get the required history job from the algorithm
            DateTime?lastHistoryTimeUtc = null;
            var      historyRequests    = algorithm.GetWarmupHistoryRequests().ToList();

            // initialize variables for progress computation
            var start            = DateTime.UtcNow.Ticks;
            var nextStatusTime   = DateTime.UtcNow.AddSeconds(1);
            var minimumIncrement = algorithm.UniverseManager
                                   .Select(x => x.Value.Configuration.Resolution.ToTimeSpan())
                                   .DefaultIfEmpty(Time.OneSecond)
                                   .Min();

            minimumIncrement = minimumIncrement == TimeSpan.Zero ? Time.OneSecond : minimumIncrement;

            if (historyRequests.Count != 0)
            {
                // rewrite internal feed requests
                var subscriptions = algorithm.SubscriptionManager.Subscriptions.Where(x => !x.IsInternalFeed).ToList();
                var minResolution = subscriptions.Count > 0 ? subscriptions.Min(x => x.Resolution) : Resolution.Second;
                foreach (var request in historyRequests)
                {
                    Security security;
                    if (algorithm.Securities.TryGetValue(request.Symbol, out security) && security.IsInternalFeed())
                    {
                        if (request.Resolution < minResolution)
                        {
                            request.Resolution            = minResolution;
                            request.FillForwardResolution = request.FillForwardResolution.HasValue ? minResolution : (Resolution?)null;
                        }
                    }
                }

                // rewrite all to share the same fill forward resolution
                if (historyRequests.Any(x => x.FillForwardResolution.HasValue))
                {
                    minResolution = historyRequests.Where(x => x.FillForwardResolution.HasValue).Min(x => x.FillForwardResolution.Value);
                    foreach (var request in historyRequests.Where(x => x.FillForwardResolution.HasValue))
                    {
                        request.FillForwardResolution = minResolution;
                    }
                }

                foreach (var request in historyRequests)
                {
                    start = Math.Min(request.StartTimeUtc.Ticks, start);
                    Log.Trace(string.Format("AlgorithmManager.Stream(): WarmupHistoryRequest: {0}: Start: {1} End: {2} Resolution: {3}", request.Symbol, request.StartTimeUtc, request.EndTimeUtc, request.Resolution));
                }

                // make the history request and build time slices
                foreach (var slice in history.GetHistory(historyRequests, timeZone))
                {
                    TimeSlice timeSlice;
                    try
                    {
                        // we need to recombine this slice into a time slice
                        var paired = new List <DataFeedPacket>();
                        foreach (var symbol in slice.Keys)
                        {
                            var security = algorithm.Securities[symbol];
                            var data     = slice[symbol];
                            var list     = new List <BaseData>();
                            var ticks    = data as List <Tick>;
                            if (ticks != null)
                            {
                                list.AddRange(ticks);
                            }
                            else
                            {
                                list.Add(data);
                            }
                            paired.Add(new DataFeedPacket(security, security.Subscriptions.First(), list));
                        }
                        timeSlice = TimeSlice.Create(slice.Time.ConvertToUtc(timeZone), timeZone, algorithm.Portfolio.CashBook, paired, SecurityChanges.None);
                    }
                    catch (Exception err)
                    {
                        Log.Error(err);
                        algorithm.RunTimeError = err;
                        yield break;
                    }

                    if (timeSlice != null)
                    {
                        if (!setStartTime)
                        {
                            setStartTime  = true;
                            _previousTime = timeSlice.Time;
                            algorithm.Debug("Algorithm warming up...");
                        }
                        if (DateTime.UtcNow > nextStatusTime)
                        {
                            // send some status to the user letting them know we're done with history, but still warming up,
                            // catching up to real time data
                            nextStatusTime = DateTime.UtcNow.AddSeconds(1);
                            var percent = (int)(100 * (timeSlice.Time.Ticks - start) / (double)(DateTime.UtcNow.Ticks - start));
                            results.SendStatusUpdate(AlgorithmStatus.History, string.Format("Catching up to realtime {0}%...", percent));
                        }
                        yield return(timeSlice);

                        lastHistoryTimeUtc = timeSlice.Time;
                    }
                }
            }

            // if we're not live or didn't even request warmup, then mark us as finished warming up
            if (!algorithm.LiveMode || historyRequests.Count == 0)
            {
                algorithm.SetFinishedWarmingUp();
                results.SendStatusUpdate(AlgorithmStatus.Running);
                if (historyRequests.Count != 0)
                {
                    algorithm.Debug("Algorithm finished warming up.");
                    Log.Trace("AlgorithmManager.Stream(): Finished warmup");
                }
            }

            foreach (var timeSlice in feed)
            {
                if (!setStartTime)
                {
                    setStartTime  = true;
                    _previousTime = timeSlice.Time;
                }
                if (algorithm.LiveMode && algorithm.IsWarmingUp)
                {
                    // this is hand-over logic: we spin up the data feed first and then request
                    // the history for warmup, so there will be some overlap between the warmup history and the live data
                    if (lastHistoryTimeUtc.HasValue)
                    {
                        // make sure there's no historical data, this only matters for the handover
                        var hasHistoricalData = false;
                        foreach (var data in timeSlice.Slice.Ticks.Values.SelectMany(x => x).Concat <BaseData>(timeSlice.Slice.Bars.Values))
                        {
                            // check if any ticks in the list are on or after our last warmup point, if so, skip this data
                            if (data.EndTime.ConvertToUtc(algorithm.Securities[data.Symbol].Exchange.TimeZone) >= lastHistoryTimeUtc)
                            {
                                hasHistoricalData = true;
                                break;
                            }
                        }
                        if (hasHistoricalData)
                        {
                            continue;
                        }

                        // prevent us from doing these checks every loop
                        lastHistoryTimeUtc = null;
                    }

                    // in live mode wait to mark us as finished warming up when
                    // the data feed has caught up to now within the min increment
                    if (timeSlice.Time > DateTime.UtcNow.Subtract(minimumIncrement))
                    {
                        algorithm.SetFinishedWarmingUp();
                        results.SendStatusUpdate(AlgorithmStatus.Running);
                        algorithm.Debug("Algorithm finished warming up.");
                        Log.Trace("AlgorithmManager.Stream(): Finished warmup");
                    }
                    else if (DateTime.UtcNow > nextStatusTime)
                    {
                        // send some status to the user letting them know we're done with history, but still warming up,
                        // catching up to real time data
                        nextStatusTime = DateTime.UtcNow.AddSeconds(1);
                        var percent = (int)(100 * (timeSlice.Time.Ticks - start) / (double)(DateTime.UtcNow.Ticks - start));
                        results.SendStatusUpdate(AlgorithmStatus.History, string.Format("Catching up to realtime {0}%...", percent));
                    }
                }
                yield return(timeSlice);
            }
        }
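The status updates above report warmup progress as how far the emitted slice times have advanced across the interval from the earliest history-request start to the current UTC time. A standalone sketch of that calculation with fixed, illustrative dates:

using System;

class WarmupProgressSketch
{
    static void Main()
    {
        // earliest warmup history start, the slice just emitted, and "now" (illustrative values)
        var start     = new DateTime(2014, 1, 1, 0, 0, 0, DateTimeKind.Utc).Ticks;
        var sliceTime = new DateTime(2014, 1, 4, 0, 0, 0, DateTimeKind.Utc);
        var utcNow    = new DateTime(2014, 1, 5, 0, 0, 0, DateTimeKind.Utc);

        var percent = (int)(100 * (sliceTime.Ticks - start) / (double)(utcNow.Ticks - start));
        Console.WriteLine("Catching up to realtime " + percent + "%..."); // 75%
    }
}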
Example n. 30
 /// <summary>
 /// Initializes a new <see cref="DataStream"/> for the specified data feed instance
 /// </summary>
 /// <param name="feed">The data feed to be streamed</param>
 /// <param name="liveMode"></param>
 public DataStream(IDataFeed feed, bool liveMode)
 {
     _feed = feed;
     _liveMode = liveMode;
 }
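A hedged usage note for the constructor above: assuming an already-constructed and initialized IDataFeed named feed (an assumption, not shown here), a backtest stream would be created like this.

// 'feed' is an assumed, already-initialized IDataFeed; false selects backtest mode
var stream = new DataStream(feed, liveMode: false);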
Example n. 31
 public DataManagerStub(IDataFeed dataFeed, IAlgorithm algorithm)
     : base(dataFeed, new UniverseSelection(dataFeed, algorithm), algorithm.Settings)
 {
 }
Example n. 32
        /********************************************************
        * CLASS METHODS
        *********************************************************/
        /// <summary>
        /// Processes the data feed's cross-thread bridges to generate an enumerable, time-sorted collection of data,
        /// synchronized in time and ready for a consumer to use.
        /// </summary>
        /// <param name="feed">DataFeed object</param>
        /// <param name="frontierOrigin">Starting date for the data feed</param>
        /// <returns>An enumerable of data grouped by subscription index</returns>
        public static IEnumerable<Dictionary<int, List<BaseData>>> GetData(IDataFeed feed, DateTime frontierOrigin)
        {
            //Initialize:
            long earlyBirdTicks = 0;
            _subscriptions = feed.Subscriptions.Count;
            AlorithmTime = frontierOrigin;
            long algorithmTime = AlorithmTime.Ticks;
            var frontier = frontierOrigin;
            var nextEmitTime = DateTime.MinValue;
            var periods = feed.Subscriptions.Select(x => x.Resolution.ToTimeSpan()).ToArray();

            //Wait for datafeeds to be ready, wait for first data to arrive:
            while (feed.Bridge.Length != _subscriptions) Thread.Sleep(100);

            // clear data first when in live mode, start with fresh data
            if (Engine.LiveMode)
            {
                feed.PurgeData();
            }

            //Get all data in queues: return as a sorted dictionary:
            while (!feed.EndOfBridges)
            {
                //Reset items which are not fill forward:
                earlyBirdTicks = 0;
                var newData = new Dictionary<int, List<BaseData>>();

                // spin wait until the feed catches up to our frontier
                WaitForDataOrEndOfBridges(feed, frontier);

                for (var i = 0; i < _subscriptions; i++)
                {
                    //If there's data on the bridge, check to see if it's time to pull it off, if it's in the future
                    // we'll record the time as 'earlyBirdTicks' so we can fast forward the frontier time
                    while (feed.Bridge[i].Count > 0)
                    {
                        //Look at first item on list, leave it there until time passes this item.
                        List<BaseData> result;
                        if (!feed.Bridge[i].TryPeek(out result))
                        {
                            // if there's no item skip to the next subscription
                            break;
                        }
                        if (result.Count > 0 && result[0].EndTime > frontier)
                        {
                            // we have at least one item, check to see if it's ahead of the frontier,
                            // if so, keep track of how many ticks in the future it is
                            if (earlyBirdTicks == 0 || earlyBirdTicks > result[0].EndTime.Ticks)
                            {
                                earlyBirdTicks = result[0].EndTime.Ticks;
                            }
                            break;
                        }

                        //Pull a grouped time list out of the bridge
                        List<BaseData> dataPoints;
                        if (feed.Bridge[i].TryDequeue(out dataPoints))
                        {
                            // round the time down based on the requested resolution for fill forward data
                            // this is a temporary fix, long term fill forward logic should be moved into this class
                            foreach (var point in dataPoints)
                            {
                                if (point.IsFillForward)
                                {
                                    point.Time = point.Time.RoundDown(periods[i]);
                                }
                                if (algorithmTime < point.EndTime.Ticks)
                                {
                                    // set this to least advanced end point in time, pre rounding
                                    algorithmTime = point.EndTime.Ticks;
                                }
                            }
                            // add the list to the collection to be yielded
                            newData[i] = dataPoints;
                        }
                        else
                        {
                            //Should never fail:
                            Log.Error("DataStream.GetData(): Failed to dequeue bridge item");
                        }
                    }
                }

                if (newData.Count > 0)
                {
                    AlorithmTime = new DateTime(algorithmTime);
                    yield return newData;
                }

                //Update the frontier and start again.
                if (earlyBirdTicks > 0)
                {
                    //Seek forward in time to the next data event from the stream: there's nothing for us to do now, so don't loop over the empty seconds
                    frontier = new DateTime(earlyBirdTicks);
                }
                else if (feed.EndOfBridges)
                {
                    // we're out of data or quit
                    break;
                }

                //Allow the loop to emit an empty pass-through every second to allow event handling (liquidate/stop/etc...)
                if (Engine.LiveMode && DateTime.Now > nextEmitTime)
                {
                    AlorithmTime = DateTime.Now.RoundDown(periods.Min());
                    nextEmitTime = DateTime.Now + TimeSpan.FromSeconds(1);
                    yield return new Dictionary<int, List<BaseData>>();
                }
            }
            Log.Trace("DataStream.GetData(): All Streams Completed.");
        }
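Consuming the stream is a foreach over the yielded dictionaries: each key is an index into feed.Subscriptions and each value is the list of data points for that subscription at the current frontier. A hedged consumer sketch follows; it is a fragment rather than a full program, and feed and startDate are assumed to come from the engine's setup, as in the Engine.Main example further down this page:

    // Sketch only: 'feed' is whatever IDataFeed the engine created, 'startDate' comes from the setup handler.
    foreach (var timeGroup in DataStream.GetData(feed, startDate))
    {
        foreach (var kvp in timeGroup)
        {
            var config = feed.Subscriptions[kvp.Key];   // subscription configuration for this index
            foreach (var point in kvp.Value)            // List<BaseData> for this subscription
            {
                // hand the point to the algorithm, update consolidators, etc.
                Console.WriteLine(config.Type.Name + " " + point.Symbol + " @ " + point.EndTime + " = " + point.Value);
            }
        }
    }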
Example #33
 /// <summary>
 /// Check if all the bridges have data or are dead before starting the analysis
 /// 
 /// This determines whether or not the data stream can pull data from the data feed.
 /// </summary>
 /// <param name="feed">Feed Interface with concurrent connection between producer and consumer</param>
 /// <returns>True when every bridge that has not ended has data queued; otherwise false</returns>
 private static bool AllBridgesHaveData(IDataFeed feed)
 {
     //Lock on the bridge to scan if it has data:
     for (var i = 0; i < _subscriptions; i++)
     {
         if (feed.EndOfBridge[i]) continue;
         if (feed.Bridge[i].Count == 0)
         {
             return false;
         }
     }
     return true;
 }
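The readiness check is straightforward to reproduce against plain ConcurrentQueue&lt;T&gt; instances, which is essentially what the bridges are. A self-contained sketch of the same pattern (the names and types here are illustrative, not the engine's):

    using System.Collections.Concurrent;

    static class BridgeCheck
    {
        // True when every bridge that has not finished still has at least one item queued.
        public static bool AllBridgesHaveData<T>(ConcurrentQueue<T>[] bridges, bool[] endOfBridge)
        {
            for (var i = 0; i < bridges.Length; i++)
            {
                if (endOfBridge[i]) continue;     // finished bridges are ignored
                if (bridges[i].IsEmpty) return false;
            }
            return true;
        }
    }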
Example #34
        /********************************************************
         * CLASS METHODS
         *********************************************************/
        /// <summary>
        /// Primary Analysis Thread:
        /// </summary>
        public static void Main(string[] args)
        {
            //Initialize:
            var    algorithmPath    = "";
            string mode             = "RELEASE";
            AlgorithmNodePacket job = null;
            var algorithm           = default(IAlgorithm);
            var startTime           = DateTime.Now;

            Log.LogHandler = Composer.Instance.GetExportedValueByTypeName <ILogHandler>(Config.Get("log-handler", "CompositeLogHandler"));

            #if DEBUG
            mode = "DEBUG";
            #endif

            //Name thread for the profiler:
            Thread.CurrentThread.Name = "Algorithm Analysis Thread";
            Log.Trace("Engine.Main(): LEAN ALGORITHMIC TRADING ENGINE v" + Constants.Version + " Mode: " + mode);
            Log.Trace("Engine.Main(): Started " + DateTime.Now.ToShortTimeString());
            Log.Trace("Engine.Main(): Memory " + OS.ApplicationMemoryUsed + "Mb-App  " + +OS.TotalPhysicalMemoryUsed + "Mb-Used  " + OS.TotalPhysicalMemory + "Mb-Total");

            //Import external libraries specific to physical server location (cloud/local)
            try
            {
                // grab the right export based on configuration
                Api      = Composer.Instance.GetExportedValueByTypeName <IApi>(Config.Get("api-handler"));
                Notify   = Composer.Instance.GetExportedValueByTypeName <IMessagingHandler>(Config.Get("messaging-handler"));
                JobQueue = Composer.Instance.GetExportedValueByTypeName <IJobQueueHandler>(Config.Get("job-queue-handler"));
            }
            catch (CompositionException compositionException)
            { Log.Error("Engine.Main(): Failed to load library: " + compositionException); }

            //Setup packeting, queue and controls system: These don't do much locally.
            Api.Initialize();
            Notify.Initialize();
            JobQueue.Initialize();

            //Start monitoring the backtest active status:
            var statusPingThread = new Thread(StateCheck.Ping.Run);
            statusPingThread.Start();

            do
            {
                try
                {
                    //Reset algo manager internal variables preparing for a new algorithm.
                    AlgorithmManager.ResetManager();

                    //Reset thread holders.
                    var    initializeComplete = false;
                    Thread threadFeed         = null;
                    Thread threadTransactions = null;
                    Thread threadResults      = null;
                    Thread threadRealTime     = null;

                    do
                    {
                        //-> Pull job from QuantConnect job queue, or, pull local build:
                        job = JobQueue.NextJob(out algorithmPath); // Blocking.

                        if (!IsLocal && LiveMode && (job.Version != Constants.Version || (job.Version == Constants.Version && job.Redelivered)))
                        {
                            //Tiny chance there was an uncontrolled collapse of a server, resulting in an old user task circulating.
                            //In this event kill the old algorithm and leave a message so the user can later review.
                            JobQueue.AcknowledgeJob(job);
                            Api.SetAlgorithmStatus(job.AlgorithmId, AlgorithmStatus.RuntimeError, _collapseMessage);
                            Notify.SetChannel(job.Channel);
                            Notify.RuntimeError(job.AlgorithmId, _collapseMessage);
                            job = null;
                        }
                    } while (job == null);


                    //-> Initialize messaging system
                    Notify.SetChannel(job.Channel);

                    //-> Create SetupHandler to configure internal algorithm state:
                    SetupHandler = GetSetupHandler(job.SetupEndpoint);

                    //-> Set the result handler type for this algorithm job, and launch the associated result thread.
                    ResultHandler = GetResultHandler(job);
                    threadResults = new Thread(ResultHandler.Run, 0)
                    {
                        Name = "Result Thread"
                    };
                    threadResults.Start();

                    try
                    {
                        // Save algorithm to cache, load algorithm instance:
                        algorithm = SetupHandler.CreateAlgorithmInstance(algorithmPath);

                        //Initialize the internal state of algorithm and job: executes the algorithm.Initialize() method.
                        initializeComplete = SetupHandler.Setup(algorithm, out _brokerage, job);

                        //If there are any reasons it failed, pass these back to the IDE.
                        if (!initializeComplete || algorithm.ErrorMessages.Count > 0 || SetupHandler.Errors.Count > 0)
                        {
                            initializeComplete = false;
                            //Get all the error messages: internal in algorithm and external in setup handler.
                            var errorMessage = String.Join(",", algorithm.ErrorMessages);
                            errorMessage += String.Join(",", SetupHandler.Errors);
                            throw new Exception(errorMessage);
                        }
                    }
                    catch (Exception err)
                    {
                        var runtimeMessage = "Algorithm.Initialize() Error: " + err.Message + " Stack Trace: " + err.StackTrace;
                        ResultHandler.RuntimeError(runtimeMessage, err.StackTrace);
                        Api.SetAlgorithmStatus(job.AlgorithmId, AlgorithmStatus.RuntimeError, runtimeMessage);
                    }

                    //-> Using the job + initialization: load the designated handlers:
                    if (initializeComplete)
                    {
                        //-> Reset the backtest stopwatch; we're now running the algorithm.
                        startTime = DateTime.Now;

                        //Set algorithm as locked; set it to live mode if we're trading live, and set it to locked for no further updates.
                        algorithm.SetAlgorithmId(job.AlgorithmId);
                        algorithm.SetLiveMode(LiveMode);
                        algorithm.SetLocked();

                        //Load the associated handlers for data, transaction and realtime events:
                        ResultHandler.SetAlgorithm(algorithm);
                        DataFeed           = GetDataFeedHandler(algorithm, job);
                        TransactionHandler = GetTransactionHandler(algorithm, _brokerage, ResultHandler, job);
                        RealTimeHandler    = GetRealTimeHandler(algorithm, _brokerage, DataFeed, ResultHandler, job);

                        //Set the error handlers for the brokerage asynchronous errors.
                        SetupHandler.SetupErrorHandler(ResultHandler, _brokerage);

                        //Send status to user the algorithm is now executing.
                        ResultHandler.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Running);

                        //Launch the data, transaction and realtime handlers into dedicated threads
                        threadFeed = new Thread(DataFeed.Run, 0)
                        {
                            Name = "DataFeed Thread"
                        };
                        threadTransactions = new Thread(TransactionHandler.Run, 0)
                        {
                            Name = "Transaction Thread"
                        };
                        threadRealTime = new Thread(RealTimeHandler.Run, 0)
                        {
                            Name = "RealTime Thread"
                        };

                        //Launch the data feed, result sending, and transaction models/handlers in separate threads.
                        threadFeed.Start();         // Data feed pushing data packets into thread bridge;
                        threadTransactions.Start(); // Transaction modeller scanning new order requests
                        threadRealTime.Start();     // RealTime scan time for time based events:
                        // Result manager scanning message queue: (started earlier)

                        try
                        {
                            // Execute the Algorithm Code:
                            var complete = Isolator.ExecuteWithTimeLimit(SetupHandler.MaximumRuntime, () =>
                            {
                                try
                                {
                                    //Run Algorithm Job:
                                    // -> Using this Data Feed,
                                    // -> Send Orders to this TransactionHandler,
                                    // -> Send Results to ResultHandler.
                                    AlgorithmManager.Run(job, algorithm, DataFeed, TransactionHandler, ResultHandler, SetupHandler, RealTimeHandler);
                                }
                                catch (Exception err)
                                {
                                    //Debugging at this level is difficult, stack trace needed.
                                    Log.Error("Engine.Run(): Error in Algo Manager: " + err.Message + " ST >> " + err.StackTrace);
                                }

                                Log.Trace("Engine.Run(): Exiting Algorithm Manager");
                            }, MaximumRamAllocation);

                            if (!complete)
                            {
                                Log.Error("Engine.Main(): Failed to complete in time: " + SetupHandler.MaximumRuntime.ToString("F"));
                                throw new Exception("Failed to complete algorithm within " + SetupHandler.MaximumRuntime.ToString("F") + " seconds. Please make it run faster.");
                            }

                            // Algorithm runtime error:
                            if (algorithm.RunTimeError != null)
                            {
                                throw algorithm.RunTimeError;
                            }
                        }
                        catch (Exception err)
                        {
                            //Error running the user algorithm: purge datafeed, send error messages, set algorithm status to failed.
                            Log.Error("Engine.Run(): Breaking out of parent try-catch: " + err.Message + " " + err.StackTrace);
                            if (DataFeed != null)
                            {
                                DataFeed.Exit();
                            }
                            if (ResultHandler != null)
                            {
                                var message = "Runtime Error: " + err.Message;
                                Log.Trace("Engine.Run(): Sending runtime error to user...");
                                ResultHandler.LogMessage(message);
                                ResultHandler.RuntimeError(message, err.StackTrace);
                                Api.SetAlgorithmStatus(job.AlgorithmId, AlgorithmStatus.RuntimeError, message + " Stack Trace: " + err.StackTrace);
                            }
                        }

                        //Send result data back: this entire code block could be rewritten.
                        // todo: - Split up statistics class, it's enormous.
                        // todo: - Make a dedicated Statistics.Benchmark class.
                        // todo: - Move all creation and transmission of statistics out of primary engine loop.
                        // todo: - Statistics.Generate(algorithm, resulthandler, transactionhandler);

                        try
                        {
                            var charts     = new Dictionary <string, Chart>(ResultHandler.Charts);
                            var orders     = new Dictionary <int, Order>(algorithm.Transactions.Orders);
                            var holdings   = new Dictionary <string, Holding>();
                            var statistics = new Dictionary <string, string>();
                            var banner     = new Dictionary <string, string>();

                            try
                            {
                                //Generates error when things don't exist (no charting logged, runtime errors in main algo execution)
                                const string strategyEquityKey   = "Strategy Equity";
                                const string equityKey           = "Equity";
                                const string dailyPerformanceKey = "Daily Performance";

                                // make sure we've taken samples for these series before just blindly requesting them
                                if (charts.ContainsKey(strategyEquityKey) &&
                                    charts[strategyEquityKey].Series.ContainsKey(equityKey) &&
                                    charts[strategyEquityKey].Series.ContainsKey(dailyPerformanceKey))
                                {
                                    var equity      = charts[strategyEquityKey].Series[equityKey].Values;
                                    var performance = charts[strategyEquityKey].Series[dailyPerformanceKey].Values;
                                    var profitLoss  =
                                        new SortedDictionary <DateTime, decimal>(algorithm.Transactions.TransactionRecord);
                                    statistics = Statistics.Statistics.Generate(equity, profitLoss, performance,
                                                                                SetupHandler.StartingCapital, 252);
                                }
                            }
                            catch (Exception err)
                            {
                                Log.Error("Algorithm.Node.Engine(): Error generating statistics packet: " + err.Message);
                            }

                            //Diagnostics Completed, Send Result Packet:
                            var totalSeconds = (DateTime.Now - startTime).TotalSeconds;
                            ResultHandler.DebugMessage(string.Format("Algorithm Id:({0}) completed in {1} seconds at {2}k data points per second. Processing total of {3} data points.",
                                                                     job.AlgorithmId, totalSeconds.ToString("F2"), ((AlgorithmManager.DataPoints / (double)1000) / totalSeconds).ToString("F0"), AlgorithmManager.DataPoints.ToString("N0")));

                            ResultHandler.SendFinalResult(job, orders, algorithm.Transactions.TransactionRecord, holdings, statistics, banner);
                        }
                        catch (Exception err)
                        {
                            Log.Error("Engine.Main(): Error sending analysis result: " + err.Message + "  ST >> " + err.StackTrace);
                        }

                        //Before we return, send terminate commands to close up the threads
                        TransactionHandler.Exit();
                        DataFeed.Exit();
                        RealTimeHandler.Exit();
                    }

                    //Close result handler:
                    ResultHandler.Exit();

                    //Wait for the threads to complete:
                    var ts = Stopwatch.StartNew();
                    while ((ResultHandler.IsActive || (TransactionHandler != null && TransactionHandler.IsActive) || (DataFeed != null && DataFeed.IsActive)) && ts.ElapsedMilliseconds < 30 * 1000)
                    {
                        Thread.Sleep(100); Log.Trace("Waiting for threads to exit...");
                    }
                    if (threadFeed != null && threadFeed.IsAlive)
                    {
                        threadFeed.Abort();
                    }
                    if (threadTransactions != null && threadTransactions.IsAlive)
                    {
                        threadTransactions.Abort();
                    }
                    if (threadResults != null && threadResults.IsAlive)
                    {
                        threadResults.Abort();
                    }
                    Log.Trace("Engine.Main(): Analysis Completed and Results Posted.");
                }
                catch (Exception err)
                {
                    Log.Error("Engine.Main(): Error running algorithm: " + err.Message + " >> " + err.StackTrace);
                }
                finally
                {
                    //Delete the message from the job queue:
                    JobQueue.AcknowledgeJob(job);
                    Log.Trace("Engine.Main(): Packet removed from queue: " + job.AlgorithmId);

                    //No matter what for live mode; make sure we've set algorithm status in the API for "not running" conditions:
                    if (LiveMode && AlgorithmManager.State != AlgorithmStatus.Running && AlgorithmManager.State != AlgorithmStatus.RuntimeError)
                    {
                        Api.SetAlgorithmStatus(job.AlgorithmId, AlgorithmManager.State);
                    }

                    //Attempt to clean up ram usage:
                    GC.Collect();
                }
                //If we're running locally this will execute just once.
            } while (!IsLocal);

            // Send the exit signal and then kill the thread
            StateCheck.Ping.Exit();

            // Make the console window pause so we can read log output before exiting and killing the application completely
            Console.Read();

            //Finally if ping thread still not complete, kill.
            if (statusPingThread != null && statusPingThread.IsAlive)
            {
                statusPingThread.Abort();
            }

            if (Log.LogHandler != null)
            {
                Log.LogHandler.Dispose();
            }
        }
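The handler wiring near the top of Main follows one pattern throughout: read a type name from configuration, ask the composition container for an exported instance of the matching interface, then initialize it. A hedged sketch of that pattern, using only the calls shown above (the "messaging-handler" configuration key is the one used in this listing):

    // Sketch: resolve and initialize a single handler, mirroring the Api/Notify/JobQueue block above.
    try
    {
        var messaging = Composer.Instance.GetExportedValueByTypeName<IMessagingHandler>(Config.Get("messaging-handler"));
        messaging.Initialize();
    }
    catch (CompositionException compositionException)
    {
        Log.Error("Failed to load messaging handler: " + compositionException);
    }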
Example #35
 /// <summary>
 /// Initializes a new instance of the <see cref="UniverseSelection"/> class
 /// </summary>
 /// <param name="dataFeed">The data feed to add/remove subscriptions from</param>
 /// <param name="algorithm">The algorithm to add securities to</param>
 public UniverseSelection(IDataFeed dataFeed, IAlgorithm algorithm)
 {
     _dataFeed = dataFeed;
     _algorithm = algorithm;
 }
Example #36
        /// <summary>
        /// Initialize the result handler with this result packet.
        /// </summary>
        /// <param name="job">Algorithm job packet for this result handler</param>
        /// <param name="messagingHandler">The handler responsible for communicating messages to listeners</param>
        /// <param name="api">The api instance used for handling logs</param>
        /// <param name="dataFeed"></param>
        /// <param name="setupHandler"></param>
        /// <param name="transactionHandler"></param>
        public void Initialize(AlgorithmNodePacket job, IMessagingHandler messagingHandler, IApi api, IDataFeed dataFeed, ISetupHandler setupHandler, ITransactionHandler transactionHandler)
        {
            _api = api;
            _messagingHandler   = messagingHandler;
            _transactionHandler = transactionHandler;
            _job = (BacktestNodePacket)job;
            if (_job == null)
            {
                throw new Exception("BacktestingResultHandler.Constructor(): Submitted Job type invalid.");
            }
            _compileId  = _job.CompileId;
            _backtestId = _job.BacktestId;

            //Get the resample period:
            var totalMinutes    = (_job.PeriodFinish - _job.PeriodStart).TotalMinutes;
            var resampleMinutes = (totalMinutes < (_minimumSamplePeriod * _samples)) ? _minimumSamplePeriod : (totalMinutes / _samples); // Space out the sampling evenly across the period, but never below the minimum sample period

            _resamplePeriod = TimeSpan.FromMinutes(resampleMinutes);
            Log.Trace("BacktestingResultHandler(): Sample Period Set: " + resampleMinutes.ToString("00.00"));
        }
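The resample-period arithmetic is easier to see with concrete numbers (all values below are assumed purely for illustration): a 90-day backtest has 129,600 minutes; with a target of 4,000 samples and a 4-minute floor, 129,600 / 4,000 = 32.4 minutes, which is above the floor, so the handler samples roughly every 32 minutes. A standalone sketch of the same computation:

    using System;

    class ResamplePeriodDemo
    {
        static void Main()
        {
            // Assumed inputs for illustration; the real values come from the job packet and handler fields.
            var periodStart = new DateTime(2014, 1, 1);
            var periodFinish = periodStart.AddDays(90);
            const double samples = 4000;
            const double minimumSamplePeriod = 4; // minutes

            var totalMinutes = (periodFinish - periodStart).TotalMinutes;
            var resampleMinutes = (totalMinutes < (minimumSamplePeriod * samples))
                ? minimumSamplePeriod
                : (totalMinutes / samples);

            Console.WriteLine(TimeSpan.FromMinutes(resampleMinutes)); // ~00:32:24 for these inputs
        }
    }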
Example #37
 public AlgorithmStub(IDataFeed dataFeed)
 {
     DataManager = new DataManagerStub(dataFeed, this);
     SubscriptionManager.SetDataManager(DataManager);
 }
Example #38
 public void Initialize(AlgorithmNodePacket job, IMessagingHandler messagingHandler, IApi api, IDataFeed dataFeed, ISetupHandler setupHandler, ITransactionHandler transactionHandler)
 {
     _shadow.Initialize(job, messagingHandler, api, dataFeed, setupHandler, transactionHandler);
 }
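Example #38 is a thin wrapper that forwards Initialize to an inner ("shadow") result handler. The forwarding shape is easy to show in isolation; a simplified, self-contained sketch with a made-up one-method interface standing in for the much larger real one:

    using System;

    // Hypothetical, trimmed stand-in for the result handler interface.
    interface IResultHandlerLike
    {
        void Initialize(string jobId);
    }

    class ConsoleResultHandler : IResultHandlerLike
    {
        public void Initialize(string jobId) => Console.WriteLine("Initialized for job " + jobId);
    }

    // Wrapper that owns a 'shadow' handler and forwards every call to it, mirroring Example #38.
    class ShadowingResultHandler : IResultHandlerLike
    {
        private readonly IResultHandlerLike _shadow;
        public ShadowingResultHandler(IResultHandlerLike shadow) { _shadow = shadow; }
        public void Initialize(string jobId) => _shadow.Initialize(jobId);
    }

    class ShadowDemo
    {
        static void Main()
        {
            IResultHandlerLike handler = new ShadowingResultHandler(new ConsoleResultHandler());
            handler.Initialize("backtest-001");
        }
    }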
Example #39
        /// <summary>
        /// Waits until the data feed is ready for the data stream to pull data from it.
        /// </summary>
        /// <param name="feed">The IDataFeed instance populating the bridges</param>
        /// <param name="dataStreamFrontier">The frontier of the data stream</param>
        private static void WaitForDataOrEndOfBridges(IDataFeed feed, DateTime dataStreamFrontier)
        {
            //Make sure all bridges have data so we can peek and sync properly.
            var now = Stopwatch.StartNew();

            // timeout to prevent infinite looping here -- 50ms for live and 30sec for non-live
            var loopTimeout = (Engine.LiveMode) ? 50 : 30000;

            if (Engine.LiveMode)
            {
                // give some time to the other threads in live mode
                Thread.Sleep(1);
            }

            while (!AllBridgesHaveData(feed) && now.ElapsedMilliseconds < loopTimeout)
            {
                Thread.Sleep(1);
            }

            //we want to verify that our data stream is never ahead of our data feed.
            //this acts as a virtual lock around the bridge so we can wait for the feed
            //to be ahead of us
            // if we're out of data then the feed will never update (it will stay here forever if there's no more data, so use a timeout!!)
            while (dataStreamFrontier > feed.LoadedDataFrontier && !feed.EndOfBridges && now.ElapsedMilliseconds < loopTimeout)
            {
                Thread.Sleep(1);
            }
        }
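Both loops in WaitForDataOrEndOfBridges are instances of the same spin-wait-with-timeout idiom: poll a condition, sleep a millisecond, and stop once a Stopwatch passes the limit. A self-contained sketch of that idiom (names are illustrative); the first loop above is this helper with AllBridgesHaveData(feed) as the condition, the second with the frontier/end-of-bridges test:

    using System;
    using System.Diagnostics;
    using System.Threading;

    static class SpinWaitHelper
    {
        // Polls 'condition' roughly once per millisecond until it is true or 'timeout' elapses.
        // Returns true if the condition was satisfied before the timeout.
        public static bool WaitUntil(Func<bool> condition, TimeSpan timeout)
        {
            var watch = Stopwatch.StartNew();
            while (!condition() && watch.Elapsed < timeout)
            {
                Thread.Sleep(1);
            }
            return condition();
        }
    }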
Example #40
        /// <summary>
        /// Creates a new instance of the DataManager
        /// </summary>
        public DataManager(
            IDataFeed dataFeed,
            UniverseSelection universeSelection,
            IAlgorithm algorithm,
            ITimeKeeper timeKeeper,
            MarketHoursDatabase marketHoursDatabase,
            bool liveMode,
            IRegisteredSecurityDataTypesProvider registeredTypesProvider)
        {
            _dataFeed         = dataFeed;
            UniverseSelection = universeSelection;
            UniverseSelection.SetDataManager(this);
            _algorithmSettings       = algorithm.Settings;
            AvailableDataTypes       = SubscriptionManager.DefaultDataTypes();
            _timeKeeper              = timeKeeper;
            _marketHoursDatabase     = marketHoursDatabase;
            _liveMode                = liveMode;
            _registeredTypesProvider = registeredTypesProvider;

            // wire ourselves up to receive notifications when universes are added/removed
            algorithm.UniverseManager.CollectionChanged += (sender, args) =>
            {
                switch (args.Action)
                {
                case NotifyCollectionChangedAction.Add:
                    foreach (var universe in args.NewItems.OfType <Universe>())
                    {
                        var config = universe.Configuration;
                        var start  = algorithm.UtcTime;

                        var end = algorithm.LiveMode ? Time.EndOfTime
                                : algorithm.EndDate.ConvertToUtc(algorithm.TimeZone);

                        Security security;
                        if (!algorithm.Securities.TryGetValue(config.Symbol, out security))
                        {
                            // create a canonical security object if it doesn't exist
                            security = new Security(
                                _marketHoursDatabase.GetExchangeHours(config),
                                config,
                                algorithm.Portfolio.CashBook[algorithm.AccountCurrency],
                                SymbolProperties.GetDefault(algorithm.AccountCurrency),
                                algorithm.Portfolio.CashBook,
                                RegisteredSecurityDataTypesProvider.Null,
                                new SecurityCache()
                                );
                        }
                        AddSubscription(
                            new SubscriptionRequest(true,
                                                    universe,
                                                    security,
                                                    config,
                                                    start,
                                                    end));
                    }
                    break;

                case NotifyCollectionChangedAction.Remove:
                    foreach (var universe in args.OldItems.OfType <Universe>())
                    {
                        // removing the subscription will be handled by the SubscriptionSynchronizer
                        // in the next loop as well as executing a UniverseSelection one last time.
                        if (!universe.DisposeRequested)
                        {
                            universe.Dispose();
                        }
                    }
                    break;

                default:
                    throw new NotImplementedException("The specified action is not implemented: " + args.Action);
                }
            };
        }
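The universe wiring in this constructor is a standard NotifyCollectionChanged subscription: react to Add by building subscriptions and to Remove by disposing. A self-contained sketch of the same event-wiring shape using an ObservableCollection; the handler bodies are placeholders, not the engine's logic:

    using System;
    using System.Collections.ObjectModel;
    using System.Collections.Specialized;

    class UniverseWiringDemo
    {
        static void Main()
        {
            var universes = new ObservableCollection<string>();

            universes.CollectionChanged += (sender, args) =>
            {
                switch (args.Action)
                {
                    case NotifyCollectionChangedAction.Add:
                        foreach (string universe in args.NewItems)
                            Console.WriteLine("add subscription for " + universe);   // placeholder
                        break;

                    case NotifyCollectionChangedAction.Remove:
                        foreach (string universe in args.OldItems)
                            Console.WriteLine("dispose universe " + universe);       // placeholder
                        break;

                    default:
                        throw new NotImplementedException("The specified action is not implemented: " + args.Action);
                }
            };

            universes.Add("coarse-us-equity");    // fires the Add branch
            universes.Remove("coarse-us-equity"); // fires the Remove branch
        }
    }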
Example #41
        /// <summary>
        /// Initialize the result handler with this result packet.
        /// </summary>
        /// <param name="job">Algorithm job packet for this result handler</param>
        /// <param name="messagingHandler">The handler responsible for communicating messages to listeners</param>
        /// <param name="api">The api instance used for handling logs</param>
        /// <param name="dataFeed"></param>
        /// <param name="setupHandler"></param>
        /// <param name="transactionHandler"></param>
        public void Initialize(AlgorithmNodePacket job, IMessagingHandler messagingHandler, IApi api, IDataFeed dataFeed, ISetupHandler setupHandler, ITransactionHandler transactionHandler)
        {
            _api = api;
            _messagingHandler = messagingHandler;
            _transactionHandler = transactionHandler;
            _job = (BacktestNodePacket)job;
            if (_job == null) throw new Exception("BacktestingResultHandler.Constructor(): Submitted Job type invalid.");
            _compileId = _job.CompileId;
            _backtestId = _job.BacktestId;

            //Get the resample period:
            var totalMinutes = (_job.PeriodFinish - _job.PeriodStart).TotalMinutes;
            var resampleMinutes = (totalMinutes < (_minimumSamplePeriod * _samples)) ? _minimumSamplePeriod : (totalMinutes / _samples); // Space out the sampling evenly across the period, but never below the minimum sample period
            _resamplePeriod = TimeSpan.FromMinutes(resampleMinutes);
            Log.Trace("BacktestingResultHandler(): Sample Period Set: " + resampleMinutes.ToString("00.00"));
        }
Example #42
 /// <summary>
 /// Initializes a new instance of the <see cref="UniverseSelection"/> class
 /// </summary>
 /// <param name="dataFeed">The data feed to add/remove subscriptions from</param>
 /// <param name="algorithm">The algorithm to add securities to</param>
 /// <param name="controls">Specifies limits on the algorithm's memory usage</param>
 public UniverseSelection(IDataFeed dataFeed, IAlgorithm algorithm, Controls controls)
 {
     _dataFeed  = dataFeed;
     _algorithm = algorithm;
     _limiter   = new SubscriptionLimiter(() => dataFeed.Subscriptions, controls.TickLimit, controls.SecondLimit, controls.MinuteLimit);
 }
Example #43
 /// <summary>
 /// Initializes a new instance of the <see cref="UniverseSelection"/> class
 /// </summary>
 /// <param name="dataFeed">The data feed to add/remove subscriptions from</param>
 /// <param name="algorithm">The algorithm to add securities to</param>
 /// <param name="controls">Specifies limits on the algorithm's memory usage</param>
 public UniverseSelection(IDataFeed dataFeed, IAlgorithm algorithm, Controls controls)
 {
     _dataFeed = dataFeed;
     _algorithm = algorithm;
     _limiter = new SubscriptionLimiter(() => dataFeed.Subscriptions, controls.TickLimit, controls.SecondLimit, controls.MinuteLimit);
 }
Example #44
 private void ConsumeBridge(IDataFeed feed, TimeSpan timeout, Action <TimeSlice> handler, bool sendUniverseData = false)
 {
     ConsumeBridge(feed, timeout, false, handler, sendUniverseData: sendUniverseData);
 }
 /// <summary>
 /// Initializes a new instance of the <see cref="UniverseSelection"/> class
 /// </summary>
 /// <param name="dataFeed">The data feed to add/remove subscriptions from</param>
 /// <param name="algorithm">The algorithm to add securities to</param>
 /// <param name="isLiveMode">True for live mode, false for back test mode</param>
 public UniverseSelection(IDataFeed dataFeed, IAlgorithm algorithm, bool isLiveMode)
 {
     _dataFeed = dataFeed;
     _algorithm = algorithm;
 }
Example #46
        /********************************************************
         * CLASS METHODS
         *********************************************************/
        /// <summary>
        /// Launch the algorithm manager to run this strategy
        /// </summary>
        /// <param name="job">Algorithm job</param>
        /// <param name="algorithm">Algorithm instance</param>
        /// <param name="feed">Datafeed object</param>
        /// <param name="transactions">Transaction manager object</param>
        /// <param name="results">Result handler object</param>
        /// <param name="setup">Setup handler object</param>
        /// <param name="realtime">Realtime processing object</param>
        /// <remarks>Modify with caution</remarks>
        public static void Run(AlgorithmNodePacket job, IAlgorithm algorithm, IDataFeed feed, ITransactionHandler transactions, IResultHandler results, ISetupHandler setup, IRealTimeHandler realtime)
        {
            //Initialize:
            _dataPointCount = 0;
            var startingPortfolioValue = setup.StartingPortfolioValue;
            var backtestMode           = (job.Type == PacketType.BacktestNode);
            var methodInvokers         = new Dictionary <Type, MethodInvoker>();
            var marginCallFrequency    = TimeSpan.FromMinutes(5);
            var nextMarginCallTime     = DateTime.MinValue;

            //Initialize Properties:
            _algorithmId    = job.AlgorithmId;
            _algorithmState = AlgorithmStatus.Running;
            _previousTime   = setup.StartingDate.Date;

            //Create the method accessors to push generic types into algorithm: Find all OnData events:

            // Algorithm 1.0 data accessors
            var hasOnTradeBar = AddMethodInvoker <Dictionary <string, TradeBar> >(algorithm, methodInvokers, "OnTradeBar");
            var hasOnTick     = AddMethodInvoker <Dictionary <string, List <Tick> > >(algorithm, methodInvokers, "OnTick");

            // Algorithm 2.0 data accessors
            var hasOnDataTradeBars = AddMethodInvoker <TradeBars>(algorithm, methodInvokers);
            var hasOnDataTicks     = AddMethodInvoker <Ticks>(algorithm, methodInvokers);

            // determine what mode we're in
            var backwardsCompatibilityMode = !hasOnDataTradeBars && !hasOnDataTicks;

            // dividend and split events
            var hasOnDataDividends = AddMethodInvoker <Dividends>(algorithm, methodInvokers);
            var hasOnDataSplits    = AddMethodInvoker <Splits>(algorithm, methodInvokers);

            //Go through the subscription types and create invokers to trigger the event handlers for each custom type:
            foreach (var config in feed.Subscriptions)
            {
                //If type is a tradebar, combine tradebars and ticks into unified array:
                if (config.Type.Name != "TradeBar" && config.Type.Name != "Tick")
                {
                    //Get the matching method for this event handler - e.g. public void OnData(Quandl data) { .. }
                    var genericMethod = (algorithm.GetType()).GetMethod("OnData", new[] { config.Type });

                    //If we already have this Type-handler then don't add it to invokers again.
                    if (methodInvokers.ContainsKey(config.Type))
                    {
                        continue;
                    }

                    //If we couldn't find the event handler, let the user know we can't fire that event.
                    if (genericMethod == null)
                    {
                        algorithm.RunTimeError = new Exception("Data event handler not found, please create a function matching this template: public void OnData(" + config.Type.Name + " data) {  }");
                        _algorithmState        = AlgorithmStatus.RuntimeError;
                        return;
                    }
                    methodInvokers.Add(config.Type, genericMethod.DelegateForCallMethod());
                }
            }

            //Loop over the queues: get a data collection, then pass them all into relevant methods in the algorithm.
            Log.Debug("AlgorithmManager.Run(): Algorithm initialized, launching time loop.");
            foreach (var newData in DataStream.GetData(feed, setup.StartingDate))
            {
                //Check this backtest is still running:
                if (_algorithmState != AlgorithmStatus.Running)
                {
                    break;
                }

                //Execute with TimeLimit Monitor:
                if (Isolator.IsCancellationRequested)
                {
                    return;
                }

                var time = DataStream.AlorithmTime;

                //If we're in backtest mode we need to capture the daily performance. We do this here directly
                //before updating the algorithm state with the new data from this time step, otherwise we'll
                //produce incorrect samples (they'll take into account this time step's new price values)
                if (backtestMode)
                {
                    //Refresh the realtime event monitor:
                    //in backtest mode use the algorithms clock as realtime.
                    realtime.SetTime(time);

                    //On day-change sample equity and daily performance for statistics calculations
                    if (_previousTime.Date != time.Date)
                    {
                        //Sample the portfolio value over time for chart.
                        results.SampleEquity(_previousTime, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4));

                        //Check for divide by zero
                        if (startingPortfolioValue == 0m)
                        {
                            results.SamplePerformance(_previousTime.Date, 0);
                        }
                        else
                        {
                            results.SamplePerformance(_previousTime.Date, Math.Round((algorithm.Portfolio.TotalPortfolioValue - startingPortfolioValue) * 100 / startingPortfolioValue, 10));
                        }
                        startingPortfolioValue = algorithm.Portfolio.TotalPortfolioValue;
                    }
                }

                //Update algorithm state after capturing performance from previous day

                //On each time step push the real time prices to the cashbook so we can have updated conversion rates
                algorithm.Portfolio.CashBook.Update(newData);

                //Update the securities properties: first before calling user code to avoid issues with data
                algorithm.Securities.Update(time, newData);

                // perform margin calls, in live mode we can also use realtime to emit these
                if (time >= nextMarginCallTime || (Engine.LiveMode && nextMarginCallTime > DateTime.Now))
                {
                    // determine if there are possible margin call orders to be executed
                    var marginCallOrders = algorithm.Portfolio.ScanForMarginCall();
                    if (marginCallOrders.Count != 0)
                    {
                        // execute the margin call orders
                        var executedOrders = algorithm.Portfolio.MarginCallModel.ExecuteMarginCall(marginCallOrders);
                        foreach (var order in executedOrders)
                        {
                            algorithm.Error(string.Format("Executed MarginCallOrder: {0} - Quantity: {1} @ {2}", order.Symbol, order.Quantity, order.Price));
                        }
                    }

                    nextMarginCallTime = time + marginCallFrequency;
                }


                //Check if the user has signalled Quit: if so, stop processing data.
                if (algorithm.GetQuit())
                {
                    _algorithmState = AlgorithmStatus.Quit;
                    break;
                }
                if (algorithm.RunTimeError != null)
                {
                    _algorithmState = AlgorithmStatus.RuntimeError;
                    break;
                }

                //Pass in the new time first:
                algorithm.SetDateTime(time);

                //Trigger the data events: Invoke the types we have data for:
                var oldBars      = new Dictionary <string, TradeBar>();
                var oldTicks     = new Dictionary <string, List <Tick> >();
                var newBars      = new TradeBars(time);
                var newTicks     = new Ticks(time);
                var newDividends = new Dividends(time);
                var newSplits    = new Splits(time);

                //Invoke all non-tradebars, non-ticks methods and build up the TradeBars and Ticks dictionaries
                // --> i == Subscription Configuration Index, so we don't need to compare types.
                foreach (var i in newData.Keys)
                {
                    //Data point and config of this point:
                    var dataPoints = newData[i];
                    var config     = feed.Subscriptions[i];

                    //Keep track of how many data points we've processed
                    _dataPointCount += dataPoints.Count;

                    //We don't want to pump data that we added just for currency conversions
                    if (config.IsInternalFeed)
                    {
                        continue;
                    }

                    //Create TradeBars Unified Data --> OR --> invoke generic data event. One loop.
                    //  Aggregate Dividends and Splits -- invoke portfolio application methods
                    foreach (var dataPoint in dataPoints)
                    {
                        var dividend = dataPoint as Dividend;
                        if (dividend != null)
                        {
                            Log.Trace("AlgorithmManager.Run(): Applying Dividend for " + dividend.Symbol);
                            // if this is a dividend apply to portfolio
                            algorithm.Portfolio.ApplyDividend(dividend);
                            if (hasOnDataDividends)
                            {
                                // and add to our data dictionary to pump into OnData(Dividends data)
                                newDividends.Add(dividend);
                            }
                            continue;
                        }

                        var split = dataPoint as Split;
                        if (split != null)
                        {
                            Log.Trace("AlgorithmManager.Run(): Applying Split for " + split.Symbol);

                            // if this is a split apply to portfolio
                            algorithm.Portfolio.ApplySplit(split);
                            if (hasOnDataSplits)
                            {
                                // and add to our data dictionary to pump into OnData(Splits data)
                                newSplits.Add(split);
                            }
                            continue;
                        }

                        //Update registered consolidators for this symbol index
                        try
                        {
                            for (var j = 0; j < config.Consolidators.Count; j++)
                            {
                                config.Consolidators[j].Update(dataPoint);
                            }
                        }
                        catch (Exception err)
                        {
                            algorithm.RunTimeError = err;
                            _algorithmState        = AlgorithmStatus.RuntimeError;
                            Log.Error("AlgorithmManager.Run(): RuntimeError: Consolidators update: " + err.Message);
                            return;
                        }

                        // TRADEBAR -- add to our dictionary
                        var bar = dataPoint as TradeBar;
                        if (bar != null)
                        {
                            try
                            {
                                if (backwardsCompatibilityMode)
                                {
                                    oldBars[bar.Symbol] = bar;
                                }
                                else
                                {
                                    newBars[bar.Symbol] = bar;
                                }
                            }
                            catch (Exception err)
                            {
                                Log.Error(time.ToLongTimeString() + " >> " + bar.Time.ToLongTimeString() + " >> " + bar.Symbol + " >> "
                                          + bar.Value.ToString("C"));
                                Log.Error("AlgorithmManager.Run(): Failed to add TradeBar (" + bar.Symbol + ") Time: (" + time.ToLongTimeString()
                                          + ") Count:(" + newBars.Count + ") " + err.Message);
                            }
                            continue;
                        }
                        // TICK -- add to our dictionary
                        var tick = dataPoint as Tick;
                        if (tick != null)
                        {
                            if (backwardsCompatibilityMode)
                            {
                                List <Tick> ticks;
                                if (!oldTicks.TryGetValue(tick.Symbol, out ticks))
                                {
                                    ticks = new List <Tick>(3);
                                    oldTicks.Add(tick.Symbol, ticks);
                                }
                                ticks.Add(tick);
                            }
                            else
                            {
                                List <Tick> ticks;
                                if (!newTicks.TryGetValue(tick.Symbol, out ticks))
                                {
                                    ticks = new List <Tick>(3);
                                    newTicks.Add(tick.Symbol, ticks);
                                }
                                ticks.Add(tick);
                            }
                            continue;
                        }

                        // if it was nothing else then it must be custom data

                        // CUSTOM DATA -- invoke on data method
                        //Send data into the generic algorithm event handlers
                        try
                        {
                            methodInvokers[config.Type](algorithm, dataPoint);
                        }
                        catch (Exception err)
                        {
                            algorithm.RunTimeError = err;
                            _algorithmState        = AlgorithmStatus.RuntimeError;
                            Log.Debug("AlgorithmManager.Run(): RuntimeError: Custom Data: " + err.Message + " STACK >>> " + err.StackTrace);
                            return;
                        }
                    }
                }

                try
                {
                    // fire off the dividend and split events before pricing events
                    if (hasOnDataDividends && newDividends.Count != 0)
                    {
                        methodInvokers[typeof(Dividends)](algorithm, newDividends);
                    }
                    if (hasOnDataSplits && newSplits.Count != 0)
                    {
                        methodInvokers[typeof(Splits)](algorithm, newSplits);
                    }
                }
                catch (Exception err)
                {
                    algorithm.RunTimeError = err;
                    _algorithmState        = AlgorithmStatus.RuntimeError;
                    Log.Debug("AlgorithmManager.Run(): RuntimeError: Dividends/Splits: " + err.Message + " STACK >>> " + err.StackTrace);
                    return;
                }

                //After we've fired all other events in this second, fire the pricing events:
                if (backwardsCompatibilityMode)
                {
                    //Log.Debug("AlgorithmManager.Run(): Invoking v1.0 Event Handlers...");
                    try
                    {
                        if (hasOnTradeBar && oldBars.Count > 0)
                        {
                            methodInvokers[typeof(Dictionary <string, TradeBar>)](algorithm, oldBars);
                        }
                        if (hasOnTick && oldTicks.Count > 0)
                        {
                            methodInvokers[typeof(Dictionary <string, List <Tick> >)](algorithm, oldTicks);
                        }
                    }
                    catch (Exception err)
                    {
                        algorithm.RunTimeError = err;
                        _algorithmState        = AlgorithmStatus.RuntimeError;
                        Log.Debug("AlgorithmManager.Run(): RuntimeError: Backwards Compatibility Mode: " + err.Message + " STACK >>> " + err.StackTrace);
                        return;
                    }
                }
                else
                {
                    //Log.Debug("AlgorithmManager.Run(): Invoking v2.0 Event Handlers...");
                    try
                    {
                        if (hasOnDataTradeBars && newBars.Count > 0)
                        {
                            methodInvokers[typeof(TradeBars)](algorithm, newBars);
                        }
                        if (hasOnDataTicks && newTicks.Count > 0)
                        {
                            methodInvokers[typeof(Ticks)](algorithm, newTicks);
                        }
                    }
                    catch (Exception err)
                    {
                        algorithm.RunTimeError = err;
                        _algorithmState        = AlgorithmStatus.RuntimeError;
                        Log.Debug("AlgorithmManager.Run(): RuntimeError: New Style Mode: " + err.Message + " STACK >>> " + err.StackTrace);
                        return;
                    }
                }

                //If it's the historical/paper trading models, wait until market orders have been "filled"
                // Manually trigger the event handler to prevent thread switch.
                transactions.ProcessSynchronousEvents();

                //Save the previous time for the sample calculations
                _previousTime = time;

                // Process any required events of the results handler such as sampling assets, equity, or stock prices.
                results.ProcessSynchronousEvents();
            } // End of ForEach DataStream

            //Stream over:: Send the final packet and fire final events:
            Log.Trace("AlgorithmManager.Run(): Firing On End Of Algorithm...");
            try
            {
                algorithm.OnEndOfAlgorithm();
            }
            catch (Exception err)
            {
                _algorithmState        = AlgorithmStatus.RuntimeError;
                algorithm.RunTimeError = new Exception("Error running OnEndOfAlgorithm(): " + err.Message, err.InnerException);
                Log.Debug("AlgorithmManager.OnEndOfAlgorithm(): " + err.Message + " STACK >>> " + err.StackTrace);
                return;
            }

            // Process any required events of the results handler such as sampling assets, equity, or stock prices.
            results.ProcessSynchronousEvents(forceProcess: true);

            //Liquidate Holdings for Calculations:
            if (_algorithmState == AlgorithmStatus.Liquidated || !Engine.LiveMode)
            {
                // without this we can't liquidate equities since the exchange is 'technically' closed
                var hackedFrontier = algorithm.Time.AddMilliseconds(-1);
                algorithm.SetDateTime(hackedFrontier);
                foreach (var security in algorithm.Securities)
                {
                    security.Value.SetMarketPrice(hackedFrontier, null);
                }

                Log.Trace("AlgorithmManager.Run(): Liquidating algorithm holdings...");
                algorithm.Liquidate();
                results.LogMessage("Algorithm Liquidated");
                results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Liquidated);
            }

            //Manually stopped the algorithm
            if (_algorithmState == AlgorithmStatus.Stopped)
            {
                Log.Trace("AlgorithmManager.Run(): Stopping algorithm...");
                results.LogMessage("Algorithm Stopped");
                results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Stopped);
            }

            //Backtest deleted.
            if (_algorithmState == AlgorithmStatus.Deleted)
            {
                Log.Trace("AlgorithmManager.Run(): Deleting algorithm...");
                results.DebugMessage("Algorithm Id:(" + job.AlgorithmId + ") Deleted by request.");
                results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Deleted);
            }

            //Algorithm finished, send regardless of commands:
            results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Completed);

            //Take final samples:
            results.SampleRange(algorithm.GetChartUpdates());
            results.SampleEquity(DataStream.AlorithmTime, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4));
            results.SamplePerformance(DataStream.AlorithmTime, Math.Round((algorithm.Portfolio.TotalPortfolioValue - startingPortfolioValue) * 100 / startingPortfolioValue, 10));
        } // End of Run();
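The method-invoker setup at the top of Run — GetMethod("OnData", new[] { config.Type }) plus a dictionary keyed by type — is reflection-based dispatch. A self-contained sketch of the same idea using MethodInfo.Invoke instead of the engine's compiled delegates; the custom data type and algorithm class below are made up for illustration:

    using System;
    using System.Collections.Generic;
    using System.Reflection;

    class CustomData { public decimal Value; }            // stand-in for a custom data type

    class MyAlgorithm
    {
        // The handler the dispatcher looks for: public void OnData(<CustomType> data)
        public void OnData(CustomData data) => Console.WriteLine("custom value: " + data.Value);
    }

    class DispatchDemo
    {
        static void Main()
        {
            var algorithm = new MyAlgorithm();
            var invokers = new Dictionary<Type, MethodInfo>();

            var dataType = typeof(CustomData);
            var method = algorithm.GetType().GetMethod("OnData", new[] { dataType });
            if (method == null)
            {
                throw new Exception("Data event handler not found: public void OnData(" + dataType.Name + " data) { }");
            }
            invokers[dataType] = method;

            // Later, when a data point of that type arrives:
            var point = new CustomData { Value = 42m };
            invokers[point.GetType()].Invoke(algorithm, new object[] { point });
        }
    }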
Example #47
 /// <summary>
 /// Initialize the result handler with this result packet.
 /// </summary>
 /// <param name="job">Algorithm job packet for this result handler</param>
 /// <param name="messagingHandler"></param>
 /// <param name="api"></param>
 /// <param name="dataFeed"></param>
 /// <param name="setupHandler"></param>
 /// <param name="transactionHandler"></param>
 public void Initialize(AlgorithmNodePacket job, IMessagingHandler messagingHandler, IApi api, IDataFeed dataFeed, ISetupHandler setupHandler, ITransactionHandler transactionHandler)
 {
     //Redirect the log messages here:
     _job = job;
     var desktopLogging = new FunctionalLogHandler(DebugMessage, DebugMessage, ErrorMessage);
     Log.LogHandler = new CompositeLogHandler(new[] { desktopLogging, Log.LogHandler });
 }
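 The Initialize above swaps the global log handler for a composite that keeps the previously registered handler and adds a desktop handler built from this result handler's own DebugMessage and ErrorMessage callbacks (DebugMessage is passed twice, covering both debug and trace output). Those callbacks are not shown in this snippet; a purely illustrative stand-in, assuming they simply relay text to whatever surface the result handler drives, might be:

 // Assumed stand-ins for the callbacks wired into FunctionalLogHandler above;
 // the real handler presumably forwards these to its UI or message queue.
 public void DebugMessage(string message)
 {
     Console.WriteLine("DEBUG: " + message);
 }

 public void ErrorMessage(string message)
 {
     Console.WriteLine("ERROR: " + message);
 }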
Esempio n. 48
0
 private static void ConsumeBridge(IDataFeed feed, Action<TimeSlice> handler)
 {
     ConsumeBridge(feed, TimeSpan.FromSeconds(10), handler);
 }
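 Both ConsumeBridge helpers in this document forward to a fuller overload that is not reproduced here. A hedged sketch of what that overload presumably does follows: drain the feed's bridge of TimeSlice objects until the timeout elapses, invoking the handler for each slice. The Bridge member is inferred from the feed.Bridge.GetConsumingEnumerable loop referenced later in this document; the third parameter's name and the data-only filtering are assumptions.

 // Sketch only (requires System.Threading): not the engine's actual implementation.
 private static void ConsumeBridge(IDataFeed feed, TimeSpan timeout, bool alwaysInvoke, Action<TimeSlice> handler)
 {
     using (var cancellationSource = new CancellationTokenSource(timeout))
     {
         try
         {
             // Bridge is assumed to be a BlockingCollection<TimeSlice> filled by the feed.
             foreach (var timeSlice in feed.Bridge.GetConsumingEnumerable(cancellationSource.Token))
             {
                 if (alwaysInvoke || timeSlice.Slice.HasData)
                 {
                     handler(timeSlice);
                 }
             }
         }
         catch (OperationCanceledException)
         {
             // Timeout reached; stop consuming.
         }
     }
 }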
Esempio n. 49
0
        /// <summary>
        /// Select the realtime event handler set in the job.
        /// </summary>
        private static IRealTimeHandler GetRealTimeHandler(IAlgorithm algorithm, IBrokerage brokerage, IDataFeed feed, IResultHandler results, AlgorithmNodePacket job)
        {
            var rth = default(IRealTimeHandler);
            switch (job.RealTimeEndpoint)
            {
                //Don't fire based on system time; fire based on the virtualized backtesting time.
                case RealTimeEndpoint.Backtesting:
                    Log.Trace("Engine.GetRealTimeHandler(): Selected Backtesting RealTimeEvent Handler");
                    rth = new BacktestingRealTimeHandler(algorithm, job);
                    break;

                // Fire events based on real system clock time.
                case RealTimeEndpoint.LiveTrading:
                    Log.Trace("Engine.GetRealTimeHandler(): Selected LiveTrading RealTimeEvent Handler");
                    rth = new LiveTradingRealTimeHandler(algorithm, feed, results);
                    break;
            }
            return rth;
        }
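        A hedged usage sketch of the handler selected above, mirroring how AlgorithmManager.Run drives it later in this document: in backtests, events scheduled in the past that never fired (markets closed, no data) are scanned for before the clock is advanced, then SetTime moves the handler onto the current time frontier. The wrapper method below is illustrative only.

        // Illustrative driver (hypothetical method), reusing only calls shown elsewhere in this document.
        private static void DriveRealTimeHandler(IAlgorithm algorithm, IBrokerage brokerage, IDataFeed feed,
            IResultHandler results, AlgorithmNodePacket job, IEnumerable<DateTime> frontierTimes)
        {
            var realtime = GetRealTimeHandler(algorithm, brokerage, feed, results, job);
            var backtesting = job.Type == PacketType.BacktestNode;
            foreach (var time in frontierTimes)
            {
                // Backtesting: fire events whose scheduled time has already passed without data.
                if (backtesting) realtime.ScanPastEvents(time);

                // Advance the real-time handler's clock so events scheduled for 'time' can fire.
                realtime.SetTime(time);
            }
        }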
Esempio n. 50
0
 private static void ConsumeBridge(IDataFeed feed, TimeSpan timeout, Action<TimeSlice> handler)
 {
     ConsumeBridge(feed, timeout, false, handler);
 }
Esempio n. 53
0
        /// <summary>
        /// Initialize the result handler with this result packet.
        /// </summary>
        /// <param name="packet">Algorithm job packet for this result handler</param>
        /// <param name="messagingHandler"></param>
        /// <param name="api"></param>
        /// <param name="dataFeed"></param>
        /// <param name="setupHandler"></param>
        /// <param name="transactionHandler"></param>
        public void Initialize(AlgorithmNodePacket packet, IMessagingHandler messagingHandler, IApi api, IDataFeed dataFeed, ISetupHandler setupHandler, ITransactionHandler transactionHandler)
        {
            // we expect one of two types here, the backtest node packet or the live node packet
            var job = packet as BacktestNodePacket;
            if (job != null)
            {
                _algorithmNode = new BacktestConsoleStatusHandler(job);
            }
            else
            {
                var live = packet as LiveNodePacket;
                if (live == null)
                {
                    throw new ArgumentException("Unexpected AlgorithmNodeType: " + packet.GetType().Name);
                }
                _algorithmNode = new LiveConsoleStatusHandler(live);
            }
            _resamplePeriod = _algorithmNode.ComputeSampleEquityPeriod();

            var time = DateTime.Now.ToString("yyyy-MM-dd-HH-mm");
            _chartDirectory = Path.Combine("../../../Charts/", packet.AlgorithmId, time);
            if (Directory.Exists(_chartDirectory))
            {
                foreach (var file in Directory.EnumerateFiles(_chartDirectory, "*.csv", SearchOption.AllDirectories))
                {
                    File.Delete(file);
                }
                Directory.Delete(_chartDirectory, true);
            }
            Directory.CreateDirectory(_chartDirectory);
            _messagingHandler = messagingHandler; 

        }
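        The directory handling inside Initialize above can be read as a single reset step: wipe any stale CSV exports, remove the chart directory, then recreate it empty. Extracted as a standalone sketch (the helper name is hypothetical; the behaviour is exactly what the snippet does):

        // Hypothetical helper restating the chart-directory reset performed by Initialize above.
        private static void ResetChartDirectory(string chartDirectory)
        {
            if (Directory.Exists(chartDirectory))
            {
                // Delete previously exported chart CSVs before removing the directory itself.
                foreach (var file in Directory.EnumerateFiles(chartDirectory, "*.csv", SearchOption.AllDirectories))
                {
                    File.Delete(file);
                }
                Directory.Delete(chartDirectory, true);
            }
            Directory.CreateDirectory(chartDirectory);
        }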
Esempio n. 54
0
        /// <summary>
        /// Launch the algorithm manager to run this strategy
        /// </summary>
        /// <param name="job">Algorithm job</param>
        /// <param name="algorithm">Algorithm instance</param>
        /// <param name="feed">Datafeed object</param>
        /// <param name="transactions">Transaction manager object</param>
        /// <param name="results">Result handler object</param>
        /// <param name="realtime">Realtime processing object</param>
        /// <param name="commands">The command queue for relaying extenal commands to the algorithm</param>
        /// <param name="token">Cancellation token</param>
        /// <remarks>Modify with caution</remarks>
        public void Run(AlgorithmNodePacket job, IAlgorithm algorithm, IDataFeed feed, ITransactionHandler transactions, IResultHandler results, IRealTimeHandler realtime, ICommandQueueHandler commands, CancellationToken token)
        {
            //Initialize:
            _dataPointCount = 0;
            _algorithm      = algorithm;
            var portfolioValue          = algorithm.Portfolio.TotalPortfolioValue;
            var backtestMode            = (job.Type == PacketType.BacktestNode);
            var methodInvokers          = new Dictionary<Type, MethodInvoker>();
            var marginCallFrequency     = TimeSpan.FromMinutes(5);
            var nextMarginCallTime      = DateTime.MinValue;
            var settlementScanFrequency = TimeSpan.FromMinutes(30);
            var nextSettlementScanTime  = DateTime.MinValue;

            var delistings = new List<Delisting>();

            //Initialize Properties:
            _algorithmId      = job.AlgorithmId;
            _algorithm.Status = AlgorithmStatus.Running;
            _previousTime     = algorithm.StartDate.Date;

            //Create the method accessors to push generic types into algorithm: Find all OnData events:

            // Algorithm 2.0 data accessors
            var hasOnDataTradeBars    = AddMethodInvoker<TradeBars>(algorithm, methodInvokers);
            var hasOnDataQuoteBars    = AddMethodInvoker<QuoteBars>(algorithm, methodInvokers);
            var hasOnDataOptionChains = AddMethodInvoker<OptionChains>(algorithm, methodInvokers);
            var hasOnDataTicks        = AddMethodInvoker<Ticks>(algorithm, methodInvokers);

            // dividend and split events
            var hasOnDataDividends           = AddMethodInvoker<Dividends>(algorithm, methodInvokers);
            var hasOnDataSplits              = AddMethodInvoker<Splits>(algorithm, methodInvokers);
            var hasOnDataDelistings          = AddMethodInvoker<Delistings>(algorithm, methodInvokers);
            var hasOnDataSymbolChangedEvents = AddMethodInvoker<SymbolChangedEvents>(algorithm, methodInvokers);

            // Algorithm 3.0 data accessors
            var hasOnDataSlice = algorithm.GetType().GetMethods()
                                 .Where(x => x.Name == "OnData" && x.GetParameters().Length == 1 && x.GetParameters()[0].ParameterType == typeof(Slice))
                                 .FirstOrDefault(x => x.DeclaringType == algorithm.GetType()) != null;

            //Go through the subscription types and create invokers to trigger the event handlers for each custom type:
            foreach (var config in algorithm.SubscriptionManager.Subscriptions)
            {
                //If type is a custom feed, check for a dedicated event handler
                if (config.IsCustomData)
                {
                    //Get the matching method for this event handler - e.g. public void OnData(Quandl data) { .. }
                    var genericMethod = (algorithm.GetType()).GetMethod("OnData", new[] { config.Type });

                    //If we already have this Type-handler then don't add it to invokers again.
                    if (methodInvokers.ContainsKey(config.Type))
                    {
                        continue;
                    }

                    //If we couldn't find the event handler, let the user know we can't fire that event.
                    if (genericMethod == null && !hasOnDataSlice)
                    {
                        algorithm.RunTimeError = new Exception("Data event handler not found, please create a function matching this template: public void OnData(" + config.Type.Name + " data) {  }");
                        _algorithm.Status      = AlgorithmStatus.RuntimeError;
                        return;
                    }
                    if (genericMethod != null)
                    {
                        methodInvokers.Add(config.Type, genericMethod.DelegateForCallMethod());
                    }
                }
            }

            //Loop over the queues: get a data collection, then pass them all into relevant methods in the algorithm.
            Log.Trace("AlgorithmManager.Run(): Begin DataStream - Start: " + algorithm.StartDate + " Stop: " + algorithm.EndDate);
            foreach (var timeSlice in Stream(job, algorithm, feed, results, token))
            {
                // reset our timer on each loop
                _currentTimeStepTime = DateTime.UtcNow;

                //Check this backtest is still running:
                if (_algorithm.Status != AlgorithmStatus.Running)
                {
                    Log.Error(string.Format("AlgorithmManager.Run(): Algorithm state changed to {0} at {1}", _algorithm.Status, timeSlice.Time));
                    break;
                }

                //Execute with TimeLimit Monitor:
                if (token.IsCancellationRequested)
                {
                    Log.Error("AlgorithmManager.Run(): CancellationRequestion at " + timeSlice.Time);
                    return;
                }

                // before doing anything, check our command queue
                foreach (var command in commands.GetCommands())
                {
                    if (command == null)
                    {
                        continue;
                    }
                    Log.Trace("AlgorithmManager.Run(): Executing {0}", command);
                    CommandResultPacket result;
                    try
                    {
                        result = command.Run(algorithm);
                    }
                    catch (Exception err)
                    {
                        Log.Error(err);
                        algorithm.Error(string.Format("{0} Error: {1}", command.GetType().Name, err.Message));
                        result = new CommandResultPacket(command, false);
                    }

                    // send the result of the command off to the result handler
                    results.Messages.Enqueue(result);
                }

                var time = timeSlice.Time;
                _dataPointCount += timeSlice.DataPointCount;

                //If we're in backtest mode we need to capture the daily performance. We do this here directly
                //before updating the algorithm state with the new data from this time step, otherwise we'll
                //produce incorrect samples (they'll take into account this time step's new price values)
                if (backtestMode)
                {
                    //On day-change sample equity and daily performance for statistics calculations
                    if (_previousTime.Date != time.Date)
                    {
                        SampleBenchmark(algorithm, results, _previousTime.Date);

                        //Sample the portfolio value over time for chart.
                        results.SampleEquity(_previousTime, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4));

                        //Check for divide by zero
                        if (portfolioValue == 0m)
                        {
                            results.SamplePerformance(_previousTime.Date, 0);
                        }
                        else
                        {
                            results.SamplePerformance(_previousTime.Date, Math.Round((algorithm.Portfolio.TotalPortfolioValue - portfolioValue) * 100 / portfolioValue, 10));
                        }
                        portfolioValue = algorithm.Portfolio.TotalPortfolioValue;
                    }
                }
                else
                {
                    // live mode: continuously sample the benchmark
                    SampleBenchmark(algorithm, results, time);
                }

                //Update algorithm state after capturing performance from previous day

                // If backtesting, we need to check if there are realtime events in the past
                // which didn't fire because at the scheduled times there was no data (i.e. markets closed)
                // and fire them with the correct date/time.
                if (backtestMode)
                {
                    realtime.ScanPastEvents(time);
                }

                //Set the algorithm and real time handler's time
                algorithm.SetDateTime(time);

                if (timeSlice.Slice.SymbolChangedEvents.Count != 0)
                {
                    if (hasOnDataSymbolChangedEvents)
                    {
                        methodInvokers[typeof(SymbolChangedEvents)](algorithm, timeSlice.Slice.SymbolChangedEvents);
                    }
                    foreach (var symbol in timeSlice.Slice.SymbolChangedEvents.Keys)
                    {
                        // cancel all orders for the old symbol
                        foreach (var ticket in transactions.GetOrderTickets(x => x.Status.IsOpen() && x.Symbol == symbol))
                        {
                            ticket.Cancel("Open order cancelled on symbol changed event");
                        }
                    }
                }

                if (timeSlice.SecurityChanges != SecurityChanges.None)
                {
                    foreach (var security in timeSlice.SecurityChanges.AddedSecurities)
                    {
                        if (!algorithm.Securities.ContainsKey(security.Symbol))
                        {
                            // add the new security
                            algorithm.Securities.Add(security);
                        }
                    }
                }

                //On each time step push the real time prices to the cashbook so we can have updated conversion rates
                foreach (var update in timeSlice.CashBookUpdateData)
                {
                    var cash = update.Target;
                    foreach (var data in update.Data)
                    {
                        cash.Update(data);
                    }
                }

                //Update the securities properties: first before calling user code to avoid issues with data
                foreach (var update in timeSlice.SecuritiesUpdateData)
                {
                    var security = update.Target;
                    foreach (var data in update.Data)
                    {
                        security.SetMarketPrice(data);
                    }

                    // Send market price updates to the TradeBuilder
                    algorithm.TradeBuilder.SetMarketPrice(security.Symbol, security.Price);
                }

                // fire real time events after we've updated based on the new data
                realtime.SetTime(timeSlice.Time);

                // process fill models on the updated data before entering algorithm, applies to all non-market orders
                transactions.ProcessSynchronousEvents();

                // process end of day delistings
                ProcessDelistedSymbols(algorithm, delistings);

                //Check if the user has signalled Quit:
                if (algorithm.Status == AlgorithmStatus.Stopped)
                {
                    Log.Trace("AlgorithmManager.Run(): Algorithm quit requested.");
                    break;
                }
                if (algorithm.RunTimeError != null)
                {
                    _algorithm.Status = AlgorithmStatus.RuntimeError;
                    Log.Trace(string.Format("AlgorithmManager.Run(): Algorithm encountered a runtime error at {0}. Error: {1}", timeSlice.Time, algorithm.RunTimeError));
                    break;
                }

                // perform margin calls, in live mode we can also use realtime to emit these
                if (time >= nextMarginCallTime || (_liveMode && nextMarginCallTime > DateTime.UtcNow))
                {
                    // determine if there are possible margin call orders to be executed
                    bool issueMarginCallWarning;
                    var  marginCallOrders = algorithm.Portfolio.ScanForMarginCall(out issueMarginCallWarning);
                    if (marginCallOrders.Count != 0)
                    {
                        var executingMarginCall = false;
                        try
                        {
                            // tell the algorithm we're about to issue the margin call
                            algorithm.OnMarginCall(marginCallOrders);

                            executingMarginCall = true;

                            // execute the margin call orders
                            var executedTickets = algorithm.Portfolio.MarginCallModel.ExecuteMarginCall(marginCallOrders);
                            foreach (var ticket in executedTickets)
                            {
                                algorithm.Error(string.Format("{0} - Executed MarginCallOrder: {1} - Quantity: {2} @ {3}", algorithm.Time, ticket.Symbol, ticket.Quantity, ticket.AverageFillPrice));
                            }
                        }
                        catch (Exception err)
                        {
                            algorithm.RunTimeError = err;
                            _algorithm.Status      = AlgorithmStatus.RuntimeError;
                            var locator = executingMarginCall ? "Portfolio.MarginCallModel.ExecuteMarginCall" : "OnMarginCall";
                            Log.Error(string.Format("AlgorithmManager.Run(): RuntimeError: {0}: ", locator) + err);
                            return;
                        }
                    }
                    // we didn't perform a margin call, but got the warning flag back, so issue the warning to the algorithm
                    else if (issueMarginCallWarning)
                    {
                        try
                        {
                            algorithm.OnMarginCallWarning();
                        }
                        catch (Exception err)
                        {
                            algorithm.RunTimeError = err;
                            _algorithm.Status      = AlgorithmStatus.RuntimeError;
                            Log.Error("AlgorithmManager.Run(): RuntimeError: OnMarginCallWarning: " + err);
                            return;
                        }
                    }

                    nextMarginCallTime = time + marginCallFrequency;
                }

                // perform check for settlement of unsettled funds
                if (time >= nextSettlementScanTime || (_liveMode && nextSettlementScanTime > DateTime.UtcNow))
                {
                    algorithm.Portfolio.ScanForCashSettlement(algorithm.UtcTime);

                    nextSettlementScanTime = time + settlementScanFrequency;
                }

                // before we call any events, let the algorithm know about universe changes
                if (timeSlice.SecurityChanges != SecurityChanges.None)
                {
                    try
                    {
                        algorithm.OnSecuritiesChanged(timeSlice.SecurityChanges);
                    }
                    catch (Exception err)
                    {
                        algorithm.RunTimeError = err;
                        _algorithm.Status      = AlgorithmStatus.RuntimeError;
                        Log.Error("AlgorithmManager.Run(): RuntimeError: OnSecuritiesChanged event: " + err);
                        return;
                    }
                }

                // apply dividends
                foreach (var dividend in timeSlice.Slice.Dividends.Values)
                {
                    Log.Trace("AlgorithmManager.Run(): {0}: Applying Dividend for {1}", algorithm.Time, dividend.Symbol.ToString());
                    algorithm.Portfolio.ApplyDividend(dividend);
                }

                // apply splits
                foreach (var split in timeSlice.Slice.Splits.Values)
                {
                    try
                    {
                        Log.Trace("AlgorithmManager.Run(): {0}: Applying Split for {1}", algorithm.Time, split.Symbol.ToString());
                        algorithm.Portfolio.ApplySplit(split);
                        // apply the split to open orders as well in raw mode, all other modes are split adjusted
                        if (_liveMode || algorithm.Securities[split.Symbol].DataNormalizationMode == DataNormalizationMode.Raw)
                        {
                            // in live mode we always want to have our order match the order at the brokerage, so apply the split to the orders
                            var openOrders = transactions.GetOrderTickets(ticket => ticket.Status.IsOpen() && ticket.Symbol == split.Symbol);
                            algorithm.BrokerageModel.ApplySplit(openOrders.ToList(), split);
                        }
                    }
                    catch (Exception err)
                    {
                        algorithm.RunTimeError = err;
                        _algorithm.Status      = AlgorithmStatus.RuntimeError;
                        Log.Error("AlgorithmManager.Run(): RuntimeError: Split event: " + err);
                        return;
                    }
                }

                //Update registered consolidators for this symbol index
                try
                {
                    foreach (var update in timeSlice.ConsolidatorUpdateData)
                    {
                        var resolutionTimeSpan = update.Target.Resolution.ToTimeSpan();
                        var consolidators      = update.Target.Consolidators;
                        foreach (var consolidator in consolidators)
                        {
                            foreach (var dataPoint in update.Data)
                            {
                                // Filter out data with resolution higher than the data subscription resolution.
                                // This is needed to avoid feeding in higher resolution data, typically fill-forward bars.
                                // It also prevents volume-based indicators or consolidators summing up volume to generate
                                // invalid values.
                                var algorithmTimeSpan = resolutionTimeSpan == TimeSpan.FromTicks(0)
                                    ? TimeSpan.FromTicks(0)
                                    : TimeSpan.FromSeconds(1);
                                if (update.Target.Resolution == Resolution.Tick ||
                                    algorithm.UtcTime.RoundDown(algorithmTimeSpan) == dataPoint.EndTime.RoundUp(resolutionTimeSpan).ConvertToUtc(update.Target.ExchangeTimeZone))
                                {
                                    consolidator.Update(dataPoint);
                                }
                            }

                            // scan for time after we've pumped all the data through for this consolidator
                            var localTime = time.ConvertFromUtc(update.Target.ExchangeTimeZone);
                            consolidator.Scan(localTime);
                        }
                    }
                }
                catch (Exception err)
                {
                    algorithm.RunTimeError = err;
                    _algorithm.Status      = AlgorithmStatus.RuntimeError;
                    Log.Error("AlgorithmManager.Run(): RuntimeError: Consolidators update: " + err);
                    return;
                }

                // fire custom event handlers
                foreach (var update in timeSlice.CustomData)
                {
                    MethodInvoker methodInvoker;
                    if (!methodInvokers.TryGetValue(update.DataType, out methodInvoker))
                    {
                        continue;
                    }

                    try
                    {
                        foreach (var dataPoint in update.Data)
                        {
                            if (update.DataType.IsInstanceOfType(dataPoint))
                            {
                                methodInvoker(algorithm, dataPoint);
                            }
                        }
                    }
                    catch (Exception err)
                    {
                        algorithm.RunTimeError = err;
                        _algorithm.Status      = AlgorithmStatus.RuntimeError;
                        Log.Error("AlgorithmManager.Run(): RuntimeError: Custom Data: " + err);
                        return;
                    }
                }

                try
                {
                    // fire off the dividend and split events before pricing events
                    if (hasOnDataDividends && timeSlice.Slice.Dividends.Count != 0)
                    {
                        methodInvokers[typeof(Dividends)](algorithm, timeSlice.Slice.Dividends);
                    }
                    if (hasOnDataSplits && timeSlice.Slice.Splits.Count != 0)
                    {
                        methodInvokers[typeof(Splits)](algorithm, timeSlice.Slice.Splits);
                    }
                    if (hasOnDataDelistings && timeSlice.Slice.Delistings.Count != 0)
                    {
                        methodInvokers[typeof(Delistings)](algorithm, timeSlice.Slice.Delistings);
                    }
                }
                catch (Exception err)
                {
                    algorithm.RunTimeError = err;
                    _algorithm.Status      = AlgorithmStatus.RuntimeError;
                    Log.Error("AlgorithmManager.Run(): RuntimeError: Dividends/Splits/Delistings: " + err);
                    return;
                }

                // run the delisting logic after firing delisting events
                HandleDelistedSymbols(algorithm, timeSlice.Slice.Delistings, delistings);

                //After we've fired all other events in this second, fire the pricing events:
                try
                {
                    // TODO: For backwards compatibility only. Remove in 2017
                    // For compatibility with Forex trade data, collapse Forex QuoteBars into the TradeBars collection:
                    if (timeSlice.Slice.QuoteBars.Count > 0)
                    {
                        foreach (var tradeBar in timeSlice.Slice.QuoteBars.Where(x => x.Key.ID.SecurityType == SecurityType.Forex))
                        {
                            timeSlice.Slice.Bars.Add(tradeBar.Value.Collapse());
                        }
                    }
                    if (hasOnDataTradeBars && timeSlice.Slice.Bars.Count > 0)
                    {
                        methodInvokers[typeof(TradeBars)](algorithm, timeSlice.Slice.Bars);
                    }
                    if (hasOnDataQuoteBars && timeSlice.Slice.QuoteBars.Count > 0)
                    {
                        methodInvokers[typeof(QuoteBars)](algorithm, timeSlice.Slice.QuoteBars);
                    }
                    if (hasOnDataOptionChains && timeSlice.Slice.OptionChains.Count > 0)
                    {
                        methodInvokers[typeof(OptionChains)](algorithm, timeSlice.Slice.OptionChains);
                    }
                    if (hasOnDataTicks && timeSlice.Slice.Ticks.Count > 0)
                    {
                        methodInvokers[typeof(Ticks)](algorithm, timeSlice.Slice.Ticks);
                    }
                }
                catch (Exception err)
                {
                    algorithm.RunTimeError = err;
                    _algorithm.Status      = AlgorithmStatus.RuntimeError;
                    Log.Error("AlgorithmManager.Run(): RuntimeError: New Style Mode: " + err);
                    return;
                }

                try
                {
                    if (timeSlice.Slice.HasData)
                    {
                        // EVENT HANDLER v3.0 -- all data in a single event
                        algorithm.OnData(timeSlice.Slice);
                    }
                }
                catch (Exception err)
                {
                    algorithm.RunTimeError = err;
                    _algorithm.Status      = AlgorithmStatus.RuntimeError;
                    Log.Error("AlgorithmManager.Run(): RuntimeError: Slice: " + err);
                    return;
                }

                //If its the historical/paper trading models, wait until market orders have been "filled"
                // Manually trigger the event handler to prevent thread switch.
                transactions.ProcessSynchronousEvents();

                //Save the previous time for the sample calculations
                _previousTime = time;

                // Process any required events of the results handler such as sampling assets, equity, or stock prices.
                results.ProcessSynchronousEvents();
            } // End of ForEach feed.Bridge.GetConsumingEnumerable

            // stop timing the loops
            _currentTimeStepTime = DateTime.MinValue;

            //Stream over:: Send the final packet and fire final events:
            Log.Trace("AlgorithmManager.Run(): Firing On End Of Algorithm...");
            try
            {
                algorithm.OnEndOfAlgorithm();
            }
            catch (Exception err)
            {
                _algorithm.Status      = AlgorithmStatus.RuntimeError;
                algorithm.RunTimeError = new Exception("Error running OnEndOfAlgorithm(): " + err.Message, err.InnerException);
                Log.Error("AlgorithmManager.OnEndOfAlgorithm(): " + err);
                return;
            }

            // Process any required events of the results handler such as sampling assets, equity, or stock prices.
            results.ProcessSynchronousEvents(forceProcess: true);

            //Liquidate Holdings for Calculations:
            if (_algorithm.Status == AlgorithmStatus.Liquidated && _liveMode)
            {
                Log.Trace("AlgorithmManager.Run(): Liquidating algorithm holdings...");
                algorithm.Liquidate();
                results.LogMessage("Algorithm Liquidated");
                results.SendStatusUpdate(AlgorithmStatus.Liquidated);
            }

            //Manually stopped the algorithm
            if (_algorithm.Status == AlgorithmStatus.Stopped)
            {
                Log.Trace("AlgorithmManager.Run(): Stopping algorithm...");
                results.LogMessage("Algorithm Stopped");
                results.SendStatusUpdate(AlgorithmStatus.Stopped);
            }

            //Backtest deleted.
            if (_algorithm.Status == AlgorithmStatus.Deleted)
            {
                Log.Trace("AlgorithmManager.Run(): Deleting algorithm...");
                results.DebugMessage("Algorithm Id:(" + job.AlgorithmId + ") Deleted by request.");
                results.SendStatusUpdate(AlgorithmStatus.Deleted);
            }

            //Algorithm finished, send regardless of commands:
            results.SendStatusUpdate(AlgorithmStatus.Completed);

            //Take final samples:
            results.SampleRange(algorithm.GetChartUpdates());
            results.SampleEquity(_previousTime, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4));
            SampleBenchmark(algorithm, results, backtestMode ? _previousTime.Date : _previousTime);

            //Check for divide by zero
            if (portfolioValue == 0m)
            {
                results.SamplePerformance(backtestMode ? _previousTime.Date : _previousTime, 0m);
            }
            else
            {
                results.SamplePerformance(backtestMode ? _previousTime.Date : _previousTime,
                                          Math.Round((algorithm.Portfolio.TotalPortfolioValue - portfolioValue) * 100 / portfolioValue, 10));
            }
        } // End of Run();
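        The Run method above relies on an AddMethodInvoker<T> helper that is not reproduced in this document. A hedged sketch of what it presumably does: look up a user-declared OnData(T) override via reflection, register a Fasterflect delegate for it in the invoker dictionary, and report whether one was found. The exact signature of the real helper may differ.

        // Sketch only: assumed shape of the AddMethodInvoker<T> helper used above.
        private static bool AddMethodInvoker<T>(IAlgorithm algorithm, Dictionary<Type, MethodInvoker> methodInvokers, string methodName = "OnData")
        {
            var methodInfo = algorithm.GetType().GetMethod(methodName, new[] { typeof(T) });
            if (methodInfo == null)
            {
                // No handler declared for this type; the caller skips firing these events.
                return false;
            }

            // DelegateForCallMethod is the same Fasterflect extension used elsewhere in these snippets.
            methodInvokers.Add(typeof(T), methodInfo.DelegateForCallMethod());
            return true;
        }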
Esempio n. 55
0
        /********************************************************
        * CLASS METHODS
        *********************************************************/
        /// <summary>
        /// Launch the algorithm manager to run this strategy
        /// </summary>
        /// <param name="job">Algorithm job</param>
        /// <param name="algorithm">Algorithm instance</param>
        /// <param name="feed">Datafeed object</param>
        /// <param name="transactions">Transaction manager object</param>
        /// <param name="results">Result handler object</param>
        /// <param name="setup">Setup handler object</param>
        /// <param name="realtime">Realtime processing object</param>
        /// <remarks>Modify with caution</remarks>
        public static void Run(AlgorithmNodePacket job, IAlgorithm algorithm, IDataFeed feed, ITransactionHandler transactions, IResultHandler results, ISetupHandler setup, IRealTimeHandler realtime)
        {
            //Initialize:
            var backwardsCompatibilityMode = false;
            var tradebarsType = typeof(TradeBars);
            var ticksType = typeof(Ticks);
            var startingPerformance = setup.StartingCapital;
            var backtestMode = (job.Type == PacketType.BacktestNode);
            var methodInvokers = new Dictionary<Type, MethodInvoker>();

            //Initialize Properties:
            _frontier = setup.StartingDate;
            _runtimeError = null;
            _algorithmId = job.AlgorithmId;
            _algorithmState = AlgorithmStatus.Running;
            _previousTime = setup.StartingDate.Date;

            //Create the method accessors to push generic types into algorithm: Find all OnData events:

            //Algorithm 1.0 Data Accessors.
            //If the user defined these methods, add them in manually. This keeps backwards compatibility with algorithm 1.0.
            var oldTradeBarsMethodInfo = (algorithm.GetType()).GetMethod("OnTradeBar",   new[] { typeof(Dictionary<string, TradeBar>) });
            var oldTicksMethodInfo = (algorithm.GetType()).GetMethod("OnTick", new[] { typeof(Dictionary<string, List<Tick>>) });

            //Algorithm 2.0 Data Generics Accessors.
            //New hidden access to tradebars with custom type.
            var newTradeBarsMethodInfo = (algorithm.GetType()).GetMethod("OnData", new[] { tradebarsType });
            var newTicksMethodInfo = (algorithm.GetType()).GetMethod("OnData", new[] { ticksType });

            if (newTradeBarsMethodInfo == null && newTicksMethodInfo == null)
            {
                backwardsCompatibilityMode = true;
                if (oldTradeBarsMethodInfo != null) methodInvokers.Add(tradebarsType, oldTradeBarsMethodInfo.DelegateForCallMethod());
                if (oldTicksMethodInfo != null) methodInvokers.Add(ticksType, oldTicksMethodInfo.DelegateForCallMethod());
            }
            else
            {
                backwardsCompatibilityMode = false;
                if (newTradeBarsMethodInfo != null) methodInvokers.Add(tradebarsType, newTradeBarsMethodInfo.DelegateForCallMethod());
                if (newTicksMethodInfo != null) methodInvokers.Add(ticksType, newTicksMethodInfo.DelegateForCallMethod());
            }

            //Go through the subscription types and create invokers to trigger the event handlers for each custom type:
            foreach (var config in feed.Subscriptions)
            {
                //If the type is not a TradeBar or Tick, wire up the custom data event handler:
                if (config.Type.Name != "TradeBar" && config.Type.Name != "Tick")
                {
                    //Get the matching method for this event handler - e.g. public void OnData(Quandl data) { .. }
                    var genericMethod = (algorithm.GetType()).GetMethod("OnData", new[] { config.Type });

                    //If we already have a handler for this Type, don't add it to the invokers again.
                    if (methodInvokers.ContainsKey(config.Type)) continue;

                    //If we couldn't find the event handler, let the user know we can't fire that event.
                    if (genericMethod == null)
                    {
                        _runtimeError = new Exception("Data event handler not found, please create a function matching this template: public void OnData(" + config.Type.Name + " data) {  }");
                        _algorithmState = AlgorithmStatus.RuntimeError;
                        return;
                    }
                    methodInvokers.Add(config.Type, genericMethod.DelegateForCallMethod());
                }
            }

            //Loop over the queues: get a data collection, then pass them all into relevant methods in the algorithm.
            Log.Debug("AlgorithmManager.Run(): Algorithm initialized, launching time loop.");
            foreach (var newData in DataStream.GetData(feed, setup.StartingDate))
            {
                //Check this backtest is still running:
                if (_algorithmState != AlgorithmStatus.Running) break;

                //Go over each time stamp we've collected, pass it into the algorithm in order:
                foreach (var time in newData.Keys)
                {
                    //Set the time frontier:
                    _frontier = time;

                    //Execute with TimeLimit Monitor:
                    if (Isolator.IsCancellationRequested) return;

                    //Refresh the realtime event monitor:
                    realtime.SetTime(time);

                    //Fire EOD if the time packet we just processed is greater
                    if (backtestMode && _previousTime.Date != time.Date)
                    {
                        //Sample the portfolio value over time for chart.
                        results.SampleEquity(_previousTime, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4));

                        if (startingPerformance == 0)
                        {
                            results.SamplePerformance(_previousTime.Date, 0);
                        }
                        else
                        {
                            results.SamplePerformance(_previousTime.Date, Math.Round((algorithm.Portfolio.TotalPortfolioValue - startingPerformance) * 100 / startingPerformance, 10));
                        }

                        startingPerformance = algorithm.Portfolio.TotalPortfolioValue;
                    }

                    //Check if the user has signalled Quit:
                    if (algorithm.GetQuit())
                    {
                        _algorithmState = AlgorithmStatus.Quit;
                        break;
                    }

                    //Pass in the new time first:
                    algorithm.SetDateTime(time);

                    //Trigger the data events: Invoke the types we have data for:
                    var oldBars = new Dictionary<string, TradeBar>();
                    var oldTicks = new Dictionary<string, List<Tick>>();
                    var newBars = new TradeBars(time);
                    var newTicks = new Ticks(time);

                    //Invoke all non-tradebars, non-ticks methods:
                    // --> i == Subscription Configuration Index, so we don't need to compare types.
                    foreach (var i in newData[time].Keys)
                    {
                        //Data point and config of this point:
                        var dataPoints = newData[time][i];
                        var config = feed.Subscriptions[i];

                        //Create TradeBars Unified Data --> OR --> invoke generic data event. One loop.
                        foreach (var dataPoint in dataPoints)
                        {
                            //Update the securities properties: first before calling user code to avoid issues with data
                            algorithm.Securities.Update(time, dataPoint);

                            //Update registered consolidators for this symbol index
                            for (var j = 0; j < config.Consolidators.Count; j++)
                            {
                                config.Consolidators[j].Update(dataPoint);
                            }

                            switch (config.Type.Name)
                            {
                                case "TradeBar":
                                    var bar = dataPoint as TradeBar;
                                    try
                                    {
                                        if (bar != null)
                                        {
                                            if (backwardsCompatibilityMode)
                                            {
                                                if (!oldBars.ContainsKey(bar.Symbol)) oldBars.Add(bar.Symbol, bar);
                                            }
                                            else
                                            {
                                                if (!newBars.ContainsKey(bar.Symbol)) newBars.Add(bar.Symbol, bar);
                                            }
                                        }
                                    }
                                    catch (Exception err)
                                    {
                                        Log.Error(time.ToLongTimeString() + " >> " + bar.Time.ToLongTimeString() + " >> " + bar.Symbol + " >> " + bar.Value.ToString("C"));
                                        Log.Error("AlgorithmManager.Run(): Failed to add TradeBar (" + bar.Symbol + ") Time: (" + time.ToLongTimeString() + ") Count:(" + newBars.Count + ") " + err.Message);
                                    }
                                    break;

                                case "Tick":
                                    var tick = dataPoint as Tick;
                                    if (tick != null)
                                    {
                                         if (backwardsCompatibilityMode) {
                                             if (!oldTicks.ContainsKey(tick.Symbol)) { oldTicks.Add(tick.Symbol, new List<Tick>()); }
                                             oldTicks[tick.Symbol].Add(tick);
                                         }
                                         else
                                         {
                                             if (!newTicks.ContainsKey(tick.Symbol)) { newTicks.Add(tick.Symbol, new List<Tick>()); }
                                             newTicks[tick.Symbol].Add(tick);
                                         }
                                    }
                                    break;

                                default:
                                    //Send data into the generic algorithm event handlers
                                    try
                                    {
                                        methodInvokers[config.Type](algorithm, dataPoint);
                                    }
                                    catch (Exception err)
                                    {
                                        _runtimeError = err;
                                        _algorithmState = AlgorithmStatus.RuntimeError;
                                        Log.Debug("AlgorithmManager.Run(): RuntimeError: Custom Data: " + err.Message + " STACK >>> " + err.StackTrace);
                                        return;
                                    }
                                    break;
                            }
                        }
                    }

                    //After we've fired all other events in this second, fire the pricing events:
                    if (backwardsCompatibilityMode)
                    {
                        //Log.Debug("AlgorithmManager.Run(): Invoking v1.0 Event Handlers...");
                        try
                        {
                            if (oldTradeBarsMethodInfo != null && oldBars.Count > 0) methodInvokers[tradebarsType](algorithm, oldBars);
                            if (oldTicksMethodInfo != null && oldTicks.Count > 0) methodInvokers[ticksType](algorithm, oldTicks);
                        }
                        catch (Exception err)
                        {
                            _runtimeError = err;
                            _algorithmState = AlgorithmStatus.RuntimeError;
                            Log.Debug("AlgorithmManager.Run(): RuntimeError: Backwards Compatibility Mode: " + err.Message + " STACK >>> " + err.StackTrace);
                            return;
                        }
                    }
                    else
                    {
                        //Log.Debug("AlgorithmManager.Run(): Invoking v2.0 Event Handlers...");
                        try
                        {
                            if (newTradeBarsMethodInfo != null && newBars.Count > 0) methodInvokers[tradebarsType](algorithm, newBars);
                            if (newTicksMethodInfo != null && newTicks.Count > 0) methodInvokers[ticksType](algorithm, newTicks);
                        }
                        catch (Exception err)
                        {
                            _runtimeError = err;
                            _algorithmState = AlgorithmStatus.RuntimeError;
                            Log.Debug("AlgorithmManager.Run(): RuntimeError: New Style Mode: " + err.Message + " STACK >>> " + err.StackTrace);
                            return;
                        }
                    }

                    //If its the historical/paper trading models, wait until market orders have been "filled"
                    // Manually trigger the event handler to prevent thread switch.
                    transactions.ProcessSynchronousEvents();

                    //Save the previous time for the sample calculations
                    _previousTime = time;

                } // End of Time Loop

                // Process any required events of the results handler such as sampling assets, equity, or stock prices.
                results.ProcessSynchronousEvents();
            } // End of ForEach DataStream

            //Stream over:: Send the final packet and fire final events:
            Log.Trace("AlgorithmManager.Run(): Firing On End Of Algorithm...");
            try
            {
                algorithm.OnEndOfAlgorithm();
            }
            catch (Exception err)
            {
                _algorithmState = AlgorithmStatus.RuntimeError;
                _runtimeError = new Exception("Error running OnEndOfAlgorithm(): " + err.Message, err.InnerException);
                Log.Debug("AlgorithmManager.OnEndOfAlgorithm(): " + err.Message + " STACK >>> " + err.StackTrace);
                return;
            }

            // Process any required events of the results handler such as sampling assets, equity, or stock prices.
            results.ProcessSynchronousEvents(forceProcess: true);

            //Liquidate Holdings for Calculations:
            if (_algorithmState == AlgorithmStatus.Liquidated || !Engine.LiveMode)
            {
                Log.Trace("AlgorithmManager.Run(): Liquidating algorithm holdings...");
                algorithm.Liquidate();
                results.LogMessage("Algorithm Liquidated");
                results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Liquidated);
            }

            //Manually stopped the algorithm
            if (_algorithmState == AlgorithmStatus.Stopped)
            {
                Log.Trace("AlgorithmManager.Run(): Stopping algorithm...");
                results.LogMessage("Algorithm Stopped");
                results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Stopped);
            }

            //Backtest deleted.
            if (_algorithmState == AlgorithmStatus.Deleted)
            {
                Log.Trace("AlgorithmManager.Run(): Deleting algorithm...");
                results.DebugMessage("Algorithm Id:(" + job.AlgorithmId + ") Deleted by request.");
                results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Deleted);
            }

            //Algorithm finished, send regardless of commands:
            results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Completed);

            //Take final samples:
            results.SampleRange(algorithm.GetChartUpdates());
            results.SampleEquity(_frontier, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4));
            results.SamplePerformance(_frontier, Math.Round((algorithm.Portfolio.TotalPortfolioValue - startingPerformance) * 100 / startingPerformance, 10));
        }
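        For context, the reflection above resolves one of two handler styles on the user's algorithm: the legacy 1.0 signatures (OnTradeBar/OnTick with string-keyed dictionaries) or the 2.0 generic OnData overloads. The stubs below are illustrative only and not taken from this document; an algorithm would normally define one style or the other, and declaring a 2.0 handler disables backwards-compatibility mode.

        // Illustrative handler shapes; QCAlgorithm is the standard algorithm base class.
        public class ExampleAlgorithm : QCAlgorithm
        {
            // Algorithm 1.0 style (backwards-compatibility mode):
            public void OnTradeBar(Dictionary<string, TradeBar> data) { /* ... */ }
            public void OnTick(Dictionary<string, List<Tick>> data) { /* ... */ }

            // Algorithm 2.0 style (preferred; keyed collections of bars/ticks):
            public void OnData(TradeBars data) { /* ... */ }
            public void OnData(Ticks data) { /* ... */ }
        }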