Example No. 1
        /// <summary>
        /// Creates the correct enumerator factory for the given request
        /// </summary>
        private ISubscriptionEnumeratorFactory GetEnumeratorFactory(SubscriptionRequest request)
        {
            if (request.IsUniverseSubscription)
            {
                if (request.Universe is ITimeTriggeredUniverse)
                {
                    var universe = request.Universe as UserDefinedUniverse;
                    if (universe != null)
                    {
                        // Trigger universe selection when security added/removed after Initialize
                        universe.CollectionChanged += (sender, args) =>
                        {
                            var items =
                                args.Action == NotifyCollectionChangedAction.Add ? args.NewItems :
                                args.Action == NotifyCollectionChangedAction.Remove ? args.OldItems : null;

                            if (items == null)
                            {
                                return;
                            }

                            var symbol = items.OfType <Symbol>().FirstOrDefault();
                            if (symbol == null)
                            {
                                return;
                            }

                            var collection = new BaseDataCollection(_algorithm.UtcTime, symbol);
                            var changes    = _universeSelection.ApplyUniverseSelection(universe, _algorithm.UtcTime, collection);
                            _algorithm.OnSecuritiesChanged(changes);
                        };
                    }

                    return(new TimeTriggeredUniverseSubscriptionEnumeratorFactory(request.Universe as ITimeTriggeredUniverse, MarketHoursDatabase.FromDataFolder()));
                }
                if (request.Configuration.Type == typeof(CoarseFundamental))
                {
                    return(new BaseDataCollectionSubscriptionEnumeratorFactory());
                }
                if (request.Universe is OptionChainUniverse)
                {
                    return(new OptionChainUniverseSubscriptionEnumeratorFactory((req, e) => ConfigureEnumerator(req, true, e),
                                                                                _mapFileProvider.Get(request.Security.Symbol.ID.Market), _factorFileProvider));
                }
                if (request.Universe is FuturesChainUniverse)
                {
                    return(new FuturesChainUniverseSubscriptionEnumeratorFactory((req, e) => ConfigureEnumerator(req, true, e)));
                }
            }

            return(_subscriptionfactory);
        }
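The CollectionChanged handler above follows a common pattern: pick NewItems or OldItems depending on the action, bail out on anything else, and react to the first affected item. A minimal standalone sketch of that pattern using only BCL types (an ObservableCollection of strings stands in for the universe; no LEAN types are used):

    using System;
    using System.Collections.ObjectModel;
    using System.Collections.Specialized;
    using System.Linq;

    public static class CollectionChangedDemo
    {
        public static void Main()
        {
            var universe = new ObservableCollection<string>();

            universe.CollectionChanged += (sender, args) =>
            {
                // select the affected items based on whether this was an add or a remove
                var items =
                    args.Action == NotifyCollectionChangedAction.Add ? args.NewItems :
                    args.Action == NotifyCollectionChangedAction.Remove ? args.OldItems : null;

                if (items == null)
                {
                    return;
                }

                // react to the first affected item, mirroring the FirstOrDefault() call above
                var symbol = items.OfType<string>().FirstOrDefault();
                Console.WriteLine(args.Action + ": " + symbol);
            };

            universe.Add("SPY");    // prints "Add: SPY"
            universe.Remove("SPY"); // prints "Remove: SPY"
        }
    }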
Example No. 2
        /// <summary>
        /// Creates the correct enumerator factory for the given request
        /// </summary>
        private ISubscriptionEnumeratorFactory GetEnumeratorFactory(SubscriptionRequest request)
        {
            if (request.IsUniverseSubscription)
            {
                if (request.Universe is UserDefinedUniverse)
                {
                    // Trigger universe selection when security added/removed after Initialize
                    var universe = (UserDefinedUniverse)request.Universe;
                    universe.CollectionChanged += (sender, args) =>
                    {
                        var items =
                            args.Action == NotifyCollectionChangedAction.Add ? args.NewItems :
                            args.Action == NotifyCollectionChangedAction.Remove ? args.OldItems : null;

                        if (items == null || _frontierUtc == DateTime.MinValue)
                        {
                            return;
                        }

                        var symbol = items.OfType <Symbol>().FirstOrDefault();
                        if (symbol == null)
                        {
                            return;
                        }

                        var collection = new BaseDataCollection(_frontierUtc, symbol);
                        var changes    = _universeSelection.ApplyUniverseSelection(universe, _frontierUtc, collection);
                        _algorithm.OnSecuritiesChanged(changes);
                    };

                    return(new UserDefinedUniverseSubscriptionEnumeratorFactory(request.Universe as UserDefinedUniverse, MarketHoursDatabase.FromDataFolder()));
                }
                if (request.Configuration.Type == typeof(CoarseFundamental))
                {
                    return(new BaseDataCollectionSubscriptionEnumeratorFactory());
                }
                if (request.Universe is OptionChainUniverse)
                {
                    return(new OptionChainUniverseSubscriptionEnumeratorFactory((req, e) => ConfigureEnumerator(req, true, e)));
                }
            }

            var mapFileResolver = request.Configuration.SecurityType == SecurityType.Equity
                ? _mapFileProvider.Get(request.Security.Symbol.ID.Market)
                : MapFileResolver.Empty;

            return(new PostCreateConfigureSubscriptionEnumeratorFactory(
                       new SubscriptionDataReaderSubscriptionEnumeratorFactory(_resultHandler, mapFileResolver, _factorFileProvider, _dataFileProvider, false, true),
                       enumerator => ConfigureEnumerator(request, false, enumerator)
                       ));
        }
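This variant dispatches on the concrete universe type and falls back to a reader-based factory wrapped in a post-create configuration step. At its core it is type-based factory dispatch: most specific checks first, general fallback last. A simplified sketch of that shape, where every class and interface below is a hypothetical stand-in, not a LEAN type:

    using System;

    // hypothetical stand-ins for the factory and universe types used above
    public interface IFeedEnumeratorFactory { }
    public sealed class ManualUniverseFactory : IFeedEnumeratorFactory { }
    public sealed class OptionUniverseFactory : IFeedEnumeratorFactory { }
    public sealed class DefaultFactory : IFeedEnumeratorFactory { }

    public abstract class UniverseBase { }
    public sealed class ManualUniverse : UniverseBase { }
    public sealed class OptionUniverse : UniverseBase { }

    public sealed class FeedRequest
    {
        public bool IsUniverseSubscription { get; set; }
        public UniverseBase Universe { get; set; }
    }

    public static class FactoryDispatch
    {
        // mirror the shape of GetEnumeratorFactory: check specific universe types, then fall back
        public static IFeedEnumeratorFactory GetFactory(FeedRequest request)
        {
            if (request.IsUniverseSubscription)
            {
                if (request.Universe is ManualUniverse)
                {
                    return new ManualUniverseFactory();
                }
                if (request.Universe is OptionUniverse)
                {
                    return new OptionUniverseFactory();
                }
            }
            return new DefaultFactory();
        }

        public static void Main()
        {
            var request = new FeedRequest { IsUniverseSubscription = true, Universe = new OptionUniverse() };
            Console.WriteLine(GetFactory(request).GetType().Name); // OptionUniverseFactory
        }
    }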
        /// <summary>
        /// Syncs the specified subscriptions. The frontier time used for synchronization is
        /// managed internally and dependent upon previous synchronization operations.
        /// </summary>
        /// <param name="subscriptions">The subscriptions to sync</param>
        public TimeSlice Sync(IEnumerable <Subscription> subscriptions)
        {
            var delayedSubscriptionFinished = false;
            var changes = SecurityChanges.None;
            var data    = new List <DataFeedPacket>();
            // NOTE: Tight coupling in UniverseSelection.ApplyUniverseSelection
            var universeData = new Dictionary <Universe, BaseDataCollection>();
            var universeDataForTimeSliceCreate = new Dictionary <Universe, BaseDataCollection>();

            _frontierTimeProvider.SetCurrentTimeUtc(_timeProvider.GetUtcNow());
            var frontierUtc = _frontierTimeProvider.GetUtcNow();

            SecurityChanges newChanges;

            do
            {
                newChanges = SecurityChanges.None;
                foreach (var subscription in subscriptions)
                {
                    if (subscription.EndOfStream)
                    {
                        OnSubscriptionFinished(subscription);
                        continue;
                    }

                    // prime if needed
                    if (subscription.Current == null)
                    {
                        if (!subscription.MoveNext())
                        {
                            OnSubscriptionFinished(subscription);
                            continue;
                        }
                    }

                    var packet = new DataFeedPacket(subscription.Security, subscription.Configuration, subscription.RemovedFromUniverse);

                    while (subscription.Current != null && subscription.Current.EmitTimeUtc <= frontierUtc)
                    {
                        packet.Add(subscription.Current.Data);

                        if (!subscription.MoveNext())
                        {
                            delayedSubscriptionFinished = true;
                            break;
                        }
                    }

                    if (packet.Count > 0)
                    {
                        // we have new universe data to select based on, store the subscription data until the end
                        if (!subscription.IsUniverseSelectionSubscription)
                        {
                            data.Add(packet);
                        }
                        else
                        {
                            // assume that if the first item is a base data collection then the enumerator handled the aggregation,
                            // otherwise, load all the data into a new collection instance
                            var packetBaseDataCollection = packet.Data[0] as BaseDataCollection;
                            var packetData = packetBaseDataCollection == null
                                ? packet.Data
                                : packetBaseDataCollection.Data;

                            BaseDataCollection collection;
                            if (universeData.TryGetValue(subscription.Universes.Single(), out collection))
                            {
                                collection.Data.AddRange(packetData);
                            }
                            else
                            {
                                if (packetBaseDataCollection is OptionChainUniverseDataCollection)
                                {
                                    var current = packetBaseDataCollection as OptionChainUniverseDataCollection;
                                    collection = new OptionChainUniverseDataCollection(frontierUtc, subscription.Configuration.Symbol, packetData, current?.Underlying);
                                }
                                else if (packetBaseDataCollection is FuturesChainUniverseDataCollection)
                                {
                                    collection = new FuturesChainUniverseDataCollection(frontierUtc, subscription.Configuration.Symbol, packetData);
                                }
                                else
                                {
                                    collection = new BaseDataCollection(frontierUtc, subscription.Configuration.Symbol, packetData);
                                }

                                universeData[subscription.Universes.Single()] = collection;
                            }
                        }
                    }

                    if (subscription.IsUniverseSelectionSubscription &&
                        subscription.Universes.Single().DisposeRequested ||
                        delayedSubscriptionFinished)
                    {
                        delayedSubscriptionFinished = false;
                        // we need to do this after all usages of subscription.Universes
                        OnSubscriptionFinished(subscription);
                    }
                }

                foreach (var kvp in universeData)
                {
                    var universe           = kvp.Key;
                    var baseDataCollection = kvp.Value;
                    universeDataForTimeSliceCreate[universe] = baseDataCollection;
                    newChanges += _universeSelection.ApplyUniverseSelection(universe, frontierUtc, baseDataCollection);
                }
                universeData.Clear();

                changes += newChanges;
            }while (newChanges != SecurityChanges.None);

            var timeSlice = _timeSliceFactory.Create(frontierUtc, data, changes, universeDataForTimeSliceCreate);

            return(timeSlice);
        }
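The do/while around the subscription loop is the key detail: universe selection can add securities whose data is already at or before the frontier, so the pass repeats until ApplyUniverseSelection reports no further changes. A stripped-down sketch of that convergence loop, with a hypothetical ApplySelection placeholder instead of the SecurityChanges API:

    using System;
    using System.Collections.Generic;

    public static class SelectionLoopSketch
    {
        // hypothetical: returns the number of securities added/removed by one selection pass
        private static int ApplySelection(Queue<int> pendingChanges)
        {
            return pendingChanges.Count > 0 ? pendingChanges.Dequeue() : 0;
        }

        public static int RunUntilStable(Queue<int> pendingChanges)
        {
            var totalChanges = 0;
            int newChanges;
            do
            {
                // one pass: pull data for the current subscriptions, then apply selection
                newChanges = ApplySelection(pendingChanges);
                totalChanges += newChanges;
            }
            while (newChanges != 0); // stop once a pass yields no further changes

            return totalChanges;
        }

        public static void Main()
        {
            // two passes produce changes, the third is empty and terminates the loop
            Console.WriteLine(RunUntilStable(new Queue<int>(new[] { 3, 1 }))); // prints 4
        }
    }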
Example No. 4
        /// <summary>
        /// Syncs the specified subscriptions at the frontier time
        /// </summary>
        /// <param name="frontier">The time used for syncing, data in the future won't be included in this time slice</param>
        /// <param name="subscriptions">The subscriptions to sync</param>
        /// <param name="sliceTimeZone">The time zone of the created slice object</param>
        /// <param name="cashBook">The cash book, used for creating the cash book updates</param>
        /// <param name="nextFrontier">The next frontier time as determined by the first piece of data in the future ahead of the frontier.
        /// This value will equal DateTime.MaxValue when the subscriptions are all finished</param>
        /// <returns>A time slice for the specified frontier time</returns>
        public TimeSlice Sync(DateTime frontier, IEnumerable <Subscription> subscriptions, DateTimeZone sliceTimeZone, CashBook cashBook, out DateTime nextFrontier)
        {
            var changes = SecurityChanges.None;

            nextFrontier = DateTime.MaxValue;
            var earlyBirdTicks = nextFrontier.Ticks;
            var data           = new List <DataFeedPacket>();
            var universeData   = new Dictionary <Universe, BaseDataCollection>();

            SecurityChanges newChanges;

            do
            {
                universeData.Clear();
                newChanges = SecurityChanges.None;
                foreach (var subscription in subscriptions)
                {
                    if (subscription.EndOfStream)
                    {
                        OnSubscriptionFinished(subscription);
                        continue;
                    }

                    // prime if needed
                    if (subscription.Current == null)
                    {
                        if (!subscription.MoveNext())
                        {
                            OnSubscriptionFinished(subscription);
                            continue;
                        }
                    }

                    var packet = new DataFeedPacket(subscription.Security, subscription.Configuration);
                    data.Add(packet);

                    var configuration      = subscription.Configuration;
                    var offsetProvider     = subscription.OffsetProvider;
                    var currentOffsetTicks = offsetProvider.GetOffsetTicks(frontier);
                    while (subscription.Current.EndTime.Ticks - currentOffsetTicks <= frontier.Ticks)
                    {
                        // we want bars rounded using their subscription times, we make a clone
                        // so we don't interfere with the enumerator's internal logic
                        var clone = subscription.Current.Clone(subscription.Current.IsFillForward);
                        clone.Time = clone.Time.ExchangeRoundDown(configuration.Increment, subscription.Security.Exchange.Hours, configuration.ExtendedMarketHours);
                        packet.Add(clone);
                        if (!subscription.MoveNext())
                        {
                            OnSubscriptionFinished(subscription);
                            break;
                        }
                    }

                    // we have new universe data to select based on, store the subscription data until the end
                    if (subscription.IsUniverseSelectionSubscription && packet.Count > 0)
                    {
                        // assume that if the first item is a base data collection then the enumerator handled the aggregation,
                        // otherwise, load all the data into a new collection instance
                        var packetBaseDataCollection = packet.Data[0] as BaseDataCollection;
                        var packetData = packetBaseDataCollection == null
                            ? packet.Data
                            : packetBaseDataCollection.Data;

                        BaseDataCollection collection;
                        if (!universeData.TryGetValue(subscription.Universe, out collection))
                        {
                            if (packetBaseDataCollection is OptionChainUniverseDataCollection)
                            {
                                var current    = subscription.Current as OptionChainUniverseDataCollection;
                                var underlying = current != null ? current.Underlying : null;
                                collection = new OptionChainUniverseDataCollection(frontier, subscription.Configuration.Symbol, packetData, underlying);
                            }
                            else
                            {
                                collection = new BaseDataCollection(frontier, subscription.Configuration.Symbol, packetData);
                            }

                            universeData[subscription.Universe] = collection;
                        }
                        else
                        {
                            collection.Data.AddRange(packetData);
                        }
                    }

                    if (subscription.Current != null)
                    {
                        // take the earliest between the next piece of data or the next tz discontinuity
                        earlyBirdTicks = Math.Min(earlyBirdTicks, Math.Min(subscription.Current.EndTime.Ticks - currentOffsetTicks, offsetProvider.GetNextDiscontinuity()));
                    }
                }

                foreach (var kvp in universeData)
                {
                    var universe           = kvp.Key;
                    var baseDataCollection = kvp.Value;
                    newChanges += _universeSelection.ApplyUniverseSelection(universe, frontier, baseDataCollection);
                }

                changes += newChanges;
            }while (newChanges != SecurityChanges.None);

            nextFrontier = new DateTime(Math.Max(earlyBirdTicks, frontier.Ticks), DateTimeKind.Utc);

            return(TimeSlice.Create(frontier, sliceTimeZone, cashBook, data, changes));
        }
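The earlyBirdTicks bookkeeping determines nextFrontier: the earliest upcoming data time (or time-zone discontinuity) across all subscriptions, clamped so the frontier never moves backwards. A minimal sketch of that calculation over plain DateTime ticks, with an array of upcoming times standing in for the subscriptions:

    using System;

    public static class FrontierSketch
    {
        // earliest upcoming time across all subscriptions, but never earlier than the current frontier
        public static DateTime NextFrontier(DateTime frontier, DateTime[] upcomingDataTimes)
        {
            var earlyBirdTicks = DateTime.MaxValue.Ticks;
            foreach (var time in upcomingDataTimes)
            {
                earlyBirdTicks = Math.Min(earlyBirdTicks, time.Ticks);
            }
            // clamp: the frontier is monotonically non-decreasing
            return new DateTime(Math.Max(earlyBirdTicks, frontier.Ticks), DateTimeKind.Utc);
        }

        public static void Main()
        {
            var frontier = new DateTime(2024, 1, 2, 10, 0, 0, DateTimeKind.Utc);
            var upcoming = new[]
            {
                new DateTime(2024, 1, 2, 10, 5, 0, DateTimeKind.Utc),
                new DateTime(2024, 1, 2, 10, 1, 0, DateTimeKind.Utc)
            };
            // prints the 10:01 time, the earliest future data point
            Console.WriteLine(NextFrontier(frontier, upcoming));
        }
    }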
Example No. 5
        /// <summary>
        /// Syncs the specified subscriptions. The frontier time used for synchronization is
        /// managed internally and dependent upon previous synchronization operations.
        /// </summary>
        /// <param name="subscriptions">The subscriptions to sync</param>
        /// <param name="cancellationToken">The cancellation token to stop enumeration</param>
        public IEnumerable <TimeSlice> Sync(IEnumerable <Subscription> subscriptions,
                                            CancellationToken cancellationToken)
        {
            var delayedSubscriptionFinished = new Queue <Subscription>();

            while (!cancellationToken.IsCancellationRequested)
            {
                var changes = SecurityChanges.None;
                var data    = new List <DataFeedPacket>(1);
                // NOTE: Tight coupling in UniverseSelection.ApplyUniverseSelection
                var universeData = new Dictionary <Universe, BaseDataCollection>();
                var universeDataForTimeSliceCreate = new Dictionary <Universe, BaseDataCollection>();

                _frontierTimeProvider.SetCurrentTimeUtc(_timeProvider.GetUtcNow());
                var frontierUtc = _frontierTimeProvider.GetUtcNow();

                SecurityChanges newChanges;
                do
                {
                    newChanges = SecurityChanges.None;
                    foreach (var subscription in subscriptions)
                    {
                        if (subscription.EndOfStream)
                        {
                            OnSubscriptionFinished(subscription);
                            continue;
                        }

                        // prime if needed
                        if (subscription.Current == null)
                        {
                            if (!subscription.MoveNext())
                            {
                                OnSubscriptionFinished(subscription);
                                continue;
                            }
                        }

                        DataFeedPacket packet = null;

                        while (subscription.Current != null && subscription.Current.EmitTimeUtc <= frontierUtc)
                        {
                            if (packet == null)
                            {
                                // for performance, let's be selfish about creating a new instance
                                packet = new DataFeedPacket(
                                    subscription.Security,
                                    subscription.Configuration,
                                    subscription.RemovedFromUniverse
                                    );
                            }
                            packet.Add(subscription.Current.Data);

                            if (!subscription.MoveNext())
                            {
                                delayedSubscriptionFinished.Enqueue(subscription);
                                break;
                            }
                        }

                        if (packet?.Count > 0)
                        {
                            // we have new universe data to select based on, store the subscription data until the end
                            if (!subscription.IsUniverseSelectionSubscription)
                            {
                                data.Add(packet);
                            }
                            else
                            {
                                // assume that if the first item is a base data collection then the enumerator handled the aggregation,
                                // otherwise, load all the data into a new collection instance
                                var packetBaseDataCollection = packet.Data[0] as BaseDataCollection;
                                var packetData = packetBaseDataCollection == null
                                    ? packet.Data
                                    : packetBaseDataCollection.Data;

                                BaseDataCollection collection;
                                if (universeData.TryGetValue(subscription.Universes.Single(), out collection))
                                {
                                    collection.Data.AddRange(packetData);
                                }
                                else
                                {
                                    if (packetBaseDataCollection is OptionChainUniverseDataCollection)
                                    {
                                        var current = packetBaseDataCollection as OptionChainUniverseDataCollection;
                                        collection = new OptionChainUniverseDataCollection(frontierUtc, subscription.Configuration.Symbol, packetData, current?.Underlying);
                                    }
                                    else if (packetBaseDataCollection is FuturesChainUniverseDataCollection)
                                    {
                                        collection = new FuturesChainUniverseDataCollection(frontierUtc, subscription.Configuration.Symbol, packetData);
                                    }
                                    else
                                    {
                                        collection = new BaseDataCollection(frontierUtc, subscription.Configuration.Symbol, packetData);
                                    }

                                    universeData[subscription.Universes.Single()] = collection;
                                }
                            }
                        }

                        if (subscription.IsUniverseSelectionSubscription &&
                            subscription.Universes.Single().DisposeRequested)
                        {
                            // we need to do this after all usages of subscription.Universes
                            OnSubscriptionFinished(subscription);
                        }
                    }

                    if (universeData.Any())
                    {
                        // if we are going to perform universe selection we emit an empty
                        // time pulse to align algorithm time with current frontier
                        yield return(_timeSliceFactory.CreateTimePulse(frontierUtc));
                    }

                    foreach (var kvp in universeData)
                    {
                        var universe           = kvp.Key;
                        var baseDataCollection = kvp.Value;
                        universeDataForTimeSliceCreate[universe] = baseDataCollection;
                        newChanges += _universeSelection.ApplyUniverseSelection(universe, frontierUtc, baseDataCollection);
                    }
                    universeData.Clear();

                    changes += newChanges;
                }while (newChanges != SecurityChanges.None ||
                        _universeSelection.AddPendingInternalDataFeeds(frontierUtc));

                var timeSlice = _timeSliceFactory.Create(frontierUtc, data, changes, universeDataForTimeSliceCreate);

                while (delayedSubscriptionFinished.Count > 0)
                {
                    // these subscriptions added valid data to the packet
                    // we need to trigger OnSubscriptionFinished after we create the TimeSlice
                    // else it will drop the data
                    var subscription = delayedSubscriptionFinished.Dequeue();
                    OnSubscriptionFinished(subscription);
                }

                yield return(timeSlice);
            }
        }
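Two details distinguish this overload: it is a yield-returning iterator driven by a CancellationToken, and subscriptions that end while contributing to the current batch are parked in a queue and finished only after the TimeSlice is emitted, so their final data is not lost. A compact sketch of that deferred-cleanup pattern over plain enumerators (the source and batch types are illustrative, not LEAN's Subscription or TimeSlice):

    using System;
    using System.Collections.Generic;
    using System.Threading;

    public static class DeferredFinishSketch
    {
        // drain each source once per pass; dispose exhausted sources only after the batch is yielded
        public static IEnumerable<List<int>> Sync(List<IEnumerator<int>> sources, CancellationToken token)
        {
            var finished = new Queue<IEnumerator<int>>();

            while (!token.IsCancellationRequested && sources.Count > 0)
            {
                var batch = new List<int>();
                foreach (var source in sources)
                {
                    if (source.MoveNext())
                    {
                        batch.Add(source.Current);
                    }
                    else
                    {
                        // defer removal: the batch (time slice) must be emitted first
                        finished.Enqueue(source);
                    }
                }

                yield return batch;

                // now it is safe to finish the exhausted sources
                while (finished.Count > 0)
                {
                    var source = finished.Dequeue();
                    source.Dispose();
                    sources.Remove(source);
                }
            }
        }
    }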
Example No. 6
        /// <summary>
        /// Runs a single backtest/live job from the job queue
        /// </summary>
        /// <param name="job">The algorithm job to be processed</param>
        /// <param name="assemblyPath">The path to the algorithm's assembly</param>
        public void Run(AlgorithmNodePacket job, string assemblyPath)
        {
            var algorithm = default(IAlgorithm);
            var algorithmManager = new AlgorithmManager(_liveMode);

            //Start monitoring the backtest active status:
            var statusPing = new StateCheck.Ping(algorithmManager, _systemHandlers.Api, _algorithmHandlers.Results, _systemHandlers.Notify, job);
            var statusPingThread = new Thread(statusPing.Run);
            statusPingThread.Start();

            try
            {
                //Reset thread holders.
                var initializeComplete = false;
                Thread threadFeed = null;
                Thread threadTransactions = null;
                Thread threadResults = null;
                Thread threadRealTime = null;

                //-> Initialize messaging system
                _systemHandlers.Notify.SetChannel(job.Channel);

                //-> Set the result handler type for this algorithm job, and launch the associated result thread.
                _algorithmHandlers.Results.Initialize(job, _systemHandlers.Notify, _systemHandlers.Api, _algorithmHandlers.DataFeed, _algorithmHandlers.Setup, _algorithmHandlers.Transactions);

                threadResults = new Thread(_algorithmHandlers.Results.Run, 0) {Name = "Result Thread"};
                threadResults.Start();

                IBrokerage brokerage = null;
                try
                {
                    // Save algorithm to cache, load algorithm instance:
                    algorithm = _algorithmHandlers.Setup.CreateAlgorithmInstance(assemblyPath, job.Language);

                    // set the history provider before setting up the algorithm
                    _algorithmHandlers.HistoryProvider.Initialize(job, progress =>
                    {
                        // send progress updates to the result handler only during initialization
                        if (!algorithm.GetLocked() || algorithm.IsWarmingUp)
                        {
                            _algorithmHandlers.Results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.History,
                                string.Format("Processing history {0}%...", progress));
                        }
                    });
                    algorithm.HistoryProvider = _algorithmHandlers.HistoryProvider;

                    // initialize the default brokerage message handler
                    algorithm.BrokerageMessageHandler = new DefaultBrokerageMessageHandler(algorithm, job, _algorithmHandlers.Results, _systemHandlers.Api);

                    //Initialize the internal state of algorithm and job: executes the algorithm.Initialize() method.
                    initializeComplete = _algorithmHandlers.Setup.Setup(algorithm, out brokerage, job, _algorithmHandlers.Results, _algorithmHandlers.Transactions, _algorithmHandlers.RealTime);

                    // set this again now that we've actually added securities
                    _algorithmHandlers.Results.SetAlgorithm(algorithm);

                    //If there are any reasons it failed, pass these back to the IDE.
                    if (!initializeComplete || algorithm.ErrorMessages.Count > 0 || _algorithmHandlers.Setup.Errors.Count > 0)
                    {
                        initializeComplete = false;
                        //Get all the error messages: internal in algorithm and external in setup handler.
                        var errorMessage = String.Join(",", algorithm.ErrorMessages);
                        errorMessage += String.Join(",", _algorithmHandlers.Setup.Errors);
                        Log.Error("Engine.Run(): " + errorMessage);
                        _algorithmHandlers.Results.RuntimeError(errorMessage);
                        _systemHandlers.Api.SetAlgorithmStatus(job.AlgorithmId, AlgorithmStatus.RuntimeError, errorMessage);
                    }
                }
                catch (Exception err)
                {
                    var runtimeMessage = "Algorithm.Initialize() Error: " + err.Message + " Stack Trace: " + err.StackTrace;
                    _algorithmHandlers.Results.RuntimeError(runtimeMessage, err.StackTrace);
                    _systemHandlers.Api.SetAlgorithmStatus(job.AlgorithmId, AlgorithmStatus.RuntimeError, runtimeMessage);
                }

                //-> Using the job + initialization: load the designated handlers:
                if (initializeComplete)
                {
                    //-> Reset the backtest stopwatch; we're now running the algorithm.
                    var startTime = DateTime.Now;

                    //Set algorithm as locked; set it to live mode if we're trading live, and set it to locked for no further updates.
                    algorithm.SetAlgorithmId(job.AlgorithmId);
                    algorithm.SetLocked();

                    //Wire up the universe selection event handler before kicking off the data feed
                    var universeSelection = new UniverseSelection(_algorithmHandlers.DataFeed, algorithm, _liveMode);
                    _algorithmHandlers.DataFeed.UniverseSelection += (sender, args) => universeSelection.ApplyUniverseSelection(args);

                    //Load the associated handlers for data, transaction and realtime events:
                    _algorithmHandlers.DataFeed.Initialize(algorithm, job, _algorithmHandlers.Results);
                    _algorithmHandlers.Transactions.Initialize(algorithm, brokerage, _algorithmHandlers.Results);
                    _algorithmHandlers.RealTime.Setup(algorithm, job, _algorithmHandlers.Results, _systemHandlers.Api);

                    // wire up the brokerage message handler
                    brokerage.Message += (sender, message) => algorithm.BrokerageMessageHandler.Handle(message);

                    //Send status to user the algorithm is now executing.
                    _algorithmHandlers.Results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Running);

                    //Launch the data, transaction and realtime handlers into dedicated threads
                    threadFeed = new Thread(_algorithmHandlers.DataFeed.Run) {Name = "DataFeed Thread"};
                    threadTransactions = new Thread(_algorithmHandlers.Transactions.Run) {Name = "Transaction Thread"};
                    threadRealTime = new Thread(_algorithmHandlers.RealTime.Run) {Name = "RealTime Thread"};

                    //Launch the data feed, result sending, and transaction models/handlers in separate threads.
                    threadFeed.Start(); // Data feed pushing data packets into thread bridge;
                    threadTransactions.Start(); // Transaction modeller scanning new order requests
                    threadRealTime.Start(); // RealTime scan time for time based events:

                    // Result manager scanning message queue: (started earlier)
                    _algorithmHandlers.Results.DebugMessage(string.Format("Launching analysis for {0} with LEAN Engine v{1}", job.AlgorithmId, Constants.Version));

                    try
                    {
                        //Create a new engine isolator class
                        var isolator = new Isolator();

                        // Execute the Algorithm Code:
                        var complete = isolator.ExecuteWithTimeLimit(_algorithmHandlers.Setup.MaximumRuntime, algorithmManager.TimeLoopWithinLimits, () =>
                        {
                            try
                            {
                                //Run Algorithm Job:
                                // -> Using this Data Feed,
                                // -> Send Orders to this TransactionHandler,
                                // -> Send Results to ResultHandler.
                                algorithmManager.Run(job, algorithm, _algorithmHandlers.DataFeed, _algorithmHandlers.Transactions, _algorithmHandlers.Results, _algorithmHandlers.RealTime, isolator.CancellationToken);
                            }
                            catch (Exception err)
                            {
                                //Debugging at this level is difficult, stack trace needed.
                                Log.Error(err);
                                algorithm.RunTimeError = err;
                                algorithmManager.SetStatus(AlgorithmStatus.RuntimeError);
                                return;
                            }

                            Log.Trace("Engine.Run(): Exiting Algorithm Manager");
                        }, job.RamAllocation);

                        if (!complete)
                        {
                            Log.Error("Engine.Main(): Failed to complete in time: " + _algorithmHandlers.Setup.MaximumRuntime.ToString("F"));
                            throw new Exception("Failed to complete algorithm within " + _algorithmHandlers.Setup.MaximumRuntime.ToString("F")
                                + " seconds. Please make it run faster.");
                        }

                        // Algorithm runtime error:
                        if (algorithm.RunTimeError != null)
                        {
                            throw algorithm.RunTimeError;
                        }
                    }
                    catch (Exception err)
                    {
                        //Error running the user algorithm: purge datafeed, send error messages, set algorithm status to failed.
                        Log.Error("Engine.Run(): Breaking out of parent try-catch: " + err.Message + " " + err.StackTrace);
                        if (_algorithmHandlers.DataFeed != null) _algorithmHandlers.DataFeed.Exit();
                        if (_algorithmHandlers.Results != null)
                        {
                            var message = "Runtime Error: " + err.Message;
                            Log.Trace("Engine.Run(): Sending runtime error to user...");
                            _algorithmHandlers.Results.LogMessage(message);
                            _algorithmHandlers.Results.RuntimeError(message, err.StackTrace);
                            _systemHandlers.Api.SetAlgorithmStatus(job.AlgorithmId, AlgorithmStatus.RuntimeError, message + " Stack Trace: " + err.StackTrace);
                        }
                    }

                    //Send result data back: this entire code block could be rewritten.
                    // todo: - Split up the statistics class, it's enormous.
                    // todo: - Make a dedicated Statistics.Benchmark class.
                    // todo: - Move all creation and transmission of statistics out of primary engine loop.
                    // todo: - Statistics.Generate(algorithm, resulthandler, transactionhandler);

                    try
                    {
                        var trades = algorithm.TradeBuilder.ClosedTrades;
                        var charts = new Dictionary<string, Chart>(_algorithmHandlers.Results.Charts);
                        var orders = new Dictionary<int, Order>(_algorithmHandlers.Transactions.Orders);
                        var holdings = new Dictionary<string, Holding>();
                        var banner = new Dictionary<string, string>();
                        var statisticsResults = new StatisticsResults();

                        try
                        {
                            //Generates error when things don't exist (no charting logged, runtime errors in main algo execution)
                            const string strategyEquityKey = "Strategy Equity";
                            const string equityKey = "Equity";
                            const string dailyPerformanceKey = "Daily Performance";
                            const string benchmarkKey = "Benchmark";

                            // make sure we've taken samples for these series before just blindly requesting them
                            if (charts.ContainsKey(strategyEquityKey) &&
                                charts[strategyEquityKey].Series.ContainsKey(equityKey) &&
                                charts[strategyEquityKey].Series.ContainsKey(dailyPerformanceKey))
                            {
                                var equity = charts[strategyEquityKey].Series[equityKey].Values;
                                var performance = charts[strategyEquityKey].Series[dailyPerformanceKey].Values;
                                var profitLoss = new SortedDictionary<DateTime, decimal>(algorithm.Transactions.TransactionRecord);
                                var totalTransactions = algorithm.Transactions.GetOrders(x => x.Status.IsFill()).Count();
                                var benchmark = charts[benchmarkKey].Series[benchmarkKey].Values;

                                statisticsResults = StatisticsBuilder.Generate(trades, profitLoss, equity, performance, benchmark,
                                    _algorithmHandlers.Setup.StartingPortfolioValue, algorithm.Portfolio.TotalFees, totalTransactions);
                            }
                        }
                        catch (Exception err)
                        {
                            Log.Error("Algorithm.Node.Engine(): Error generating statistics packet: " + err.Message);
                        }

                        //Diagnostics Completed, Send Result Packet:
                        var totalSeconds = (DateTime.Now - startTime).TotalSeconds;
                        var dataPoints = algorithmManager.DataPoints + _algorithmHandlers.HistoryProvider.DataPointCount;
                        _algorithmHandlers.Results.DebugMessage(
                            string.Format("Algorithm Id:({0}) completed in {1} seconds at {2}k data points per second. Processing total of {3} data points.",
                                job.AlgorithmId, totalSeconds.ToString("F2"), ((dataPoints/(double) 1000)/totalSeconds).ToString("F0"),
                                dataPoints.ToString("N0")));

                        _algorithmHandlers.Results.SendFinalResult(job, orders, algorithm.Transactions.TransactionRecord, holdings, statisticsResults, banner);
                    }
                    catch (Exception err)
                    {
                        Log.Error("Engine.Main(): Error sending analysis result: " + err.Message + "  ST >> " + err.StackTrace);
                    }

                    //Before we return, send terminate commands to close up the threads
                    _algorithmHandlers.Transactions.Exit();
                    _algorithmHandlers.DataFeed.Exit();
                    _algorithmHandlers.RealTime.Exit();
                }

                //Close result handler:
                _algorithmHandlers.Results.Exit();
                statusPing.Exit();

                //Wait for the threads to complete:
                var ts = Stopwatch.StartNew();
                while ((_algorithmHandlers.Results.IsActive
                    || (_algorithmHandlers.Transactions != null && _algorithmHandlers.Transactions.IsActive)
                    || (_algorithmHandlers.DataFeed != null && _algorithmHandlers.DataFeed.IsActive)
                    || (_algorithmHandlers.RealTime != null && _algorithmHandlers.RealTime.IsActive))
                    && ts.ElapsedMilliseconds < 30*1000)
                {
                    Thread.Sleep(100);
                    Log.Trace("Waiting for threads to exit...");
                }

                //Terminate threads still in active state.
                if (threadFeed != null && threadFeed.IsAlive) threadFeed.Abort();
                if (threadTransactions != null && threadTransactions.IsAlive) threadTransactions.Abort();
                if (threadResults != null && threadResults.IsAlive) threadResults.Abort();
                if (statusPingThread != null && statusPingThread.IsAlive) statusPingThread.Abort();

                if (brokerage != null)
                {
                    brokerage.Disconnect();
                }
                if (_algorithmHandlers.Setup != null)
                {
                    _algorithmHandlers.Setup.Dispose();
                }
                Log.Trace("Engine.Main(): Analysis Completed and Results Posted.");
            }
            catch (Exception err)
            {
                Log.Error("Engine.Main(): Error running algorithm: " + err.Message + " >> " + err.StackTrace);
            }
            finally
            {
                //No matter what for live mode; make sure we've set algorithm status in the API for "not running" conditions:
                if (_liveMode && algorithmManager.State != AlgorithmStatus.Running && algorithmManager.State != AlgorithmStatus.RuntimeError)
                    _systemHandlers.Api.SetAlgorithmStatus(job.AlgorithmId, algorithmManager.State);

                _algorithmHandlers.Results.Exit();
                _algorithmHandlers.DataFeed.Exit();
                _algorithmHandlers.Transactions.Exit();
                _algorithmHandlers.RealTime.Exit();
            }
        }
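The shutdown sequence near the end is a bounded wait: signal each handler to exit, poll their IsActive flags inside a Stopwatch-limited loop, then force-terminate anything still alive. A minimal standalone sketch of that bounded wait with a plain worker thread (no LEAN handler interfaces, and a cooperative flag instead of Thread.Abort):

    using System;
    using System.Diagnostics;
    using System.Threading;

    public static class BoundedShutdownSketch
    {
        private static volatile bool _keepRunning = true;

        public static void Main()
        {
            var worker = new Thread(() =>
            {
                while (_keepRunning)
                {
                    Thread.Sleep(50); // simulated handler work loop
                }
            }) { Name = "Worker Thread", IsBackground = true };
            worker.Start();

            // signal the worker to exit, then wait up to 30 seconds for it to finish
            _keepRunning = false;

            var ts = Stopwatch.StartNew();
            while (worker.IsAlive && ts.ElapsedMilliseconds < 30 * 1000)
            {
                Thread.Sleep(100);
                Console.WriteLine("Waiting for threads to exit...");
            }

            Console.WriteLine(worker.IsAlive ? "Worker did not exit in time." : "Worker exited cleanly.");
        }
    }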
Example No. 7
        /// <summary>
        /// Syncs the specified subscriptions. The frontier time used for synchronization is
        /// managed internally and dependent upon previous synchronization operations.
        /// </summary>
        /// <param name="subscriptions">The subscriptions to sync</param>
        public TimeSlice Sync(IEnumerable <Subscription> subscriptions)
        {
            long earlyBirdTicks;
            var  changes = SecurityChanges.None;
            var  data    = new List <DataFeedPacket>();
            // NOTE: Tight coupling in UniverseSelection.ApplyUniverseSelection
            var universeData = new Dictionary <Universe, BaseDataCollection>();
            var universeDataForTimeSliceCreate = new Dictionary <Universe, BaseDataCollection>();

            SecurityChanges newChanges;

            do
            {
                earlyBirdTicks = MaxDateTimeTicks;
                newChanges     = SecurityChanges.None;
                foreach (var subscription in subscriptions)
                {
                    if (subscription.EndOfStream)
                    {
                        OnSubscriptionFinished(subscription);
                        continue;
                    }

                    // prime if needed
                    if (subscription.Current == null)
                    {
                        if (!subscription.MoveNext())
                        {
                            OnSubscriptionFinished(subscription);
                            continue;
                        }
                    }

                    var packet = new DataFeedPacket(subscription.Security, subscription.Configuration);

                    while (subscription.Current.EmitTimeUtc <= _frontier)
                    {
                        packet.Add(subscription.Current.Data);

                        if (!subscription.MoveNext())
                        {
                            OnSubscriptionFinished(subscription);
                            break;
                        }
                    }

                    if (packet.Count > 0)
                    {
                        // we have new universe data to select based on, store the subscription data until the end
                        if (!subscription.IsUniverseSelectionSubscription)
                        {
                            data.Add(packet);
                        }
                        else
                        {
                            // assume that if the first item is a base data collection then the enumerator handled the aggregation,
                            // otherwise, load all the data into a new collection instance
                            var packetBaseDataCollection = packet.Data[0] as BaseDataCollection;
                            var packetData = packetBaseDataCollection == null
                                ? packet.Data
                                : packetBaseDataCollection.Data;

                            BaseDataCollection collection;
                            if (universeData.TryGetValue(subscription.Universe, out collection))
                            {
                                collection.Data.AddRange(packetData);
                            }
                            else
                            {
                                if (packetBaseDataCollection is OptionChainUniverseDataCollection)
                                {
                                    var current = packetBaseDataCollection as OptionChainUniverseDataCollection;
                                    collection = new OptionChainUniverseDataCollection(_frontier, subscription.Configuration.Symbol, packetData, current?.Underlying);
                                }
                                else if (packetBaseDataCollection is FuturesChainUniverseDataCollection)
                                {
                                    collection = new FuturesChainUniverseDataCollection(_frontier, subscription.Configuration.Symbol, packetData);
                                }
                                else
                                {
                                    collection = new BaseDataCollection(_frontier, subscription.Configuration.Symbol, packetData);
                                }

                                universeData[subscription.Universe] = collection;
                            }
                        }
                    }

                    if (subscription.Current != null)
                    {
                        if (earlyBirdTicks == MaxDateTimeTicks)
                        {
                            earlyBirdTicks = subscription.Current.EmitTimeUtc.Ticks;
                        }
                        else
                        {
                            // take the earliest between the next piece of data or the current earliest bird
                            earlyBirdTicks = Math.Min(earlyBirdTicks, subscription.Current.EmitTimeUtc.Ticks);
                        }
                    }
                }

                foreach (var kvp in universeData)
                {
                    var universe           = kvp.Key;
                    var baseDataCollection = kvp.Value;
                    universeDataForTimeSliceCreate[universe] = baseDataCollection;
                    newChanges += _universeSelection.ApplyUniverseSelection(universe, _frontier, baseDataCollection);
                }

                changes += newChanges;
            }while (newChanges != SecurityChanges.None);

            var timeSlice = TimeSlice.Create(_frontier, _sliceTimeZone, _cashBook, data, changes, universeDataForTimeSliceCreate);

            // next frontier time
            _frontier = new DateTime(Math.Max(earlyBirdTicks, _frontier.Ticks), DateTimeKind.Utc);

            return(timeSlice);
        }
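The universeData dictionary is built with a grow-or-create pattern: append to an existing collection for the universe, or create and register a new one on first sight. A small sketch of that pattern with a plain Dictionary, using strings and ints in place of LEAN's universe and data types:

    using System;
    using System.Collections.Generic;

    public static class GrowOrCreateSketch
    {
        public static void Add(Dictionary<string, List<int>> universeData, string universe, IEnumerable<int> packetData)
        {
            List<int> collection;
            if (universeData.TryGetValue(universe, out collection))
            {
                // subsequent packets for the same universe extend the existing collection
                collection.AddRange(packetData);
            }
            else
            {
                // the first packet for this universe creates and registers the collection
                universeData[universe] = new List<int>(packetData);
            }
        }

        public static void Main()
        {
            var universeData = new Dictionary<string, List<int>>();
            Add(universeData, "coarse-usa", new[] { 1, 2 });
            Add(universeData, "coarse-usa", new[] { 3 });
            Console.WriteLine(string.Join(",", universeData["coarse-usa"])); // 1,2,3
        }
    }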
        /// <summary>
        /// Syncs the specified subscriptions at the frontier time
        /// </summary>
        /// <param name="frontier">The time used for syncing, data in the future won't be included in this time slice</param>
        /// <param name="subscriptions">The subscriptions to sync</param>
        /// <param name="sliceTimeZone">The time zone of the created slice object</param>
        /// <param name="cashBook">The cash book, used for creating the cash book updates</param>
        /// <param name="nextFrontier">The next frontier time as determined by the first piece of data in the future ahead of the frontier.
        /// This value will equal DateTime.MaxValue when the subscriptions are all finished</param>
        /// <returns>A time slice for the specified frontier time</returns>
        public TimeSlice Sync(DateTime frontier, IEnumerable <Subscription> subscriptions, DateTimeZone sliceTimeZone, CashBook cashBook, out DateTime nextFrontier)
        {
            var changes = SecurityChanges.None;

            nextFrontier = DateTime.MaxValue;
            var earlyBirdTicks = nextFrontier.Ticks;
            var data           = new List <KeyValuePair <Security, List <BaseData> > >();

            SecurityChanges newChanges;

            do
            {
                newChanges = SecurityChanges.None;
                foreach (var subscription in subscriptions)
                {
                    if (subscription.EndOfStream)
                    {
                        OnSubscriptionFinished(subscription);
                        continue;
                    }

                    // prime if needed
                    if (subscription.Current == null)
                    {
                        if (!subscription.MoveNext())
                        {
                            OnSubscriptionFinished(subscription);
                            continue;
                        }
                    }

                    var cache = new KeyValuePair <Security, List <BaseData> >(subscription.Security, new List <BaseData>());
                    data.Add(cache);

                    var configuration      = subscription.Configuration;
                    var offsetProvider     = subscription.OffsetProvider;
                    var currentOffsetTicks = offsetProvider.GetOffsetTicks(frontier);
                    while (subscription.Current.EndTime.Ticks - currentOffsetTicks <= frontier.Ticks)
                    {
                        // we want bars rounded using their subscription times, we make a clone
                        // so we don't interfere with the enumerator's internal logic
                        var clone = subscription.Current.Clone(subscription.Current.IsFillForward);
                        clone.Time = clone.Time.ExchangeRoundDown(configuration.Increment, subscription.Security.Exchange.Hours, configuration.ExtendedMarketHours);
                        cache.Value.Add(clone);
                        if (!subscription.MoveNext())
                        {
                            OnSubscriptionFinished(subscription);
                            break;
                        }
                    }

                    // we have new universe data to select based on
                    if (subscription.IsUniverseSelectionSubscription && cache.Value.Count > 0)
                    {
                        // assume that if the first item is a base data collection then the enumerator handled the aggregation,
                        // otherwise, load all the data into a new collection instance
                        var collection = cache.Value[0] as BaseDataCollection ?? new BaseDataCollection(frontier, subscription.Configuration.Symbol, cache.Value);
                        newChanges += _universeSelection.ApplyUniverseSelection(subscription.Universe, frontier, collection);
                    }

                    if (subscription.Current != null)
                    {
                        // take the earliest between the next piece of data or the next tz discontinuity
                        earlyBirdTicks = Math.Min(earlyBirdTicks, Math.Min(subscription.Current.EndTime.Ticks - currentOffsetTicks, offsetProvider.GetNextDiscontinuity()));
                    }
                }

                changes += newChanges;
            }while (newChanges != SecurityChanges.None);

            nextFrontier = new DateTime(Math.Max(earlyBirdTicks, frontier.Ticks), DateTimeKind.Utc);

            return(TimeSlice.Create(frontier, sliceTimeZone, cashBook, data, changes));
        }
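This older overload collapses the aggregation into a single wrap-or-pass-through expression: if the first cached item is already a BaseDataCollection the enumerator did the aggregation, otherwise the raw list is wrapped in a new collection. A generic sketch of that idiom, where Item and ItemCollection are hypothetical stand-ins for BaseData and BaseDataCollection:

    using System;
    using System.Collections.Generic;

    // hypothetical stand-ins for BaseData / BaseDataCollection
    public class Item { }

    public class ItemCollection : Item
    {
        public List<Item> Data { get; }
        public ItemCollection(List<Item> data) { Data = data; }
    }

    public static class WrapOrPassThroughSketch
    {
        // reuse an existing collection when the enumerator already aggregated, otherwise wrap the raw list
        public static ItemCollection ToCollection(List<Item> cache)
        {
            return cache[0] as ItemCollection ?? new ItemCollection(cache);
        }

        public static void Main()
        {
            var raw = new List<Item> { new Item(), new Item() };
            Console.WriteLine(ToCollection(raw).Data.Count); // 2: the raw items were wrapped

            var pre = new List<Item> { new ItemCollection(raw) };
            Console.WriteLine(ReferenceEquals(ToCollection(pre), pre[0])); // True: the existing collection is reused
        }
    }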
Example No. 9
        /// <summary>
        /// Main routine for datafeed analysis.
        /// </summary>
        /// <remarks>This is a hot-thread and should be kept extremely lean. Modify with caution.</remarks>
        public void Run()
        {
            var universeSelectionMarkets = new List <string> {
                "usa"
            };
            var frontier = DateTime.MaxValue;

            try
            {
                // don't initialize universe selection if it's not requested
                if (_algorithm.Universe != null)
                {
                    // initialize subscriptions used for universe selection
                    foreach (var market in universeSelectionMarkets)
                    {
                        AddSubscriptionForUniverseSelectionMarket(market);
                    }
                }

                // compute initial frontier time
                frontier = GetInitialFrontierTime();

                Log.Trace(string.Format("FileSystemDataFeed.Run(): Begin: {0} UTC", frontier));
                // continue to loop over each subscription, enqueuing data in time order
                while (!_cancellationTokenSource.IsCancellationRequested)
                {
                    var changes        = SecurityChanges.None;
                    var earlyBirdTicks = long.MaxValue;
                    var data           = new List <KeyValuePair <Security, List <BaseData> > >();

                    foreach (var subscription in Subscriptions)
                    {
                        if (subscription.EndOfStream)
                        {
                            // skip subscriptions that are finished
                            continue;
                        }

                        var cache = new KeyValuePair <Security, List <BaseData> >(subscription.Security, new List <BaseData>());
                        data.Add(cache);

                        var currentOffsetTicks = subscription.OffsetProvider.GetOffsetTicks(frontier);
                        while (subscription.Current.EndTime.Ticks - currentOffsetTicks <= frontier.Ticks)
                        {
                            // we want bars rounded using their subscription times, we make a clone
                            // so we don't interfere with the enumerator's internal logic
                            var clone = subscription.Current.Clone(subscription.Current.IsFillForward);
                            clone.Time = clone.Time.RoundDown(subscription.Configuration.Increment);
                            cache.Value.Add(clone);
                            if (!subscription.MoveNext())
                            {
                                Log.Trace("FileSystemDataFeed.Run(): Finished subscription: " + subscription.Security.Symbol + " at " + frontier + " UTC");
                                break;
                            }
                        }

                        // we have new universe data to select based on
                        if (subscription.IsFundamentalSubscription && cache.Value.Count > 0)
                        {
                            // always wait for other thread
                            if (!Bridge.Wait(Timeout.Infinite, _cancellationTokenSource.Token))
                            {
                                break;
                            }

                            changes += _universeSelection.ApplyUniverseSelection(cache.Value[0].EndTime.Date, cache.Value.OfType <CoarseFundamental>());
                        }

                        if (subscription.Current != null)
                        {
                            earlyBirdTicks = Math.Min(earlyBirdTicks, subscription.Current.EndTime.Ticks - currentOffsetTicks);
                        }
                    }

                    if (earlyBirdTicks == long.MaxValue)
                    {
                        // there's no more data to pull off, we're done
                        break;
                    }

                    // enqueue our next time slice and set the frontier for the next pass
                    Bridge.Add(TimeSlice.Create(_algorithm, frontier, data, changes), _cancellationTokenSource.Token);

                    // never go backwards in time, so take the max between early birds and the current frontier
                    frontier = new DateTime(Math.Max(earlyBirdTicks, frontier.Ticks), DateTimeKind.Utc);
                }

                if (!_cancellationTokenSource.IsCancellationRequested)
                {
                    Bridge.CompleteAdding();
                }
            }
            catch (Exception err)
            {
                Log.Error("FileSystemDataFeed.Run(): Encountered an error: " + err.Message);
                if (!_cancellationTokenSource.IsCancellationRequested)
                {
                    Bridge.CompleteAdding();
                    _cancellationTokenSource.Cancel();
                }
            }
            finally
            {
                Log.Trace(string.Format("FileSystemDataFeed.Run(): Data Feed Completed at {0} UTC", frontier));

                //Close up all streams:
                foreach (var subscription in Subscriptions)
                {
                    subscription.Dispose();
                }

                Log.Trace("FileSystemDataFeed.Run(): Ending Thread... ");
                IsActive = false;
            }
        }
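
The heart of this loop is the frontier synchronization: each pass drains every subscription up to the current frontier, records the earliest end time still pending (earlyBirdTicks), and then advances the frontier to that time without ever moving backwards. The following is a minimal sketch of that pattern only; SimpleSubscription and FrontierSketch are illustrative stand-ins, not types used by the feed above.

using System;
using System.Collections.Generic;
using System.Linq;

// Illustrative stand-in for a data subscription: just a queue of UTC end times.
public class SimpleSubscription
{
    private readonly Queue<DateTime> _endTimes;

    public SimpleSubscription(IEnumerable<DateTime> endTimes)
    {
        _endTimes = new Queue<DateTime>(endTimes);
    }

    public bool EndOfStream => _endTimes.Count == 0;
    public DateTime CurrentEndTime => _endTimes.Peek();
    public DateTime Take() => _endTimes.Dequeue();
}

public static class FrontierSketch
{
    // Drains each subscription up to the frontier, then advances the frontier to the
    // earliest pending end time, never letting it move backwards in time.
    public static void Run(List<SimpleSubscription> subscriptions, DateTime frontier)
    {
        while (true)
        {
            var earlyBirdTicks = long.MaxValue;

            foreach (var subscription in subscriptions.Where(s => !s.EndOfStream))
            {
                // emit everything at or before the current frontier
                while (!subscription.EndOfStream && subscription.CurrentEndTime <= frontier)
                {
                    Console.WriteLine($"{frontier:u} emit data ending {subscription.Take():u}");
                }

                if (!subscription.EndOfStream)
                {
                    earlyBirdTicks = Math.Min(earlyBirdTicks, subscription.CurrentEndTime.Ticks);
                }
            }

            if (earlyBirdTicks == long.MaxValue)
            {
                // every subscription is exhausted
                break;
            }

            // never go backwards in time
            frontier = new DateTime(Math.Max(earlyBirdTicks, frontier.Ticks), DateTimeKind.Utc);
        }
    }
}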
Example No. 10
        /// <summary>
        /// Execute the primary thread for retrieving stock data.
        /// 1. Subscribe to the streams requested.
        /// 2. Build the bar or tick data requested, with the primary loop incrementing at the smallest possible interval.
        /// </summary>
        public void Run()
        {
            //Initialize:

            // Set up separate thread to handle stream and building packets:
            var streamThread = new Thread(StreamStoreConsumer);

            streamThread.Start();
            Thread.Sleep(5); // Wait a little for the other thread to init.

            // This thread converts data into bars "on" the second, keeping the bars as close
            // as possible to a one-second tradebar boundary (starting at 0 milliseconds).
            var realtime = new RealTimeSynchronizedTimer(TimeSpan.FromSeconds(1), utcTriggerTime =>
            {
                // determine if we're on even time boundaries for data emit
                var onMinute = utcTriggerTime.Second == 0;
                var onHour   = onMinute && utcTriggerTime.Minute == 0;

                // Determine if this subscription needs to be archived:
                var items = new List <KeyValuePair <Security, List <BaseData> > >();

                var changes = SecurityChanges.None;

                var performedUniverseSelection = new HashSet <string>();
                foreach (var kvp in _subscriptions)
                {
                    var subscription = kvp.Value;

                    if (subscription.Configuration.Resolution == Resolution.Tick)
                    {
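                        // nothing to aggregate on the bar timer for tick subscriptions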
                        continue;
                    }

                    var localTime = new DateTime(utcTriggerTime.Ticks - subscription.OffsetProvider.GetOffsetTicks(utcTriggerTime));
                    var onDay     = onHour && localTime.Hour == 0;

                    // perform universe selection if requested on day changes (don't perform multiple times per market)
                    if (onDay && _algorithm.Universe != null && !performedUniverseSelection.Contains(subscription.Configuration.Symbol))
                    {
                        performedUniverseSelection.Add(subscription.Configuration.Symbol);
                        var coarse = UniverseSelection.GetCoarseFundamentals(subscription.Configuration.Market, subscription.TimeZone, localTime.Date, true);
                        changes    = _universeSelection.ApplyUniverseSelection(localTime.Date, coarse);
                    }

                    var triggerArchive = false;
                    switch (subscription.Configuration.Resolution)
                    {
                    case Resolution.Second:
                        triggerArchive = true;
                        break;

                    case Resolution.Minute:
                        triggerArchive = onMinute;
                        break;

                    case Resolution.Hour:
                        triggerArchive = onHour;
                        break;

                    case Resolution.Daily:
                        triggerArchive = onDay;
                        break;
                    }

                    if (triggerArchive)
                    {
                        var data = subscription.StreamStore.TriggerArchive(utcTriggerTime);
                        if (data != null)
                        {
                            items.Add(new KeyValuePair <Security, List <BaseData> >(subscription.Security, new List <BaseData> {
                                data
                            }));
                        }
                    }
                }

                // don't try to add if we're already cancelling
                if (_cancellationTokenSource.IsCancellationRequested)
                {
                    return;
                }
                Bridge.Add(TimeSlice.Create(_algorithm, utcTriggerTime, items, changes));
            });

            //Start the realtime sampler above
            realtime.Start();

            while (!_cancellationTokenSource.IsCancellationRequested && !_endOfBridges)
            {
                // main work of this class is done in the realtime and stream store consumer threads
                Thread.Sleep(1000);
            }

            //Dispose of the realtime clock.
            realtime.Stop();

            //Stop thread
            _isActive = false;

            //Exit Live DataStream Feed:
            Log.Trace("LiveTradingDataFeed.Run(): Exiting LiveTradingDataFeed Run Method");
        }
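
The timer callback above archives each subscription based on boundary flags derived from the one-second trigger: second data on every tick of the timer, minute data only when Second == 0, hourly data when the minute boundary is also an hour boundary, and daily data when, in addition, the subscription's local hour rolls over to 0. Below is a standalone sketch of just that decision; the Resolution enum is declared locally so the sketch compiles on its own.

using System;

// Mirrors the resolutions referenced in the snippet above; defined locally so the
// sketch stands alone.
public enum Resolution { Tick, Second, Minute, Hour, Daily }

public static class ArchiveTrigger
{
    // Returns true when a subscription at the given resolution should have its
    // stream store archived for this one-second trigger.
    public static bool ShouldArchive(Resolution resolution, DateTime utcTriggerTime, DateTime localTime)
    {
        var onMinute = utcTriggerTime.Second == 0;
        var onHour = onMinute && utcTriggerTime.Minute == 0;
        var onDay = onHour && localTime.Hour == 0;

        switch (resolution)
        {
            case Resolution.Second: return true;
            case Resolution.Minute: return onMinute;
            case Resolution.Hour:   return onHour;
            case Resolution.Daily:  return onDay;
            default:                return false; // tick data is not archived on the bar timer
        }
    }
}

Note that only the day boundary depends on the subscription's local time, which is why the snippet above converts the UTC trigger time into local time before computing onDay.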
Example No. 11
        /// <summary>
        /// Syncs the specified subscriptions. The frontier time used for synchronization is
        /// managed internally and dependent upon previous synchronization operations.
        /// </summary>
        /// <param name="subscriptions">The subscriptions to sync</param>
        /// <param name="cancellationToken">The cancellation token to stop enumeration</param>
        public IEnumerable <TimeSlice> Sync(IEnumerable <Subscription> subscriptions,
                                            CancellationToken cancellationToken)
        {
            var delayedSubscriptionFinished = new Queue <Subscription>();

            while (!cancellationToken.IsCancellationRequested)
            {
                var changes = SecurityChanges.None;
                var data    = new List <DataFeedPacket>(1);
                // NOTE: Tight coupling in UniverseSelection.ApplyUniverseSelection
                Dictionary <Universe, BaseDataCollection> universeData = null; // lazy construction for performance
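                // universe data captured here survives the inner do/while passes and is attached
                // to the emitted TimeSlice, while 'universeData' itself is cleared after each selection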
                var universeDataForTimeSliceCreate = new Dictionary <Universe, BaseDataCollection>();

                var frontierUtc = _timeProvider.GetUtcNow();
                _frontierTimeProvider.SetCurrentTimeUtc(frontierUtc);

                SecurityChanges newChanges;
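                // repeat at this same frontier while universe selection keeps producing changes,
                // so subscriptions added by selection can contribute data to this time slice too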
                do
                {
                    newChanges = SecurityChanges.None;
                    foreach (var subscription in subscriptions)
                    {
                        if (subscription.EndOfStream)
                        {
                            OnSubscriptionFinished(subscription);
                            continue;
                        }

                        // prime if needed
                        if (subscription.Current == null)
                        {
                            if (!subscription.MoveNext())
                            {
                                OnSubscriptionFinished(subscription);
                                continue;
                            }
                        }

                        DataFeedPacket packet = null;

                        while (subscription.Current != null && subscription.Current.EmitTimeUtc <= frontierUtc)
                        {
                            if (packet == null)
                            {
                                // for performance, let's be selfish about creating a new instance
                                packet = new DataFeedPacket(
                                    subscription.Security,
                                    subscription.Configuration,
                                    subscription.RemovedFromUniverse
                                    );
                            }

                            // If our subscription is a universe, and we get a delisting event emitted for it, then
                            // the universe itself should be unselected and removed, because the Symbol that the
                            // universe is based on has been delisted. Doing the disposal here allows us to
                            // process the delisting at this point in time before emitting out to the algorithm.
                            // This is very useful for universes that can be delisted, such as ETF constituent
                            // universes: the ETF itself is used to create the universe Symbol (and is set as its
                            // underlying), so once the ETF is delisted the universe should cease to exist, since
                            // there are no more constituents of that ETF.
                            if (subscription.IsUniverseSelectionSubscription && subscription.Current.Data is Delisting)
                            {
                                subscription.Universes.Single().Dispose();
                            }

                            packet.Add(subscription.Current.Data);

                            if (!subscription.MoveNext())
                            {
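                                // defer OnSubscriptionFinished until after the TimeSlice below is created
                                // so the data just added to this packet is not dropped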
                                delayedSubscriptionFinished.Enqueue(subscription);
                                break;
                            }
                        }

                        if (packet?.Count > 0)
                        {
                            // we have new universe data to select based on, store the subscription data until the end
                            if (!subscription.IsUniverseSelectionSubscription)
                            {
                                data.Add(packet);
                            }
                            else
                            {
                                // assume that if the first item is a base data collection then the enumerator handled the aggregation,
                                // otherwise, load all the data into a new collection instance
                                var packetBaseDataCollection = packet.Data[0] as BaseDataCollection;
                                var packetData = packetBaseDataCollection == null
                                    ? packet.Data
                                    : packetBaseDataCollection.Data;

                                BaseDataCollection collection;
                                if (universeData != null &&
                                    universeData.TryGetValue(subscription.Universes.Single(), out collection))
                                {
                                    collection.AddRange(packetData);
                                }
                                else
                                {
                                    if (packetBaseDataCollection is OptionChainUniverseDataCollection)
                                    {
                                        var current = packetBaseDataCollection as OptionChainUniverseDataCollection;
                                        collection = new OptionChainUniverseDataCollection(frontierUtc, subscription.Configuration.Symbol, packetData, current?.Underlying);
                                    }
                                    else if (packetBaseDataCollection is FuturesChainUniverseDataCollection)
                                    {
                                        collection = new FuturesChainUniverseDataCollection(frontierUtc, subscription.Configuration.Symbol, packetData);
                                    }
                                    else
                                    {
                                        collection = new BaseDataCollection(frontierUtc, frontierUtc, subscription.Configuration.Symbol, packetData);
                                    }

                                    if (universeData == null)
                                    {
                                        universeData = new Dictionary <Universe, BaseDataCollection>();
                                    }
                                    universeData[subscription.Universes.Single()] = collection;
                                }
                            }
                        }

                        if (subscription.IsUniverseSelectionSubscription &&
                            subscription.Universes.Single().DisposeRequested)
                        {
                            var universe = subscription.Universes.Single();
                            // check if a universe selection isn't already scheduled for this disposed universe
                            if (universeData == null || !universeData.ContainsKey(universe))
                            {
                                if (universeData == null)
                                {
                                    universeData = new Dictionary <Universe, BaseDataCollection>();
                                }
                                // we force trigger one last universe selection for this disposed universe, so it deselects all subscriptions it added
                                universeData[universe] = new BaseDataCollection(frontierUtc, subscription.Configuration.Symbol);
                            }

                            // we need to do this after all usages of subscription.Universes
                            OnSubscriptionFinished(subscription);
                        }
                    }

                    if (universeData != null && universeData.Count > 0)
                    {
                        // if we are going to perform universe selection we emit an empty
                        // time pulse to align algorithm time with current frontier
                        yield return(_timeSliceFactory.CreateTimePulse(frontierUtc));

                        foreach (var kvp in universeData)
                        {
                            var universe           = kvp.Key;
                            var baseDataCollection = kvp.Value;
                            universeDataForTimeSliceCreate[universe] = baseDataCollection;
                            newChanges += _universeSelection.ApplyUniverseSelection(universe, frontierUtc, baseDataCollection);
                        }
                        universeData.Clear();
                    }

                    changes += newChanges;
                }while (newChanges != SecurityChanges.None ||
                        _universeSelection.AddPendingInternalDataFeeds(frontierUtc));

                var timeSlice = _timeSliceFactory.Create(frontierUtc, data, changes, universeDataForTimeSliceCreate);

                while (delayedSubscriptionFinished.Count > 0)
                {
                    // these subscriptions added valid data to the packet;
                    // we need to trigger OnSubscriptionFinished only after the TimeSlice is created,
                    // otherwise that data would be dropped
                    var subscription = delayedSubscriptionFinished.Dequeue();
                    OnSubscriptionFinished(subscription);
                }

                yield return(timeSlice);
            }
        }
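
A typical way to consume an enumerable synchronizer like this is to pump its output into a bounded bridge from a dedicated task and complete the bridge when enumeration ends, much like the Run methods shown earlier. The sketch below uses hypothetical Slice and ISliceSynchronizer stand-ins rather than the real TimeSlice and synchronizer types.

using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;

// Hypothetical stand-ins: the real feed uses TimeSlice and its own synchronizer type.
public sealed class Slice
{
    public DateTime UtcTime { get; }
    public Slice(DateTime utcTime) { UtcTime = utcTime; }
}

public interface ISliceSynchronizer
{
    IEnumerable<Slice> Sync(CancellationToken cancellationToken);
}

public static class BridgePumpSketch
{
    // Pumps synchronized slices into a bounded bridge until cancelled or the
    // synchronizer runs out of data, then marks the bridge complete.
    public static Task Pump(ISliceSynchronizer synchronizer,
                            BlockingCollection<Slice> bridge,
                            CancellationToken token)
    {
        return Task.Run(() =>
        {
            try
            {
                foreach (var slice in synchronizer.Sync(token))
                {
                    bridge.Add(slice, token);
                }
            }
            finally
            {
                // let the consumer drain any remaining slices and then stop
                bridge.CompleteAdding();
            }
        }, token);
    }
}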