/// <summary>
/// Wraps a single piece of data into a one-item time slice and pushes it onto the bridge.
/// </summary>
/// <param name="subscription">The subscription the data belongs to; provides the security and time zone</param>
/// <param name="tick">The data point to emit; its end time (converted to UTC) becomes the slice time</param>
private void AddSingleItemToBridge(Subscription subscription, BaseData tick)
{
    // don't try to add if we're already cancelling
    if (_cancellationTokenSource.IsCancellationRequested)
    {
        return;
    }

    // the slice is stamped with the data's end time converted from the subscription's time zone to UTC
    var utcDataTime = tick.EndTime.ConvertToUtc(subscription.TimeZone);

    // a single-entry payload: this security mapped to a list containing just this data point
    var singleItem = new KeyValuePair<Security, List<BaseData>>(subscription.Security, new List<BaseData> { tick });
    var payload = new List<KeyValuePair<Security, List<BaseData>>> { singleItem };

    Bridge.Add(TimeSlice.Create(utcDataTime, _algorithm.TimeZone, _algorithm.Portfolio.CashBook, payload, SecurityChanges.None));
}
/// <summary>
/// Primary entry point. Runs the live data feed loop: stamps a UTC frontier each pass,
/// dequeues all available data from every subscription into packets, applies universe
/// selection when universe data arrives, and emits a time slice to the bridge at least
/// once per second (even when no data arrived).
/// </summary>
public void Run()
{
    IsActive = true;

    // we want to emit to the bridge minimally once a second since the data feed is
    // the heartbeat of the application, so this value will contain a second after
    // the last emit time, and if we pass this time, we'll emit even with no data
    var nextEmit = DateTime.MinValue;

    try
    {
        while (!_cancellationTokenSource.IsCancellationRequested)
        {
            // perform sleeps to wake up on the second?
            _frontierUtc = _timeProvider.GetUtcNow();
            _frontierTimeProvider.SetCurrentTime(_frontierUtc);

            var data = new List<DataFeedPacket>();
            foreach (var subscription in Subscriptions)
            {
                var packet = new DataFeedPacket(subscription.Security, subscription.Configuration);

                // dequeue data that is time stamped at or before this frontier
                while (subscription.MoveNext() && subscription.Current != null)
                {
                    packet.Add(subscription.Current);
                }

                // if we have data, add it to be added to the bridge
                if (packet.Count > 0)
                {
                    data.Add(packet);
                }

                // we have new universe data to select based on
                if (subscription.IsUniverseSelectionSubscription && packet.Count > 0)
                {
                    var universe = subscription.Universe;

                    // always wait for other thread to sync up before mutating securities via selection
                    if (!_bridge.WaitHandle.WaitOne(Timeout.Infinite, _cancellationTokenSource.Token))
                    {
                        break;
                    }

                    // assume that if the first item is a base data collection then the enumerator handled the aggregation,
                    // otherwise, load all the data into a new collection instance
                    var collection = packet.Data[0] as BaseDataCollection ?? new BaseDataCollection(_frontierUtc, subscription.Configuration.Symbol, packet.Data);

                    // accumulate security changes across subscriptions; consumed in the emit below
                    _changes += _universeSelection.ApplyUniverseSelection(universe, _frontierUtc, collection);
                }
            }

            // check for cancellation
            if (_cancellationTokenSource.IsCancellationRequested)
            {
                return;
            }

            // emit on data or if we've elapsed a full second since last emit
            if (data.Count != 0 || _frontierUtc >= nextEmit)
            {
                _bridge.Add(TimeSlice.Create(_frontierUtc, _algorithm.TimeZone, _algorithm.Portfolio.CashBook, data, _changes), _cancellationTokenSource.Token);

                // force emitting every second
                nextEmit = _frontierUtc.RoundDown(Time.OneSecond).Add(Time.OneSecond);
            }

            // reset our security changes
            _changes = SecurityChanges.None;

            // take a short nap
            Thread.Sleep(1);
        }
    }
    catch (Exception err)
    {
        // surface the error to the algorithm so the run terminates with the real cause
        Log.Error(err);
        _algorithm.RunTimeError = err;
    }

    Log.Trace("LiveTradingDataFeed.Run(): Exited thread.");
    IsActive = false;
}
/// <summary>
/// Syncs the specified subscriptions at the frontier time
/// </summary>
/// <param name="frontier">The time used for syncing, data in the future won't be included in this time slice</param>
/// <param name="subscriptions">The subscriptions to sync</param>
/// <param name="sliceTimeZone">The time zone of the created slice object</param>
/// <param name="cashBook">The cash book, used for creating the cash book updates</param>
/// <param name="nextFrontier">The next frontier time as determined by the first piece of data in the future ahead of the frontier.
/// This value will equal DateTime.MaxValue when the subscriptions are all finished</param>
/// <returns>A time slice for the specified frontier time</returns>
public TimeSlice Sync(DateTime frontier, IEnumerable<Subscription> subscriptions, DateTimeZone sliceTimeZone, CashBook cashBook, out DateTime nextFrontier)
{
    var changes = SecurityChanges.None;
    nextFrontier = DateTime.MaxValue;
    var earlyBirdTicks = nextFrontier.Ticks;
    var data = new List<DataFeedPacket>();
    var universeData = new Dictionary<Universe, BaseDataCollection>();

    SecurityChanges newChanges;
    // loop until universe selection stops producing changes: a selection can add new
    // subscriptions whose data must also be pulled at this same frontier
    do
    {
        universeData.Clear();
        newChanges = SecurityChanges.None;
        foreach (var subscription in subscriptions)
        {
            if (subscription.EndOfStream)
            {
                OnSubscriptionFinished(subscription);
                continue;
            }

            // prime if needed
            if (subscription.Current == null)
            {
                if (!subscription.MoveNext())
                {
                    OnSubscriptionFinished(subscription);
                    continue;
                }
            }

            // NOTE: the packet is added even if it ends up empty — presumably downstream
            // TimeSlice.Create relies on seeing every subscription; confirm before changing
            var packet = new DataFeedPacket(subscription.Security, subscription.Configuration);
            data.Add(packet);

            var configuration = subscription.Configuration;
            var offsetProvider = subscription.OffsetProvider;
            var currentOffsetTicks = offsetProvider.GetOffsetTicks(frontier);
            // dequeue everything time stamped at or before the frontier (in the subscription's local time)
            while (subscription.Current.EndTime.Ticks - currentOffsetTicks <= frontier.Ticks)
            {
                // we want bars rounded using their subscription times, we make a clone
                // so we don't interfere with the enumerator's internal logic
                var clone = subscription.Current.Clone(subscription.Current.IsFillForward);
                clone.Time = clone.Time.ExchangeRoundDown(configuration.Increment, subscription.Security.Exchange.Hours, configuration.ExtendedMarketHours);
                packet.Add(clone);
                if (!subscription.MoveNext())
                {
                    OnSubscriptionFinished(subscription);
                    break;
                }
            }

            // we have new universe data to select based on, store the subscription data until the end
            if (subscription.IsUniverseSelectionSubscription && packet.Count > 0)
            {
                // assume that if the first item is a base data collection then the enumerator handled the aggregation,
                // otherwise, load all the data into a new collection instance
                var packetBaseDataCollection = packet.Data[0] as BaseDataCollection;
                var packetData = packetBaseDataCollection == null ? packet.Data : packetBaseDataCollection.Data;

                BaseDataCollection collection;
                if (!universeData.TryGetValue(subscription.Universe, out collection))
                {
                    if (packetBaseDataCollection is OptionChainUniverseDataCollection)
                    {
                        // preserve the underlying's data point when re-wrapping the option chain
                        var current = subscription.Current as OptionChainUniverseDataCollection;
                        var underlying = current != null ? current.Underlying : null;
                        collection = new OptionChainUniverseDataCollection(frontier, subscription.Configuration.Symbol, packetData, underlying);
                    }
                    else
                    {
                        collection = new BaseDataCollection(frontier, subscription.Configuration.Symbol, packetData);
                    }
                    universeData[subscription.Universe] = collection;
                }
                else
                {
                    collection.Data.AddRange(packetData);
                }
            }

            if (subscription.Current != null)
            {
                // take the earliest between the next piece of data or the next tz discontinuity
                earlyBirdTicks = Math.Min(earlyBirdTicks, Math.Min(subscription.Current.EndTime.Ticks - currentOffsetTicks, offsetProvider.GetNextDiscontinuity()));
            }
        }

        // apply all universe selections gathered this pass; any additions/removals feed the next pass
        foreach (var kvp in universeData)
        {
            var universe = kvp.Key;
            var baseDataCollection = kvp.Value;
            newChanges += _universeSelection.ApplyUniverseSelection(universe, frontier, baseDataCollection);
        }

        changes += newChanges;
    } while (newChanges != SecurityChanges.None);

    // never go backwards in time: the next frontier is at least the current frontier
    nextFrontier = new DateTime(Math.Max(earlyBirdTicks, frontier.Ticks), DateTimeKind.Utc);

    return (TimeSlice.Create(frontier, sliceTimeZone, cashBook, data, changes));
}
/// <summary>
/// Primary entry point. Runs the live feed loop: stamps a frontier time, drains each
/// subscription into per-security caches, fires universe selection events for universe
/// subscriptions, and emits a time slice to the bridge at least once per second.
/// </summary>
public void Run()
{
    IsActive = true;

    // we want to emit to the bridge minimally once a second since the data feed is
    // the heartbeat of the application, so this value will contain a second after
    // the last emit time, and if we pass this time, we'll emit even with no data
    var nextEmit = DateTime.MinValue;

    try
    {
        while (!_cancellationTokenSource.IsCancellationRequested)
        {
            // perform sleeps to wake up on the second?
            var frontier = _timeProvider.GetUtcNow();
            _frontierTimeProvider.SetCurrentTime(frontier);

            var data = new List<KeyValuePair<Security, List<BaseData>>>();
            foreach (var kvp in _subscriptions)
            {
                var subscription = kvp.Value;

                // per-subscription cache: the security paired with the data drained this pass
                var cache = new KeyValuePair<Security, List<BaseData>>(subscription.Security, new List<BaseData>());

                // dequeue data that is time stamped at or before this frontier
                while (subscription.MoveNext() && subscription.Current != null)
                {
                    cache.Value.Add(subscription.Current);
                }

                // if we have data, add it to be added to the bridge
                if (cache.Value.Count > 0)
                {
                    data.Add(cache);
                }

                // we have new universe data to select based on
                if (subscription.IsUniverseSelectionSubscription && cache.Value.Count > 0)
                {
                    var universe = subscription.Universe;

                    // always wait for other thread to sync up
                    if (!Bridge.Wait(Timeout.Infinite, _cancellationTokenSource.Token))
                    {
                        break;
                    }

                    // fire the universe selection event
                    OnUniverseSelection(universe, subscription.Configuration, frontier, cache.Value);
                }
            }

            // check for cancellation
            if (_cancellationTokenSource.IsCancellationRequested)
            {
                return;
            }

            // emit on data or if we've elapsed a full second since last emit
            if (data.Count != 0 || frontier >= nextEmit)
            {
                Bridge.Add(TimeSlice.Create(frontier, _algorithm.TimeZone, _algorithm.Portfolio.CashBook, data, _changes), _cancellationTokenSource.Token);

                // force emitting every second
                nextEmit = frontier.RoundDown(Time.OneSecond).Add(Time.OneSecond);
            }

            // reset our security changes
            _changes = SecurityChanges.None;

            // take a short nap
            Thread.Sleep(1);
        }
    }
    catch (Exception err)
    {
        // surface the error to the algorithm so the run terminates with the real cause
        Log.Error(err);
        _algorithm.RunTimeError = err;
    }

    IsActive = false;
}
/// <summary>
/// Primary entry point. Delegates per-pass synchronization to a
/// <c>SubscriptionSynchronizer</c> and pushes the resulting time slices onto the bridge,
/// emitting at least once per second. On a runtime error, emits one final empty time
/// slice so the algorithm manager observes the error instead of timing out.
/// </summary>
public void Run()
{
    IsActive = true;

    // we want to emit to the bridge minimally once a second since the data feed is
    // the heartbeat of the application, so this value will contain a second after
    // the last emit time, and if we pass this time, we'll emit even with no data
    var nextEmit = DateTime.MinValue;

    var syncer = new SubscriptionSynchronizer(_universeSelection, _algorithm.TimeZone, _algorithm.Portfolio.CashBook, _frontierTimeProvider);
    // when the synchronizer exhausts a subscription, remove it from this feed
    syncer.SubscriptionFinished += (sender, subscription) =>
    {
        RemoveSubscription(subscription.Configuration);
        Log.Debug($"LiveTradingDataFeed.SubscriptionFinished(): Finished subscription: {subscription.Configuration} at {_algorithm.UtcTime} UTC");
    };

    try
    {
        while (!_cancellationTokenSource.IsCancellationRequested)
        {
            // perform sleeps to wake up on the second?
            _frontierUtc = _timeProvider.GetUtcNow();
            _frontierTimeProvider.SetCurrentTime(_frontierUtc);

            // always wait for other thread to sync up
            if (!_bridge.WaitHandle.WaitOne(Timeout.Infinite, _cancellationTokenSource.Token))
            {
                break;
            }

            var timeSlice = syncer.Sync(Subscriptions);

            // check for cancellation
            if (_cancellationTokenSource.IsCancellationRequested)
            {
                return;
            }

            // emit on data or if we've elapsed a full second since last emit or there are security changes
            if (timeSlice.SecurityChanges != SecurityChanges.None || timeSlice.Data.Count != 0 || _frontierUtc >= nextEmit)
            {
                _bridge.Add(timeSlice, _cancellationTokenSource.Token);

                // force emitting every second
                nextEmit = _frontierUtc.RoundDown(Time.OneSecond).Add(Time.OneSecond);
            }

            // take a short nap
            Thread.Sleep(1);
        }
    }
    catch (Exception err)
    {
        Log.Error(err);
        _algorithm.RunTimeError = err;
        _algorithm.Status = AlgorithmStatus.RuntimeError;

        // send last empty packet list before terminating,
        // so the algorithm manager has a chance to detect the runtime error
        // and exit showing the correct error instead of a timeout
        nextEmit = _frontierUtc.RoundDown(Time.OneSecond).Add(Time.OneSecond);
        if (!_cancellationTokenSource.IsCancellationRequested)
        {
            _bridge.Add(
                TimeSlice.Create(nextEmit, _algorithm.TimeZone, _algorithm.Portfolio.CashBook, new List<DataFeedPacket>(), SecurityChanges.None, new Dictionary<Universe, BaseDataCollection>()),
                _cancellationTokenSource.Token);
        }
    }

    Log.Trace("LiveTradingDataFeed.Run(): Exited thread.");
    IsActive = false;
}
/// <summary>
/// Syncs the specified subscriptions. The frontier time used for synchronization is
/// managed internally and dependent upon previous synchronization operations.
/// </summary>
/// <param name="subscriptions">The subscriptions to sync</param>
public TimeSlice Sync(IEnumerable<Subscription> subscriptions)
{
    long earlyBirdTicks;
    var changes = SecurityChanges.None;
    var data = new List<DataFeedPacket>();
    // NOTE: Tight coupling in UniverseSelection.ApplyUniverseSelection
    var universeData = new Dictionary<Universe, BaseDataCollection>();
    var universeDataForTimeSliceCreate = new Dictionary<Universe, BaseDataCollection>();

    SecurityChanges newChanges;
    // loop until universe selection stops producing changes: a selection can add new
    // subscriptions whose data must also be pulled at this same frontier
    do
    {
        earlyBirdTicks = MaxDateTimeTicks;
        newChanges = SecurityChanges.None;
        foreach (var subscription in subscriptions)
        {
            if (subscription.EndOfStream)
            {
                OnSubscriptionFinished(subscription);
                continue;
            }

            // prime if needed
            if (subscription.Current == null)
            {
                if (!subscription.MoveNext())
                {
                    OnSubscriptionFinished(subscription);
                    continue;
                }
            }

            var packet = new DataFeedPacket(subscription.Security, subscription.Configuration);

            // dequeue everything whose emit time is at or before the current frontier
            while (subscription.Current.EmitTimeUtc <= _frontier)
            {
                packet.Add(subscription.Current.Data);
                if (!subscription.MoveNext())
                {
                    OnSubscriptionFinished(subscription);
                    break;
                }
            }

            if (packet.Count > 0)
            {
                // we have new universe data to select based on, store the subscription data until the end
                if (!subscription.IsUniverseSelectionSubscription)
                {
                    data.Add(packet);
                }
                else
                {
                    // assume that if the first item is a base data collection then the enumerator handled the aggregation,
                    // otherwise, load all the data into a new collection instance
                    var packetBaseDataCollection = packet.Data[0] as BaseDataCollection;
                    var packetData = packetBaseDataCollection == null ? packet.Data : packetBaseDataCollection.Data;

                    BaseDataCollection collection;
                    if (universeData.TryGetValue(subscription.Universe, out collection))
                    {
                        collection.Data.AddRange(packetData);
                    }
                    else
                    {
                        if (packetBaseDataCollection is OptionChainUniverseDataCollection)
                        {
                            // preserve the option chain's underlying data point when re-wrapping
                            var current = packetBaseDataCollection as OptionChainUniverseDataCollection;
                            collection = new OptionChainUniverseDataCollection(_frontier, subscription.Configuration.Symbol, packetData, current?.Underlying);
                        }
                        else if (packetBaseDataCollection is FuturesChainUniverseDataCollection)
                        {
                            collection = new FuturesChainUniverseDataCollection(_frontier, subscription.Configuration.Symbol, packetData);
                        }
                        else
                        {
                            collection = new BaseDataCollection(_frontier, subscription.Configuration.Symbol, packetData);
                        }
                        universeData[subscription.Universe] = collection;
                    }
                }
            }

            if (subscription.Current != null)
            {
                if (earlyBirdTicks == MaxDateTimeTicks)
                {
                    earlyBirdTicks = subscription.Current.EmitTimeUtc.Ticks;
                }
                else
                {
                    // take the earliest between the next piece of data or the current earliest bird
                    earlyBirdTicks = Math.Min(earlyBirdTicks, subscription.Current.EmitTimeUtc.Ticks);
                }
            }
        }

        // apply all universe selections gathered this pass; additions/removals feed the next pass
        foreach (var kvp in universeData)
        {
            var universe = kvp.Key;
            var baseDataCollection = kvp.Value;
            universeDataForTimeSliceCreate[universe] = baseDataCollection;
            newChanges += _universeSelection.ApplyUniverseSelection(universe, _frontier, baseDataCollection);
        }

        changes += newChanges;
    } while (newChanges != SecurityChanges.None);

    var timeSlice = TimeSlice.Create(_frontier, _sliceTimeZone, _cashBook, data, changes, universeDataForTimeSliceCreate);

    // next frontier time
    _frontier = new DateTime(Math.Max(earlyBirdTicks, _frontier.Ticks), DateTimeKind.Utc);

    return (timeSlice);
}
/// <summary>
/// Primary entry point. Live feed loop that drains subscriptions into packets, aggregates
/// universe data per universe, applies universe selection (and removes universe
/// subscriptions flagged for disposal), then emits a time slice at least once per second.
/// On a runtime error, emits one final empty time slice so the algorithm manager sees
/// the error instead of timing out.
/// </summary>
public void Run()
{
    IsActive = true;

    // we want to emit to the bridge minimally once a second since the data feed is
    // the heartbeat of the application, so this value will contain a second after
    // the last emit time, and if we pass this time, we'll emit even with no data
    var nextEmit = DateTime.MinValue;

    try
    {
        while (!_cancellationTokenSource.IsCancellationRequested)
        {
            // perform sleeps to wake up on the second?
            _frontierUtc = _timeProvider.GetUtcNow();
            _frontierTimeProvider.SetCurrentTime(_frontierUtc);

            var data = new List<DataFeedPacket>();
            // NOTE: Tight coupling in UniverseSelection.ApplyUniverseSelection
            var universeData = new Dictionary<Universe, BaseDataCollection>();
            foreach (var subscription in Subscriptions)
            {
                var config = subscription.Configuration;
                var packet = new DataFeedPacket(subscription.Security, config, subscription.RemovedFromUniverse);

                // dequeue data that is time stamped at or before this frontier
                while (subscription.MoveNext() && subscription.Current != null)
                {
                    packet.Add(subscription.Current.Data);
                }

                // if we have data, add it to be added to the bridge
                if (packet.Count > 0)
                {
                    data.Add(packet);
                }

                // we have new universe data to select based on
                if (subscription.IsUniverseSelectionSubscription)
                {
                    if (packet.Count > 0)
                    {
                        var universe = subscription.Universe;

                        // always wait for other thread to sync up
                        if (!_bridge.WaitHandle.WaitOne(Timeout.Infinite, _cancellationTokenSource.Token))
                        {
                            break;
                        }

                        // assume that if the first item is a base data collection then the enumerator handled the aggregation,
                        // otherwise, load all the data into a new collection instance
                        var collection = packet.Data[0] as BaseDataCollection ?? new BaseDataCollection(_frontierUtc, config.Symbol, packet.Data);

                        // merge into any collection already gathered for this universe this pass
                        BaseDataCollection existingCollection;
                        if (universeData.TryGetValue(universe, out existingCollection))
                        {
                            existingCollection.Data.AddRange(collection.Data);
                        }
                        else
                        {
                            universeData[universe] = collection;
                        }

                        _changes += _universeSelection.ApplyUniverseSelection(universe, _frontierUtc, collection);
                    }

                    // remove subscription for universe data if disposal requested AFTER time sync
                    // this ensures we get any security changes from removing the universe and its children
                    if (subscription.Universe.DisposeRequested)
                    {
                        RemoveSubscription(subscription.Configuration);
                    }
                }
            }

            // check for cancellation
            if (_cancellationTokenSource.IsCancellationRequested)
            {
                return;
            }

            // emit on data or if we've elapsed a full second since last emit
            if (data.Count != 0 || _frontierUtc >= nextEmit)
            {
                _bridge.Add(TimeSlice.Create(_frontierUtc, _algorithm.TimeZone, _algorithm.Portfolio.CashBook, data, _changes, universeData), _cancellationTokenSource.Token);

                // force emitting every second
                nextEmit = _frontierUtc.RoundDown(Time.OneSecond).Add(Time.OneSecond);
            }

            // reset our security changes
            _changes = SecurityChanges.None;

            // take a short nap
            Thread.Sleep(1);
        }
    }
    catch (Exception err)
    {
        Log.Error(err);
        _algorithm.RunTimeError = err;
        _algorithm.Status = AlgorithmStatus.RuntimeError;

        // send last empty packet list before terminating,
        // so the algorithm manager has a chance to detect the runtime error
        // and exit showing the correct error instead of a timeout
        nextEmit = _frontierUtc.RoundDown(Time.OneSecond).Add(Time.OneSecond);
        if (!_cancellationTokenSource.IsCancellationRequested)
        {
            _bridge.Add(
                TimeSlice.Create(nextEmit, _algorithm.TimeZone, _algorithm.Portfolio.CashBook, new List<DataFeedPacket>(), SecurityChanges.None, new Dictionary<Universe, BaseDataCollection>()),
                _cancellationTokenSource.Token);
        }
    }

    Log.Trace("LiveTradingDataFeed.Run(): Exited thread.");
    IsActive = false;
}
/// <summary>
/// Syncs the specified subscriptions at the frontier time
/// </summary>
/// <param name="frontier">The time used for syncing, data in the future won't be included in this time slice</param>
/// <param name="subscriptions">The subscriptions to sync</param>
/// <param name="sliceTimeZone">The time zone of the created slice object</param>
/// <param name="cashBook">The cash book, used for creating the cash book updates</param>
/// <param name="nextFrontier">The next frontier time as determined by the first piece of data in the future ahead of the frontier.
/// This value will equal DateTime.MaxValue when the subscriptions are all finished</param>
/// <returns>A time slice for the specified frontier time</returns>
public TimeSlice Sync(DateTime frontier, IEnumerable<Subscription> subscriptions, DateTimeZone sliceTimeZone, CashBook cashBook, out DateTime nextFrontier)
{
    var changes = SecurityChanges.None;
    nextFrontier = DateTime.MaxValue;
    var earlyBirdTicks = nextFrontier.Ticks;
    var data = new List<KeyValuePair<Security, List<BaseData>>>();

    SecurityChanges newChanges;
    // loop until universe selection stops producing changes: a selection can add new
    // subscriptions whose data must also be pulled at this same frontier
    do
    {
        newChanges = SecurityChanges.None;
        foreach (var subscription in subscriptions)
        {
            if (subscription.EndOfStream)
            {
                OnSubscriptionFinished(subscription);
                continue;
            }

            // prime if needed
            if (subscription.Current == null)
            {
                if (!subscription.MoveNext())
                {
                    OnSubscriptionFinished(subscription);
                    continue;
                }
            }

            // NOTE: the cache is added even if it ends up empty — presumably downstream
            // TimeSlice.Create relies on seeing every subscription; confirm before changing
            var cache = new KeyValuePair<Security, List<BaseData>>(subscription.Security, new List<BaseData>());
            data.Add(cache);

            var configuration = subscription.Configuration;
            var offsetProvider = subscription.OffsetProvider;
            var currentOffsetTicks = offsetProvider.GetOffsetTicks(frontier);
            // dequeue everything time stamped at or before the frontier (in the subscription's local time)
            while (subscription.Current.EndTime.Ticks - currentOffsetTicks <= frontier.Ticks)
            {
                // we want bars rounded using their subscription times, we make a clone
                // so we don't interfere with the enumerator's internal logic
                var clone = subscription.Current.Clone(subscription.Current.IsFillForward);
                clone.Time = clone.Time.ExchangeRoundDown(configuration.Increment, subscription.Security.Exchange.Hours, configuration.ExtendedMarketHours);
                cache.Value.Add(clone);
                if (!subscription.MoveNext())
                {
                    OnSubscriptionFinished(subscription);
                    break;
                }
            }

            // we have new universe data to select based on
            if (subscription.IsUniverseSelectionSubscription && cache.Value.Count > 0)
            {
                // assume that if the first item is a base data collection then the enumerator handled the aggregation,
                // otherwise, load all the data into a new collection instance
                var collection = cache.Value[0] as BaseDataCollection ?? new BaseDataCollection(frontier, subscription.Configuration.Symbol, cache.Value);

                newChanges += _universeSelection.ApplyUniverseSelection(subscription.Universe, frontier, collection);
            }

            if (subscription.Current != null)
            {
                // take the earliest between the next piece of data or the next tz discontinuity
                earlyBirdTicks = Math.Min(earlyBirdTicks, Math.Min(subscription.Current.EndTime.Ticks - currentOffsetTicks, offsetProvider.GetNextDiscontinuity()));
            }
        }

        changes += newChanges;
    } while (newChanges != SecurityChanges.None);

    // never go backwards in time: the next frontier is at least the current frontier
    nextFrontier = new DateTime(Math.Max(earlyBirdTicks, frontier.Ticks), DateTimeKind.Utc);

    return (TimeSlice.Create(frontier, sliceTimeZone, cashBook, data, changes));
}
/// <summary>
/// Execute the primary thread for retrieving stock data.
/// 1. Subscribe to the streams requested.
/// 2. Build bars or tick data requested, primary loop increment smallest possible.
/// </summary>
public void Run()
{
    //Initialize:

    // Set up separate thread to handle stream and building packets:
    var streamThread = new Thread(StreamStoreConsumer);
    streamThread.Start();
    Thread.Sleep(5); // Wait a little for the other thread to init.

    // This thread converts data into bars "on" the second - assuring the bars are close as
    // possible to a second unit tradebar (starting at 0 milliseconds).
    var realtime = new RealTimeSynchronizedTimer(TimeSpan.FromSeconds(1), utcTriggerTime =>
    {
        // determine if we're on even time boundaries for data emit
        var onMinute = utcTriggerTime.Second == 0;
        var onHour = onMinute && utcTriggerTime.Minute == 0;

        // Determine if this subscription needs to be archived:
        var items = new List<KeyValuePair<Security, List<BaseData>>>();
        var changes = SecurityChanges.None;
        var performedUniverseSelection = new HashSet<string>();
        foreach (var kvp in _subscriptions)
        {
            var subscription = kvp.Value;

            // tick subscriptions are streamed directly, not archived into bars here
            if (subscription.Configuration.Resolution == Resolution.Tick)
            {
                continue;
            }

            // convert the trigger time into the subscription's local time via its offset
            var localTime = new DateTime(utcTriggerTime.Ticks - subscription.OffsetProvider.GetOffsetTicks(utcTriggerTime));
            var onDay = onHour && localTime.Hour == 0;

            // perform universe selection if requested on day changes (don't perform multiple times per market)
            if (onDay && _algorithm.Universe != null && performedUniverseSelection.Add(subscription.Configuration.Market))
            {
                var coarse = UniverseSelection.GetCoarseFundamentals(subscription.Configuration.Market, subscription.TimeZone, localTime.Date, true);
                OnFundamental(FundamentalType.Coarse, utcTriggerTime, subscription.Configuration, coarse.ToList());
            }

            // archive when the trigger time lands on this subscription's resolution boundary
            var triggerArchive = false;
            switch (subscription.Configuration.Resolution)
            {
                case Resolution.Second:
                    triggerArchive = true;
                    break;
                case Resolution.Minute:
                    triggerArchive = onMinute;
                    break;
                case Resolution.Hour:
                    triggerArchive = onHour;
                    break;
                case Resolution.Daily:
                    triggerArchive = onDay;
                    break;
            }

            if (triggerArchive)
            {
                var data = subscription.StreamStore.TriggerArchive(utcTriggerTime);
                if (data != null)
                {
                    items.Add(new KeyValuePair<Security, List<BaseData>>(subscription.Security, new List<BaseData> { data }));
                }
            }
        }

        // don't try to add if we're already cancelling
        if (_cancellationTokenSource.IsCancellationRequested)
        {
            return;
        }

        Bridge.Add(TimeSlice.Create(utcTriggerTime, _algorithm.TimeZone, _algorithm.Portfolio.CashBook, items, changes));
    });

    //Start the realtime sampler above
    realtime.Start();

    while (!_cancellationTokenSource.IsCancellationRequested && !_endOfBridges)
    {
        // main work of this class is done in the realtime and stream store consumer threads
        Thread.Sleep(1000);
    }

    //Dispose of the realtime clock.
    realtime.Stop();

    //Stop thread
    _isActive = false;

    //Exit Live DataStream Feed:
    Log.Trace("LiveTradingDataFeed.Run(): Exiting LiveTradingDataFeed Run Method");
}
/// <summary>
/// Main routine for datafeed analysis.
/// </summary>
/// <remarks>This is a hot-thread and should be kept extremely lean. Modify with caution.</remarks>
public void Run()
{
    var universeSelectionMarkets = new List<string> { "usa" };
    var frontier = DateTime.MaxValue;
    try
    {
        // don't initialize universe selection if it's not requested
        if (_algorithm.Universe != null)
        {
            // initialize subscriptions used for universe selection
            foreach (var market in universeSelectionMarkets)
            {
                AddSubscriptionForUniverseSelectionMarket(market);
            }
        }

        // compute initial frontier time
        frontier = GetInitialFrontierTime();
        Log.Trace(string.Format("FileSystemDataFeed.Run(): Begin: {0} UTC", frontier));

        // continue to loop over each subscription, enqueuing data in time order
        while (!_cancellationTokenSource.IsCancellationRequested)
        {
            var changes = SecurityChanges.None;
            var earlyBirdTicks = long.MaxValue;
            var data = new List<KeyValuePair<Security, List<BaseData>>>();
            foreach (var subscription in Subscriptions)
            {
                if (subscription.EndOfStream)
                {
                    // skip subscriptions that are finished
                    continue;
                }

                // NOTE(review): subscription.Current is dereferenced without priming here —
                // presumably subscriptions are primed at creation; confirm before changing
                var cache = new KeyValuePair<Security, List<BaseData>>(subscription.Security, new List<BaseData>());
                data.Add(cache);

                var currentOffsetTicks = subscription.OffsetProvider.GetOffsetTicks(frontier);
                // dequeue everything time stamped at or before the frontier (in the subscription's local time)
                while (subscription.Current.EndTime.Ticks - currentOffsetTicks <= frontier.Ticks)
                {
                    // we want bars rounded using their subscription times, we make a clone
                    // so we don't interfere with the enumerator's internal logic
                    var clone = subscription.Current.Clone(subscription.Current.IsFillForward);
                    clone.Time = clone.Time.RoundDown(subscription.Configuration.Increment);
                    cache.Value.Add(clone);
                    if (!subscription.MoveNext())
                    {
                        Log.Trace("FileSystemDataFeed.Run(): Finished subscription: " + subscription.Security.Symbol + " at " + frontier + " UTC");
                        break;
                    }
                }

                // we have new universe data to select based on
                if (subscription.IsFundamentalSubscription && cache.Value.Count > 0)
                {
                    // always wait for other thread
                    if (!Bridge.Wait(Timeout.Infinite, _cancellationTokenSource.Token))
                    {
                        break;
                    }

                    changes += _universeSelection.ApplyUniverseSelection(cache.Value[0].EndTime.Date, cache.Value.OfType<CoarseFundamental>());
                }

                if (subscription.Current != null)
                {
                    // track the earliest future data point to determine the next frontier
                    earlyBirdTicks = Math.Min(earlyBirdTicks, subscription.Current.EndTime.Ticks - currentOffsetTicks);
                }
            }

            if (earlyBirdTicks == long.MaxValue)
            {
                // there's no more data to pull off, we're done
                break;
            }

            // enqueue our next time slice and set the frontier for the next
            Bridge.Add(TimeSlice.Create(_algorithm, frontier, data, changes), _cancellationTokenSource.Token);

            // never go backwards in time, so take the max between early birds and the current frontier
            frontier = new DateTime(Math.Max(earlyBirdTicks, frontier.Ticks), DateTimeKind.Utc);
        }

        if (!_cancellationTokenSource.IsCancellationRequested)
        {
            Bridge.CompleteAdding();
        }
    }
    catch (Exception err)
    {
        Log.Error("FileSystemDataFeed.Run(): Encountered an error: " + err.Message);
        if (!_cancellationTokenSource.IsCancellationRequested)
        {
            // signal consumers that no more data is coming, then stop all producers
            Bridge.CompleteAdding();
            _cancellationTokenSource.Cancel();
        }
    }
    finally
    {
        Log.Trace(string.Format("FileSystemDataFeed.Run(): Data Feed Completed at {0} UTC", frontier));

        //Close up all streams:
        foreach (var subscription in Subscriptions)
        {
            subscription.Dispose();
        }

        Log.Trace("FileSystemDataFeed.Run(): Ending Thread... ");
        IsActive = false;
    }
}
/// <summary>
/// Returns an enumerable which provides the data to stream to the algorithm
/// </summary>
/// <param name="cancellationToken">Token observed to stop streaming</param>
/// <returns>A lazily-produced sequence of time slices; in live mode emits at least once
/// per second, in backtest mode emits until the frontier stops advancing</returns>
public IEnumerable<TimeSlice> StreamData(CancellationToken cancellationToken)
{
    PostInitialize();

    var shouldSendExtraEmptyPacket = false;
    var nextEmit = DateTime.MinValue;
    var previousEmitTime = DateTime.MaxValue;

    while (!cancellationToken.IsCancellationRequested)
    {
        TimeSlice timeSlice;
        try
        {
            timeSlice = _subscriptionSynchronizer.Sync(_subscriptionManager.DataFeedSubscriptions);
        }
        catch (Exception err)
        {
            Log.Error(err);
            // notify the algorithm about the error, so it can be reported to the user
            _algorithm.RunTimeError = err;
            _algorithm.Status = AlgorithmStatus.RuntimeError;
            // in live mode we still need to emit one final packet (see below)
            shouldSendExtraEmptyPacket = _liveMode;
            break;
        }

        // check for cancellation
        if (cancellationToken.IsCancellationRequested)
        {
            break;
        }

        if (_liveMode)
        {
            var frontierUtc = FrontierTimeProvider.GetUtcNow();
            // emit on data or if we've elapsed a full second since last emit or there are security changes
            if (timeSlice.SecurityChanges != SecurityChanges.None || timeSlice.Data.Count != 0 || frontierUtc >= nextEmit)
            {
                yield return (timeSlice);

                // force emitting every second since the data feed is
                // the heartbeat of the application
                nextEmit = frontierUtc.RoundDown(Time.OneSecond).Add(Time.OneSecond);
            }

            // take a short nap
            Thread.Sleep(1);
        }
        else
        {
            // SubscriptionFrontierTimeProvider will return twice the same time if there are no more subscriptions or if Subscription.Current is null
            if (timeSlice.Time != previousEmitTime)
            {
                previousEmitTime = timeSlice.Time;
                yield return (timeSlice);
            }
            else if (timeSlice.SecurityChanges == SecurityChanges.None)
            {
                // there's no more data to pull off, we're done (frontier is max value and no security changes)
                break;
            }
        }
    }

    if (shouldSendExtraEmptyPacket)
    {
        // send last empty packet list before terminating,
        // so the algorithm manager has a chance to detect the runtime error
        // and exit showing the correct error instead of a timeout
        nextEmit = previousEmitTime.RoundDown(Time.OneSecond).Add(Time.OneSecond);
        if (!cancellationToken.IsCancellationRequested)
        {
            var timeSlice = TimeSlice.Create(
                nextEmit,
                _dateTimeZone,
                _cashBook,
                new List<DataFeedPacket>(),
                SecurityChanges.None,
                new Dictionary<Universe, BaseDataCollection>());
            yield return (timeSlice);
        }
    }

    Log.Trace("Synchronizer.GetEnumerator(): Exited thread.");
}
/// <summary>
/// Syncs the specified subscriptions. The frontier time used for synchronization is
/// managed internally and dependent upon previous synchronization operations.
/// </summary>
/// <param name="subscriptions">The subscriptions to sync</param>
/// <returns>A <see cref="TimeSlice"/> containing all data emitted at or before the current frontier time,
/// plus any security changes produced by universe selection during this pass</returns>
public TimeSlice Sync(IEnumerable<Subscription> subscriptions)
{
    var changes = SecurityChanges.None;
    var data = new List<DataFeedPacket>();
    // NOTE: Tight coupling in UniverseSelection.ApplyUniverseSelection
    var universeData = new Dictionary<Universe, BaseDataCollection>();
    var universeDataForTimeSliceCreate = new Dictionary<Universe, BaseDataCollection>();

    // frontier is fixed for the whole pass; everything with EmitTimeUtc <= frontierUtc is pulled off
    var frontierUtc = _timeProvider.GetUtcNow();

    SecurityChanges newChanges;
    // keep re-scanning the subscriptions until universe selection stops producing changes;
    // subscriptions added by selection on one iteration get a chance to emit on the next
    // (note: 'subscriptions' is enumerated once per loop iteration)
    do
    {
        newChanges = SecurityChanges.None;
        foreach (var subscription in subscriptions)
        {
            if (subscription.EndOfStream)
            {
                OnSubscriptionFinished(subscription);
                continue;
            }

            // prime if needed
            if (subscription.Current == null)
            {
                if (!subscription.MoveNext())
                {
                    OnSubscriptionFinished(subscription);
                    continue;
                }
            }

            var packet = new DataFeedPacket(subscription.Security, subscription.Configuration, subscription.RemovedFromUniverse);

            // drain everything stamped at or before the frontier into this subscription's packet
            while (subscription.Current != null && subscription.Current.EmitTimeUtc <= frontierUtc)
            {
                packet.Add(subscription.Current.Data);
                if (!subscription.MoveNext())
                {
                    OnSubscriptionFinished(subscription);
                    break;
                }
            }

            if (packet.Count > 0)
            {
                // we have new universe data to select based on, store the subscription data until the end
                if (!subscription.IsUniverseSelectionSubscription)
                {
                    data.Add(packet);
                }
                else
                {
                    // assume that if the first item is a base data collection then the enumerator handled the aggregation,
                    // otherwise, load all the the data into a new collection instance
                    var packetBaseDataCollection = packet.Data[0] as BaseDataCollection;
                    var packetData = packetBaseDataCollection == null
                        ? packet.Data
                        : packetBaseDataCollection.Data;

                    // accumulate into a single collection per universe so selection runs once per universe per pass
                    BaseDataCollection collection;
                    if (universeData.TryGetValue(subscription.Universe, out collection))
                    {
                        collection.Data.AddRange(packetData);
                    }
                    else
                    {
                        // preserve the concrete collection type so chain-specific fields (e.g. Underlying) survive
                        if (packetBaseDataCollection is OptionChainUniverseDataCollection)
                        {
                            var current = packetBaseDataCollection as OptionChainUniverseDataCollection;
                            collection = new OptionChainUniverseDataCollection(frontierUtc, subscription.Configuration.Symbol, packetData, current?.Underlying);
                        }
                        else if (packetBaseDataCollection is FuturesChainUniverseDataCollection)
                        {
                            collection = new FuturesChainUniverseDataCollection(frontierUtc, subscription.Configuration.Symbol, packetData);
                        }
                        else
                        {
                            collection = new BaseDataCollection(frontierUtc, subscription.Configuration.Symbol, packetData);
                        }

                        universeData[subscription.Universe] = collection;
                    }
                }
            }

            // remove subscription for universe data if disposal requested AFTER time sync
            // this ensures we get any security changes from removing the universe and its children
            if (subscription.IsUniverseSelectionSubscription && subscription.Universe.DisposeRequested)
            {
                OnSubscriptionFinished(subscription);
            }
        }

        // run universe selection over the data collected this pass; any resulting changes
        // feed the do/while condition and trigger another scan of the subscriptions
        foreach (var kvp in universeData)
        {
            var universe = kvp.Key;
            var baseDataCollection = kvp.Value;
            // universeDataForTimeSliceCreate keeps data from every pass for the final TimeSlice;
            // universeData itself is cleared each pass
            universeDataForTimeSliceCreate[universe] = baseDataCollection;
            newChanges += _universeSelection.ApplyUniverseSelection(universe, frontierUtc, baseDataCollection);
        }
        universeData.Clear();

        changes += newChanges;
    }
    while (newChanges != SecurityChanges.None);

    var timeSlice = TimeSlice.Create(frontierUtc, _sliceTimeZone, _cashBook, data, changes, universeDataForTimeSliceCreate);

    return (timeSlice);
}
/// <summary>
/// Main routine for datafeed analysis.
/// </summary>
/// <remarks>This is a hot-thread and should be kept extremely lean. Modify with caution.</remarks>
public void Run()
{
    var universeSelectionMarkets = new List<string> { "usa" };
    var frontier = DateTime.MaxValue;
    try
    {
        // don't initialize universe selection if it's not requested
        if (_algorithm.Universe != null)
        {
            // initialize subscriptions used for universe selection
            foreach (var market in universeSelectionMarkets)
            {
                AddSubscriptionForUniverseSelectionMarket(market);
            }
        }

        // compute initial frontier time
        frontier = GetInitialFrontierTime();
        Log.Trace(string.Format("FileSystemDataFeed.Run(): Begin: {0} UTC", frontier));

        // continue to loop over each subscription, enqueuing data in time order
        while (!_cancellationTokenSource.IsCancellationRequested)
        {
            // each time step reset our security changes
            _changes = SecurityChanges.None;
            var earlyBirdTicks = long.MaxValue;
            var data = new List<KeyValuePair<Security, List<BaseData>>>();

            // we union subscriptions with itself so if subscriptions changes on the first
            // iteration we will pick up those changes in the union call, this is used in
            // universe selection. an alternative is to extract this into a method and check
            // to see if changes != SecurityChanges.None, and re-run all subscriptions again,
            // This was added as quick fix due to an issue found in universe selection regression alg
            foreach (var subscription in Subscriptions.Union(Subscriptions))
            {
                if (subscription.EndOfStream)
                {
                    // skip subscriptions that are finished
                    continue;
                }

                var cache = new KeyValuePair<Security, List<BaseData>>(subscription.Security, new List<BaseData>());
                data.Add(cache);

                var configuration = subscription.Configuration;
                var offsetProvider = subscription.OffsetProvider;
                var currentOffsetTicks = offsetProvider.GetOffsetTicks(frontier);
                // dequeue data stamped at or before the frontier (local time shifted by the offset)
                // BUGFIX: guard against a null Current — the original dereferenced it unconditionally,
                // unlike the primed/null-checked pattern used by the synchronizer's Sync loop
                while (subscription.Current != null && subscription.Current.EndTime.Ticks - currentOffsetTicks <= frontier.Ticks)
                {
                    // we want bars rounded using their subscription times, we make a clone
                    // so we don't interfere with the enumerator's internal logic
                    var clone = subscription.Current.Clone(subscription.Current.IsFillForward);
                    clone.Time = clone.Time.ExchangeRoundDown(configuration.Increment, subscription.Security.Exchange.Hours, configuration.ExtendedMarketHours);
                    cache.Value.Add(clone);
                    if (!subscription.MoveNext())
                    {
                        Log.Trace("FileSystemDataFeed.Run(): Finished subscription: " + subscription.Security.Symbol + " at " + frontier + " UTC");
                        break;
                    }
                }

                // we have new universe data to select based on
                if (subscription.IsFundamentalSubscription && cache.Value.Count > 0)
                {
                    // always wait for other thread
                    if (!Bridge.Wait(Timeout.Infinite, _cancellationTokenSource.Token))
                    {
                        break;
                    }

                    OnFundamental(FundamentalType.Coarse, frontier, configuration, cache.Value);
                }

                if (subscription.Current != null)
                {
                    // take the earliest between the next piece of data or the next tz discontinuity
                    earlyBirdTicks = Math.Min(earlyBirdTicks, Math.Min(subscription.Current.EndTime.Ticks - currentOffsetTicks, offsetProvider.GetNextDiscontinuity()));
                }
            }

            if (earlyBirdTicks == long.MaxValue)
            {
                // there's no more data to pull off, we're done
                break;
            }

            // enqueue our next time slice and set the frontier for the next
            Bridge.Add(TimeSlice.Create(frontier, _algorithm.TimeZone, _algorithm.Portfolio.CashBook, data, _changes), _cancellationTokenSource.Token);

            // never go backwards in time, so take the max between early birds and the current frontier
            frontier = new DateTime(Math.Max(earlyBirdTicks, frontier.Ticks), DateTimeKind.Utc);
        }

        if (!_cancellationTokenSource.IsCancellationRequested)
        {
            Bridge.CompleteAdding();
        }
    }
    catch (Exception err)
    {
        // BUGFIX: log the full exception (stack trace included) instead of err.Message only,
        // consistent with Log.Error(err) usage in Synchronizer.StreamData in this file
        Log.Error(err);
        if (!_cancellationTokenSource.IsCancellationRequested)
        {
            Bridge.CompleteAdding();
            _cancellationTokenSource.Cancel();
        }
    }
    finally
    {
        Log.Trace(string.Format("FileSystemDataFeed.Run(): Data Feed Completed at {0} UTC", frontier));

        // Close up all streams:
        foreach (var subscription in Subscriptions)
        {
            subscription.Dispose();
        }

        Log.Trace("FileSystemDataFeed.Run(): Ending Thread... ");
        IsActive = false;
    }
}