/// <summary>
/// Wraps a single data point in a one-entry time slice keyed by its
/// subscription index and pushes it onto the bridge.
/// </summary>
/// <param name="tick">The data point to forward; its EndTime stamps the slice</param>
/// <param name="i">The subscription index the data point belongs to</param>
private void AddSingleItemToBridge(BaseData tick, int i)
{
    // single-subscription payload: index -> { tick }
    var dataBySubscription = new Dictionary<int, List<BaseData>>();
    dataBySubscription[i] = new List<BaseData> { tick };
    Bridge.Add(new TimeSlice(tick.EndTime, dataBySubscription));
}
/// <summary>
/// Converts a single data point into a time slice for the specified subscription
/// and pushes it onto the bridge, unless cancellation has already been requested.
/// </summary>
/// <param name="subscription">The subscription that produced the data point</param>
/// <param name="tick">The data point to forward</param>
private void AddSingleItemToBridge(Subscription subscription, BaseData tick)
{
    // bail out instead of enqueuing while the feed is shutting down
    if (_cancellationTokenSource.IsCancellationRequested)
    {
        return;
    }

    // time slices are keyed in UTC; convert from the subscription's local zone
    var utcEndTime = tick.EndTime.ConvertToUtc(subscription.TimeZone);
    var data = new List<KeyValuePair<Security, List<BaseData>>>
    {
        new KeyValuePair<Security, List<BaseData>>(subscription.Security, new List<BaseData> { tick })
    };
    Bridge.Add(TimeSlice.Create(utcEndTime, _algorithm.TimeZone, _algorithm.Portfolio.CashBook, data, SecurityChanges.None));
}
/// <summary>
/// Wraps a single data point in a one-entry time slice keyed by its subscription
/// index and pushes it onto the bridge, unless cancellation has already been requested.
/// </summary>
/// <param name="tick">The data point to forward</param>
/// <param name="i">The subscription index the data point belongs to</param>
private void AddSingleItemToBridge(BaseData tick, int i)
{
    // bail out instead of enqueuing while the feed is shutting down
    if (_cancellationTokenSource.IsCancellationRequested)
    {
        return;
    }

    // slices are stamped in UTC; convert from this subscription's local zone
    var utcEndTime = tick.EndTime.ConvertToUtc(Subscriptions[i].TimeZone);
    var dataBySubscription = new Dictionary<int, List<BaseData>>();
    dataBySubscription[i] = new List<BaseData> { tick };
    Bridge.Add(new TimeSlice(utcEndTime, dataBySubscription));
}
/// <summary>
/// Primary entry point. Runs the feed's main loop on the calling thread: each pass
/// advances the frontier time, drains every subscription of data stamped at or before
/// that frontier, fires universe selection when universe data arrives, and emits a
/// TimeSlice onto the bridge. Emits at least once per second even with no data, since
/// downstream consumers treat the feed as the application's heartbeat.
/// </summary>
public void Run()
{
    IsActive = true;

    // we want to emit to the bridge minimally once a second since the data feed is
    // the heartbeat of the application, so this value will contain a second after
    // the last emit time, and if we pass this time, we'll emit even with no data
    var nextEmit = DateTime.MinValue;

    try
    {
        while (!_cancellationTokenSource.IsCancellationRequested)
        {
            // perform sleeps to wake up on the second?
            var frontier = _timeProvider.GetUtcNow();
            // publish the frontier so other components share the same notion of "now"
            _frontierTimeProvider.SetCurrentTime(frontier);

            // accumulates this pass's data, one entry per security with new data
            var data = new List<KeyValuePair<Security, List<BaseData>>>();
            foreach (var kvp in _subscriptions)
            {
                var subscription = kvp.Value;
                var cache = new KeyValuePair<Security, List<BaseData>>(subscription.Security, new List<BaseData>());

                // dequeue data that is time stamped at or before this frontier
                while (subscription.MoveNext() && subscription.Current != null)
                {
                    cache.Value.Add(subscription.Current);
                }

                // if we have data, add it to be added to the bridge
                if (cache.Value.Count > 0)
                {
                    data.Add(cache);
                }

                // we have new universe data to select based on
                if (subscription.IsUniverseSelectionSubscription && cache.Value.Count > 0)
                {
                    var universe = subscription.Universe;

                    // always wait for other thread to sync up before firing selection;
                    // Wait returning false here means we were cancelled, so stop draining
                    if (!Bridge.Wait(Timeout.Infinite, _cancellationTokenSource.Token))
                    {
                        break;
                    }

                    // fire the universe selection event
                    OnUniverseSelection(universe, subscription.Configuration, frontier, cache.Value);
                }
            }

            // check for cancellation before producing into the bridge
            if (_cancellationTokenSource.IsCancellationRequested)
            {
                return;
            }

            // emit on data or if we've elapsed a full second since last emit
            if (data.Count != 0 || frontier >= nextEmit)
            {
                Bridge.Add(TimeSlice.Create(frontier, _algorithm.TimeZone, _algorithm.Portfolio.CashBook, data, _changes), _cancellationTokenSource.Token);

                // force emitting every second: next deadline is the top of the next second
                nextEmit = frontier.RoundDown(Time.OneSecond).Add(Time.OneSecond);
            }

            // reset our security changes; they were delivered with the slice above
            _changes = SecurityChanges.None;

            // take a short nap so this loop doesn't spin at 100% CPU
            Thread.Sleep(1);
        }
    }
    catch (Exception err)
    {
        // surface the failure to the algorithm so the engine can terminate the run
        Log.Error(err);
        _algorithm.RunTimeError = err;
    }

    IsActive = false;
}
/// <summary>
/// Execute the primary thread for retrieving stock data.
/// 1. Subscribe to the streams requested.
/// 2. Build bars or tick data requested, primary loop increment smallest possible.
/// Spawns the stream consumer thread, then drives a once-per-second timer that
/// archives each non-tick stream store on its resolution boundary and pushes the
/// dequeued bars onto the bridge as a TimeSlice.
/// </summary>
public void Run()
{
    // Symbols requested: equities and forex only; dynamically loaded (custom) data excluded
    _symbols = (from security in _algorithm.Securities.Values
                where !security.IsDynamicallyLoadedData && (security.Type == SecurityType.Equity || security.Type == SecurityType.Forex)
                select security.Symbol).ToList<string>();

    //Initialize: one stream store per non-tick subscription, keyed by subscription index
    _streamStores = new Dictionary<int, StreamStore>();
    for (var i = 0; i < Subscriptions.Count; i++)
    {
        var config = _subscriptions[i];
        if (config.Resolution != Resolution.Tick)
        {
            _streamStores.Add(i, new StreamStore(config, _algorithm.Securities[config.Symbol]));
        }
    }
    Log.Trace(string.Format("LiveTradingDataFeed.Stream(): Initialized {0} stream stores.", _streamStores.Count));

    // Set up separate thread to handle stream and building packets:
    var streamThread = new Thread(StreamStoreConsumer);
    streamThread.Start();
    Thread.Sleep(5); // Wait a little for the other thread to init.

    // This thread converts data into bars "on" the second - assuring the bars are close as
    // possible to a second unit tradebar (starting at 0 milliseconds).
    var realtime = new RealTimeSynchronizedTimer(TimeSpan.FromSeconds(1), triggerTime =>
    {
        // determine if we're on even time boundaries for data emit
        var onMinute = triggerTime.Second == 0;
        var onHour = onMinute && triggerTime.Minute == 0;
        var onDay = onHour && triggerTime.Hour == 0;

        // Determine if this subscription needs to be archived:
        var items = new Dictionary<int, List<BaseData>>();
        for (var i = 0; i < Subscriptions.Count; i++)
        {
            // stream stores exist only for non-tick subscriptions (see init above), so
            // skip tick subscriptions — there is no store at this index to dequeue from
            if (Subscriptions[i].Resolution == Resolution.Tick)
            {
                continue;
            }

            // archive only when the timer lands on this subscription's resolution boundary
            bool triggerArchive = false;
            switch (_subscriptions[i].Resolution)
            {
                case Resolution.Second:
                    triggerArchive = true;
                    break;
                case Resolution.Minute:
                    triggerArchive = onMinute;
                    break;
                case Resolution.Hour:
                    triggerArchive = onHour;
                    break;
                case Resolution.Daily:
                    triggerArchive = onDay;
                    break;
            }

            if (triggerArchive)
            {
                // close out the in-progress bar (fill-forwarding if configured),
                // then drain everything the store has queued for the bridge
                _streamStores[i].TriggerArchive(triggerTime, _subscriptions[i].FillDataForward);
                BaseData data;
                var dataPoints = new List<BaseData>();
                while (_streamStores[i].Queue.TryDequeue(out data))
                {
                    dataPoints.Add(data);
                }
                items[i] = dataPoints;
            }
        }
        Bridge.Add(new TimeSlice(triggerTime, items));
    });

    //Start the realtime sampler above
    realtime.Start();

    while (!_exitTriggered && !_endOfBridges)
    {
        // main work of this class is done in the realtime and stream store consumer threads
        Thread.Sleep(1000);
    }

    //Dispose of the realtime clock.
    realtime.Stop();

    //Stop thread
    _isActive = false;

    //Exit Live DataStream Feed:
    Log.Trace("LiveTradingDataFeed.Run(): Exiting LiveTradingDataFeed Run Method");
}
/// <summary>
/// Forwards a message onto the underlying bridge queue.
/// </summary>
/// <param name="message">The message to enqueue</param>
public void Add(string message) => bridge.Add(message);
/// <summary>
/// Execute the primary thread for retrieving stock data.
/// 1. Subscribe to the streams requested.
/// 2. Build bars or tick data requested, primary loop increment smallest possible.
/// Spawns the stream consumer thread, then drives a once-per-second UTC timer that
/// archives each non-tick subscription's stream store on its resolution boundary,
/// runs coarse universe selection on local day changes, and emits a TimeSlice.
/// </summary>
public void Run()
{
    //Initialize:
    // Set up separate thread to handle stream and building packets:
    var streamThread = new Thread(StreamStoreConsumer);
    streamThread.Start();
    Thread.Sleep(5); // Wait a little for the other thread to init.

    // This thread converts data into bars "on" the second - assuring the bars are close as
    // possible to a second unit tradebar (starting at 0 milliseconds).
    var realtime = new RealTimeSynchronizedTimer(TimeSpan.FromSeconds(1), utcTriggerTime =>
    {
        // determine if we're on even time boundaries for data emit;
        // onDay is computed per subscription below since it depends on the local time zone
        var onMinute = utcTriggerTime.Second == 0;
        var onHour = onMinute && utcTriggerTime.Minute == 0;

        // Determine if this subscription needs to be archived:
        var items = new List<KeyValuePair<Security, List<BaseData>>>();
        // NOTE(review): 'changes' is never reassigned in this handler, so every slice is
        // emitted with SecurityChanges.None — confirm changes are delivered elsewhere
        var changes = SecurityChanges.None;
        // tracks markets already selected this tick so each market is processed once
        var performedUniverseSelection = new HashSet<string>();
        foreach (var kvp in _subscriptions)
        {
            var subscription = kvp.Value;

            // tick subscriptions have no stream store; they're emitted elsewhere
            if (subscription.Configuration.Resolution == Resolution.Tick)
            {
                continue;
            }

            // convert the UTC trigger time into this subscription's local time
            var localTime = new DateTime(utcTriggerTime.Ticks - subscription.OffsetProvider.GetOffsetTicks(utcTriggerTime));
            var onDay = onHour && localTime.Hour == 0;

            // perform universe selection if requested on day changes (don't perform multiple times per market)
            if (onDay && _algorithm.Universe != null && performedUniverseSelection.Add(subscription.Configuration.Market))
            {
                var coarse = UniverseSelection.GetCoarseFundamentals(subscription.Configuration.Market, subscription.TimeZone, localTime.Date, true);
                OnFundamental(FundamentalType.Coarse, utcTriggerTime, subscription.Configuration, coarse.ToList());
            }

            // archive only when the timer lands on this subscription's resolution boundary
            var triggerArchive = false;
            switch (subscription.Configuration.Resolution)
            {
                case Resolution.Second:
                    triggerArchive = true;
                    break;
                case Resolution.Minute:
                    triggerArchive = onMinute;
                    break;
                case Resolution.Hour:
                    triggerArchive = onHour;
                    break;
                case Resolution.Daily:
                    triggerArchive = onDay;
                    break;
            }

            if (triggerArchive)
            {
                // close out the in-progress bar; null means the store had nothing to emit
                var data = subscription.StreamStore.TriggerArchive(utcTriggerTime);
                if (data != null)
                {
                    items.Add(new KeyValuePair<Security, List<BaseData>>(subscription.Security, new List<BaseData> { data }));
                }
            }
        }

        // don't try to add if we're already cancelling
        if (_cancellationTokenSource.IsCancellationRequested)
        {
            return;
        }
        Bridge.Add(TimeSlice.Create(utcTriggerTime, _algorithm.TimeZone, _algorithm.Portfolio.CashBook, items, changes));
    });

    //Start the realtime sampler above
    realtime.Start();

    while (!_cancellationTokenSource.IsCancellationRequested && !_endOfBridges)
    {
        // main work of this class is done in the realtime and stream store consumer threads
        Thread.Sleep(1000);
    }

    //Dispose of the realtime clock.
    realtime.Stop();

    //Stop thread
    _isActive = false;

    //Exit Live DataStream Feed:
    Log.Trace("LiveTradingDataFeed.Run(): Exiting LiveTradingDataFeed Run Method");
}
/// <summary>
/// Crude implementation to connect and pull required data from MYSQL.
/// This is not efficient at all but just seeks to provide 0.1 draft for others to build from.
/// Each pass queries up to 100 bars per subscription, sorts them by end time, and emits
/// only the earliest (frontier) time slice onto the bridge.
/// </summary>
/// <remarks>
/// Currently the MYSQL datafeed doesn't support fillforward but will just feed the data from dBase into algorithm.
/// In the future we can write an IEnumerator{BaseData} for accessing the database
/// </remarks>
public void Run()
{
    //Initialize MYSQL Connection:
    Connect();

    while (!_exitTriggered && IsActive)
    {
        // earliest end time seen across all subscriptions this pass (ticks)
        var frontierTicks = long.MaxValue;
        // bars grouped by end time, then by subscription index; rebuilt every pass
        var items = new SortedDictionary<DateTime, Dictionary<int, List<BaseData>>>();

        if (Bridge.Count >= 10000)
        {
            // guard against overflowing the bridge
            Thread.Sleep(5);
            continue;
        }

        for (var i = 0; i < Subscriptions.Count; i++)
        {
            if (EndOfBridge[i])
            {
                // this subscription is done
                continue;
            }

            //With each subscription; fetch the next increment of data from the queues:
            var subscription = Subscriptions[i];

            //Fetch our data from mysql
            // NOTE(review): table name and time bounds are concatenated via string.Format;
            // Symbol comes from internal config here, but a parameterized query would be safer
            var data = Query(string.Format("SELECT * " +
                "FROM equity_{0} " +
                "WHERE time >= '{1}' " +
                "AND time <= '{2}' " +
                "ORDER BY time ASC LIMIT 100", subscription.Symbol, _mySQLBridgeTime[i].ToString("u"), _endTime.ToString("u")));

            //Comment out for live databases, where we should continue asking even if no data.
            if (data.Count == 0)
            {
                EndOfBridge[i] = true;
                continue;
            }

            // group and order the bars by the end time
            var bars = GenerateBars(subscription.Symbol, data);

            // load up our sorted dictionary of data to be put into the bridge
            foreach (var bar in bars)
            {
                Dictionary<int, List<BaseData>> dataDictionary;
                if (!items.TryGetValue(bar.EndTime, out dataDictionary))
                {
                    dataDictionary = new Dictionary<int, List<BaseData>>();
                    items[bar.EndTime] = dataDictionary;
                }
                List<BaseData> dataPoints;
                if (!dataDictionary.TryGetValue(i, out dataPoints))
                {
                    dataPoints = new List<BaseData>();
                    dataDictionary[i] = dataPoints;
                }
                dataPoints.Add(bar);
            }

            //Record the furthest moment in time.
            // NOTE(review): advancing to bars.Max(bar => bar.Time) with a 'time >=' query
            // re-selects the last bar on the next pass — presumably GenerateBars/grouping
            // absorbs the duplicate; confirm against the actual data flow
            _mySQLBridgeTime[i] = bars.Max(bar => bar.Time);
            frontierTicks = Math.Min(frontierTicks, bars.Min(bar => bar.EndTime.Ticks));
        }

        if (frontierTicks == long.MaxValue)
        {
            // we didn't get anything from the database so we're finished
            break;
        }

        // emit only the frontier slice; later entries in 'items' are discarded and
        // re-queried on the next pass (inefficient by design — see summary)
        var frontier = new DateTime(frontierTicks);
        Dictionary<int, List<BaseData>> timeSlice;
        if (items.TryGetValue(frontier, out timeSlice))
        {
            Bridge.Add(new TimeSlice(frontier, timeSlice));
        }
    }

    LoadingComplete = true;
    _connection.Close();
    IsActive = false;
}