/// <summary> /// Wrapper for <see cref = "IAlgorithm.Debug" /> in Python /// </summary> /// <param name="message"></param> public void Debug(string message) { using (Py.GIL()) { _algorithm.Debug(message); } }
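// Illustrative companion (not from the source): the same GIL-guard pattern applied to IAlgorithm.Error,
// assuming the wrapper holds the wrapped algorithm in an _algorithm field as above.
/// <summary>
/// Wrapper for <see cref="IAlgorithm.Error"/> in Python
/// </summary>
/// <param name="message"></param>
public void Error(string message)
{
    using (Py.GIL())
    {
        _algorithm.Error(message);
    }
}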
/// <summary> /// Process the brokerage message event. Trigger any actions in the algorithm or notifications system required. /// </summary> /// <param name="message">Message object</param> public void Handle(BrokerageMessageEvent message) { var toLog = $"{_algo.Time.ToStringInvariant("o")} Event: {message.Message}"; _algo.Debug(toLog); _algo.Log(toLog); }
/// <summary> /// Process the brokerage message event. Trigger any actions in the algorithm or notifications system required. /// </summary> /// <param name="message">Message object</param> public void Handle(BrokerageMessageEvent message) { var toLog = _algo.Time.ToString("o") + " Event: " + message.Message; _algo.Debug(toLog); _algo.Log(toLog); }
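// A hedged sketch of the kind of event the two handlers above receive; it assumes BrokerageMessageEvent
// exposes a (type, code, message) constructor and that 'messageHandler' is whatever
// IBrokerageMessageHandler implementation hosts the Handle method shown above.
var brokerageEvent = new BrokerageMessageEvent(BrokerageMessageType.Information, "101", "Connected to brokerage");
messageHandler.Handle(brokerageEvent);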
private void AddUnrequestedSecurity(IAlgorithm algorithm, Symbol symbol) { if (!algorithm.Portfolio.ContainsKey(symbol)) { var resolution = algorithm.UniverseSettings.Resolution; if (!_notifiedDefaultResolutionUsed) { // let's just send the message once _notifiedDefaultResolutionUsed = true; algorithm.Debug($"Will use UniverseSettings.Resolution value '{resolution}' for automatically added securities for open orders and holdings."); } Log.Trace("BrokerageSetupHandler.Setup(): Adding unrequested security: " + symbol.Value); if (symbol.SecurityType == SecurityType.Option || symbol.SecurityType == SecurityType.FutureOption) { // add current option contract to the system algorithm.AddOptionContract(symbol, resolution, true, 1.0m); } else if (symbol.SecurityType == SecurityType.Future) { // add current future contract to the system algorithm.AddFutureContract(symbol, resolution, true, 1.0m); } else { // for items not directly requested, set leverage to 1 and use the universe settings resolution algorithm.AddSecurity(symbol.SecurityType, symbol.Value, resolution, symbol.ID.Market, true, 1.0m, false); } } }
/// <summary> /// We want to make algorithm messages end up in both the standard regression log file {algorithm}.{language}.log /// as well as the details log {algorithm}.{language}.details.log. The details log is focused on providing a log /// dedicated solely to the algorithm's behavior, devoid of all <see cref="QuantConnect.Logging.Log"/> messages /// </summary> protected override void ConfigureConsoleTextWriter(IAlgorithm algorithm) { // configure Console.WriteLine and Console.Error.WriteLine to write to both logs, syslog and details.log // when 'forward-console-messages' is set to false, it guarantees synchronous logging of these messages if (Config.GetBool("forward-console-messages", true)) { // we need to forward Console.Write messages to the algorithm's Debug function Console.SetOut(new FuncTextWriter(msg => { algorithm.Debug(msg); WriteLine($"DEBUG: {msg}"); })); Console.SetError(new FuncTextWriter(msg => { algorithm.Error(msg); WriteLine($"ERROR: {msg}"); })); } else { // we need to forward Console.Write messages to the standard Log functions Console.SetOut(new FuncTextWriter(msg => { Log.Trace(msg); WriteLine($"DEBUG: {msg}"); })); Console.SetError(new FuncTextWriter(msg => { Log.Error(msg); WriteLine($"ERROR: {msg}"); })); } }
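// Illustrative algorithm-side snippet (not from the source) showing the effect of the redirection above:
// with 'forward-console-messages' enabled, plain Console output is routed into IAlgorithm.Debug/Error,
// so it lands in the same logs as explicit Debug calls.
public override void OnData(Slice slice)
{
    Debug("Explicit debug message");              // goes through IAlgorithm.Debug directly
    Console.WriteLine("Console debug message");   // forwarded to algorithm.Debug by the writer above
    Console.Error.WriteLine("Console error");     // forwarded to algorithm.Error by the writer above
}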
private bool LoadCashBalance(LiveNodePacket liveJob, IBrokerage brokerage, IAlgorithm algorithm) { Log.Trace("BrokerageSetupHandler.Setup(): Fetching cash balance from brokerage..."); try { // set the algorithm's cash balance for each currency var cashBalance = brokerage.GetCashBalance(); string maxCashLimitStr; if (liveJob.BrokerageData.TryGetValue("max-cash-limit", out maxCashLimitStr)) { var maxCashLimit = JsonConvert.DeserializeObject <HashSet <CashAmount> >(maxCashLimitStr); brokerage.DisableCashSync(); Log.Trace("BrokerageSetupHandler.Setup(): will use job packet max cash limit. Disabled cash sync."); foreach (var cash in maxCashLimit) { var brokerageCash = cashBalance.FirstOrDefault( brokerageCashAmount => string.Equals(brokerageCashAmount.Currency, cash.Currency, StringComparison.InvariantCultureIgnoreCase)); // we use the min amount between the brokerage and the job packet, if any if (brokerageCash != default(CashAmount)) { Log.Trace($"BrokerageSetupHandler.Setup(): Job packet amount {cash.Currency} {cash.Amount}. Brokerage amount {brokerageCash.Amount}."); var cashToUse = new CashAmount(Math.Min(cash.Amount, brokerageCash.Amount), cash.Currency); algorithm.Debug($"Live deployment has been allocation-limited to {cashToUse.Amount:C} {cashToUse.Currency}"); algorithm.Portfolio.SetCash(cashToUse.Currency, cashToUse.Amount, 0); } else { Log.Trace($"BrokerageSetupHandler.Setup(): Skip setting {cash.Currency}: the brokerage does not have it."); } } } else { foreach (var cash in cashBalance) { Log.Trace($"BrokerageSetupHandler.Setup(): Setting {cash.Currency} cash to {cash.Amount}"); algorithm.Portfolio.SetCash(cash.Currency, cash.Amount, 0); } } } catch (Exception err) { Log.Error(err); AddInitializationError("Error getting cash balance from brokerage: " + err.Message, err); return(false); } return(true); }
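// A hedged sketch of how the 'max-cash-limit' value read above might be supplied on the job packet:
// a JSON-serialized HashSet<CashAmount>. The figures and currencies are illustrative only.
var liveJob = new LiveNodePacket();
liveJob.BrokerageData["max-cash-limit"] = JsonConvert.SerializeObject(new HashSet<CashAmount>
{
    new CashAmount(25000m, "USD"),   // cap the USD allocation at 25,000
    new CashAmount(1m, "BTC")        // cap the BTC allocation at 1
});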
/// <summary> /// Runs this command against the specified algorithm instance /// </summary> /// <param name="algorithm">The algorithm to run this command against</param> public void Run(IAlgorithm algorithm) { var request = new SubmitOrderRequest(OrderType, SecurityType, Symbol, Quantity, StopPrice, LimitPrice, DateTime.UtcNow, Tag); var ticket = algorithm.Transactions.ProcessRequest(request); var response = ticket.GetMostRecentOrderResponse(); var message = string.Format("{0} for {1} units of {2}: {3}", OrderType, Quantity, Symbol, response); if (response.IsSuccess) { algorithm.Debug(message); } else { algorithm.Error(message); } }
/// <summary> /// Runs this command against the specified algorithm instance /// </summary> /// <param name="algorithm">The algorithm to run this command against</param> public CommandResultPacket Run(IAlgorithm algorithm) { var request = new SubmitOrderRequest(OrderType, SecurityType, Symbol, Quantity, StopPrice, LimitPrice, DateTime.UtcNow, Tag); var ticket = algorithm.Transactions.ProcessRequest(request); var response = ticket.GetMostRecentOrderResponse(); var message = string.Format("{0} for {1} units of {2}: {3}", OrderType, Quantity, Symbol, response); if (response.IsSuccess) { algorithm.Debug(message); } else { algorithm.Error(message); } return new CommandResultPacket(this, response.IsSuccess); }
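// A hedged usage sketch, assuming the Run methods above live on an order-submission command type
// (called OrderCommand here purely for illustration) whose settable properties mirror the
// SubmitOrderRequest arguments used above; 'algorithm' is assumed to be an IAlgorithm in scope.
var command = new OrderCommand
{
    OrderType = OrderType.Market,
    SecurityType = SecurityType.Equity,
    Symbol = Symbol.Create("SPY", SecurityType.Equity, Market.USA),
    Quantity = 100,
    Tag = "submitted via command"
};
var result = command.Run(algorithm);   // CommandResultPacket; assumed to expose Success mirroring response.IsSuccess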
public static ScheduledEvent EveryAlgorithmEndOfDay(IAlgorithm algorithm, IResultHandler resultHandler, DateTime start, DateTime end, TimeSpan endOfDayDelta, DateTime?currentUtcTime = null) { if (endOfDayDelta >= Time.OneDay) { throw new ArgumentException("Delta must be less than a day", "endOfDayDelta"); } // set up an event to fire every tradeable date for the algorithm as a whole var eodEventTime = Time.OneDay.Subtract(endOfDayDelta); // create enumerable of end of day in algorithm's time zone var times = // for every date any exchange is open in the algorithm from date in Time.EachTradeableDay(algorithm.Securities.Values, start, end) // define the time of day we want the event to fire, a little before midnight let eventTime = date + eodEventTime // convert the event time into UTC let eventUtcTime = eventTime.ConvertToUtc(algorithm.TimeZone) // perform filter to verify it's not before the current time where !currentUtcTime.HasValue || eventUtcTime > currentUtcTime.Value select eventUtcTime; // Log a message warning the user this EOD will be deprecated soon algorithm.Debug("Usage of QCAlgorithm.OnEndOfDay() without a symbol will be deprecated August 2021. Always use a symbol when overriding this method: OnEndOfDay(symbol)"); return(new ScheduledEvent(CreateEventName("Algorithm", "EndOfDay"), times, (name, triggerTime) => { try { algorithm.OnEndOfDay(); } catch (Exception err) { resultHandler.RuntimeError($"Runtime error in {name} event: {err.Message}", err.StackTrace); Log.Error(err, $"ScheduledEvent.{name}:"); } })); }
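// The Debug message above steers users toward the symbol overload; a minimal algorithm-side sketch:
public override void OnEndOfDay(Symbol symbol)
{
    Debug($"End of day for {symbol.Value}");
}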
private Security AddUnrequestedSecurity(IAlgorithm algorithm, Symbol symbol) { if (!algorithm.Securities.TryGetValue(symbol, out Security security)) { var resolution = algorithm.UniverseSettings.Resolution; var fillForward = algorithm.UniverseSettings.FillForward; var leverage = algorithm.UniverseSettings.Leverage; var extendedHours = algorithm.UniverseSettings.ExtendedMarketHours; if (!_notifiedUniverseSettingsUsed) { // let's just send the message once _notifiedUniverseSettingsUsed = true; algorithm.Debug($"Will use UniverseSettings for automatically added securities for open orders and holdings. UniverseSettings:" + $" Resolution = {resolution}; Leverage = {leverage}; FillForward = {fillForward}; ExtendedHours = {extendedHours}"); } Log.Trace("BrokerageSetupHandler.Setup(): Adding unrequested security: " + symbol.Value); if (symbol.SecurityType.IsOption()) { // add current option contract to the system security = algorithm.AddOptionContract(symbol, resolution, fillForward, leverage); } else if (symbol.SecurityType == SecurityType.Future) { // add current future contract to the system security = algorithm.AddFutureContract(symbol, resolution, fillForward, leverage); } else { // for items not directly requested, add them using the universe settings security = algorithm.AddSecurity(symbol.SecurityType, symbol.Value, resolution, symbol.ID.Market, fillForward, leverage, extendedHours); } } return(security); }
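// Because unrequested holdings are added with UniverseSettings (see the Debug message above), an
// algorithm can control how those securities are added from Initialize; a minimal sketch:
public override void Initialize()
{
    UniverseSettings.Resolution = Resolution.Minute;
    UniverseSettings.Leverage = 2m;
    UniverseSettings.FillForward = true;
    UniverseSettings.ExtendedMarketHours = false;
}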
private IEnumerable <TimeSlice> Stream(AlgorithmNodePacket job, IAlgorithm algorithm, IDataFeed feed, IResultHandler results, CancellationToken cancellationToken) { bool setStartTime = false; var timeZone = algorithm.TimeZone; var history = algorithm.HistoryProvider; // get the required history job from the algorithm DateTime?lastHistoryTimeUtc = null; var historyRequests = algorithm.GetWarmupHistoryRequests().ToList(); // initialize variables for progress computation var start = DateTime.UtcNow.Ticks; var nextStatusTime = DateTime.UtcNow.AddSeconds(1); var minimumIncrement = algorithm.UniverseManager .Select(x => x.Value.Configuration.Resolution.ToTimeSpan()) .DefaultIfEmpty(Time.OneSecond) .Min(); minimumIncrement = minimumIncrement == TimeSpan.Zero ? Time.OneSecond : minimumIncrement; if (historyRequests.Count != 0) { // rewrite internal feed requests var subscriptions = algorithm.SubscriptionManager.Subscriptions.Where(x => !x.IsInternalFeed).ToList(); var minResolution = subscriptions.Count > 0 ? subscriptions.Min(x => x.Resolution) : Resolution.Second; foreach (var request in historyRequests) { Security security; if (algorithm.Securities.TryGetValue(request.Symbol, out security) && security.SubscriptionDataConfig.IsInternalFeed) { if (request.Resolution < minResolution) { request.Resolution = minResolution; request.FillForwardResolution = request.FillForwardResolution.HasValue ? minResolution : (Resolution?)null; } } } // rewrite all to share the same fill forward resolution if (historyRequests.Any(x => x.FillForwardResolution.HasValue)) { minResolution = historyRequests.Where(x => x.FillForwardResolution.HasValue).Min(x => x.FillForwardResolution.Value); foreach (var request in historyRequests.Where(x => x.FillForwardResolution.HasValue)) { request.FillForwardResolution = minResolution; } } foreach (var request in historyRequests) { start = Math.Min(request.StartTimeUtc.Ticks, start); Log.Trace(string.Format("AlgorithmManager.Stream(): WarmupHistoryRequest: {0}: Start: {1} End: {2} Resolution: {3}", request.Symbol, request.StartTimeUtc, request.EndTimeUtc, request.Resolution)); } // make the history request and build time slices foreach (var slice in history.GetHistory(historyRequests, timeZone)) { TimeSlice timeSlice; try { // we need to recombine this slice into a time slice var paired = new List <KeyValuePair <Security, List <BaseData> > >(); foreach (var symbol in slice.Keys) { var security = algorithm.Securities[symbol]; var data = slice[symbol]; var list = new List <BaseData>(); var ticks = data as List <Tick>; if (ticks != null) { list.AddRange(ticks); } else { list.Add(data); } paired.Add(new KeyValuePair <Security, List <BaseData> >(security, list)); } timeSlice = TimeSlice.Create(slice.Time.ConvertToUtc(timeZone), timeZone, algorithm.Portfolio.CashBook, paired, SecurityChanges.None); } catch (Exception err) { Log.Error(err); algorithm.RunTimeError = err; yield break; } if (timeSlice != null) { if (!setStartTime) { setStartTime = true; _previousTime = timeSlice.Time; algorithm.Debug("Algorithm warming up..."); } if (DateTime.UtcNow > nextStatusTime) { // send some status to the user letting them know we're done history, but still warming up, // catching up to real time data nextStatusTime = DateTime.UtcNow.AddSeconds(1); var percent = (int)(100 * (timeSlice.Time.Ticks - start) / (double)(DateTime.UtcNow.Ticks - start)); results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.History, string.Format("Catching up to realtime {0}%...", percent)); } yield return(timeSlice); 
lastHistoryTimeUtc = timeSlice.Time; } } } // if we're not live or didn't even request warmup, then set us as not warming up if (!algorithm.LiveMode || historyRequests.Count == 0) { algorithm.SetFinishedWarmingUp(); results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Running); if (historyRequests.Count != 0) { algorithm.Debug("Algorithm finished warming up."); Log.Trace("AlgorithmManager.Stream(): Finished warmup"); } } foreach (var timeSlice in feed) { if (!setStartTime) { setStartTime = true; _previousTime = timeSlice.Time; } if (algorithm.LiveMode && algorithm.IsWarmingUp) { // this is hand-over logic, we spin up the data feed first and then request // the history for warmup, so there will be some overlap between the data if (lastHistoryTimeUtc.HasValue) { // make sure there's no historical data, this only matters for the handover var hasHistoricalData = false; foreach (var data in timeSlice.Slice.Ticks.Values.SelectMany(x => x).Concat <BaseData>(timeSlice.Slice.Bars.Values)) { // check if any ticks in the list are on or after our last warmup point, if so, skip this data if (data.EndTime.ConvertToUtc(algorithm.Securities[data.Symbol].Exchange.TimeZone) >= lastHistoryTimeUtc) { hasHistoricalData = true; break; } } if (hasHistoricalData) { continue; } // prevent us from doing these checks every loop lastHistoryTimeUtc = null; } // in live mode wait to mark us as finished warming up when // the data feed has caught up to now within the min increment if (timeSlice.Time > DateTime.UtcNow.Subtract(minimumIncrement)) { algorithm.SetFinishedWarmingUp(); results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Running); algorithm.Debug("Algorithm finished warming up."); Log.Trace("AlgorithmManager.Stream(): Finished warmup"); } else if (DateTime.UtcNow > nextStatusTime) { // send some status to the user letting them know we're done with history, but still warming up, // catching up to real time data nextStatusTime = DateTime.UtcNow.AddSeconds(1); var percent = (int)(100 * (timeSlice.Time.Ticks - start) / (double)(DateTime.UtcNow.Ticks - start)); results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.History, string.Format("Catching up to realtime {0}%...", percent)); } } yield return(timeSlice); } }
/// <summary> /// Send debug message /// </summary> /// <param name="message">String message</param> public void Debug(string message) => _baseAlgorithm.Debug(message);
/// <summary> /// Liquidate option contract holdings whose underlying security has split /// </summary> private void ProcessSplitSymbols(IAlgorithm algorithm, List <Split> splitWarnings, List <Delisting> pendingDelistings) { // NOTE: This method assumes option contracts have the same core trading hours as their underlying contract // This is a small performance optimization to prevent scanning every contract on every time step, // instead we scan just the underlyings, thereby reducing the time footprint of this method by a factor // of N, the number of derivative subscriptions for (int i = splitWarnings.Count - 1; i >= 0; i--) { var split = splitWarnings[i]; var security = algorithm.Securities[split.Symbol]; if (!security.IsTradable && !algorithm.UniverseManager.ActiveSecurities.Keys.Contains(split.Symbol)) { Log.Debug($"AlgorithmManager.ProcessSplitSymbols(): {_algorithm.Time} - Removing split warning for {security.Symbol}"); // remove the warning from our list splitWarnings.RemoveAt(i); // Since we are storing the split warnings for a loop // we need to check if the security was removed. // When removed, it will be marked as non-tradable but just in case // we expect it not to be an active security either continue; } var nextMarketClose = security.Exchange.Hours.GetNextMarketClose(security.LocalTime, false); // determine the latest possible time we can submit a MOC order var configs = algorithm.SubscriptionManager.SubscriptionDataConfigService .GetSubscriptionDataConfigs(security.Symbol); if (configs.Count == 0) { // should never happen at this point, if it does let's give some extra info throw new Exception( $"AlgorithmManager.ProcessSplitSymbols(): {_algorithm.Time} - No subscriptions found for {security.Symbol}" + $", IsTradable: {security.IsTradable}" + $", Active: {algorithm.UniverseManager.ActiveSecurities.Keys.Contains(split.Symbol)}"); } var latestMarketOnCloseTimeRoundedDownByResolution = nextMarketClose.Subtract(MarketOnCloseOrder.SubmissionTimeBuffer) .RoundDownInTimeZone(configs.GetHighestResolution().ToTimeSpan(), security.Exchange.TimeZone, configs.First().DataTimeZone); // we don't need to do anything until the market closes if (security.LocalTime < latestMarketOnCloseTimeRoundedDownByResolution) { continue; } // fetch all option derivatives of the underlying with holdings (excluding the canonical security) var derivatives = algorithm.Securities.Where(kvp => kvp.Key.HasUnderlying && kvp.Key.SecurityType.IsOption() && kvp.Key.Underlying == security.Symbol && !kvp.Key.Underlying.IsCanonical() && kvp.Value.HoldStock ); foreach (var kvp in derivatives) { var optionContractSymbol = kvp.Key; var optionContractSecurity = (Option)kvp.Value; if (pendingDelistings.Any(x => x.Symbol == optionContractSymbol && x.Time.Date == optionContractSecurity.LocalTime.Date)) { // if the option is going to be delisted today we skip sending the market on close order continue; } // close any open orders algorithm.Transactions.CancelOpenOrders(optionContractSymbol, "Canceled due to impending split. Separate MarketOnClose order submitted to liquidate position."); var request = new SubmitOrderRequest(OrderType.MarketOnClose, optionContractSecurity.Type, optionContractSymbol, -optionContractSecurity.Holdings.Quantity, 0, 0, algorithm.UtcTime, "Liquidated due to impending split. Option splits are not currently supported."); // send MOC order to liquidate option contract holdings algorithm.Transactions.AddOrder(request); // mark option contract as not tradable optionContractSecurity.IsTradable = false; algorithm.Debug($"MarketOnClose order submitted for option contract '{optionContractSymbol}' due to impending {split.Symbol.Value} split event. " + "Option splits are not currently supported."); } // remove the warning from our list splitWarnings.RemoveAt(i); } }
/// <summary> /// Applies universe selection to the data feed and algorithm /// </summary> /// <param name="universe">The universe to perform selection on</param> /// <param name="dateTimeUtc">The current date time in utc</param> /// <param name="universeData">The data provided to perform selection with</param> public SecurityChanges ApplyUniverseSelection(Universe universe, DateTime dateTimeUtc, BaseDataCollection universeData) { var algorithmEndDateUtc = _algorithm.EndDate.ConvertToUtc(_algorithm.TimeZone); if (dateTimeUtc > algorithmEndDateUtc) { return(SecurityChanges.None); } IEnumerable <Symbol> selectSymbolsResult; // check if this universe must be filtered with fine fundamental data var fineFiltered = universe as FineFundamentalFilteredUniverse; if (fineFiltered != null // if the universe has been disposed we don't perform selection. This is handled below by 'Universe.PerformSelection' // but in this case we directly call 'SelectSymbols' because we want to perform fine selection even if coarse returns the same // symbols, see 'Universe.PerformSelection', which detects this and returns 'Universe.Unchanged' && !universe.DisposeRequested) { // perform initial filtering and limit the result selectSymbolsResult = universe.SelectSymbols(dateTimeUtc, universeData); if (!ReferenceEquals(selectSymbolsResult, Universe.Unchanged)) { // prepare a BaseDataCollection of FineFundamental instances var fineCollection = new BaseDataCollection(); // Create a dictionary of CoarseFundamental keyed by Symbol that also has FineFundamental // Coarse raw data has SID collision on: CRHCY R735QTJ8XC9X var allCoarse = universeData.Data.OfType <CoarseFundamental>(); var coarseData = allCoarse.Where(c => c.HasFundamentalData) .DistinctBy(c => c.Symbol) .ToDictionary(c => c.Symbol); // Remove selected symbols that do not have fine fundamental data var anyDoesNotHaveFundamentalData = false; // only pre-filter selected symbols if there actually is any coarse data. This way we can support custom universes filtered by fine fundamental data // which do not use coarse data as underlying, in which case it could happen that we try to load fine fundamental data that is missing, but no problem, // 'FineFundamentalSubscriptionEnumeratorFactory' won't emit it if (allCoarse.Any()) { selectSymbolsResult = selectSymbolsResult .Where( symbol => { var result = coarseData.ContainsKey(symbol); anyDoesNotHaveFundamentalData |= !result; return(result); } ); } if (!_anyDoesNotHaveFundamentalDataWarningLogged && anyDoesNotHaveFundamentalData) { _algorithm.Debug("Note: Your coarse selection filter was updated to exclude symbols without fine fundamental data. Make sure your coarse filter excludes symbols where HasFundamental is false."); _anyDoesNotHaveFundamentalDataWarningLogged = true; } // use all available threads, the entire system is waiting for this to complete var options = new ParallelOptions { MaxDegreeOfParallelism = Environment.ProcessorCount }; Parallel.ForEach(selectSymbolsResult, options, symbol => { var config = FineFundamentalUniverse.CreateConfiguration(symbol); var security = _securityService.CreateSecurity(symbol, config, addToSymbolCache: false); var localStartTime = dateTimeUtc.ConvertFromUtc(config.ExchangeTimeZone).AddDays(-1); var factory = new FineFundamentalSubscriptionEnumeratorFactory(_algorithm.LiveMode, x => new[] { localStartTime }); var request = new SubscriptionRequest(true, universe, security, new SubscriptionDataConfig(config), localStartTime, localStartTime); using (var enumerator = factory.CreateEnumerator(request, _dataProvider)) { if (enumerator.MoveNext()) { lock (fineCollection.Data) { fineCollection.Data.Add(enumerator.Current); } } } }); // WARNING -- HACK ATTACK -- WARNING // Fine universes are considered special due to their chaining behavior. // As such, we need a means of piping the fine data read in here back to the data feed // so that it can be properly emitted via a TimeSlice.Create call. There isn't a mechanism // in place for this function to return such data. The following lines are tightly coupled // to the universeData dictionaries in SubscriptionSynchronizer and LiveTradingDataFeed and // rely on reference semantics to work. universeData.Data = new List <BaseData>(); foreach (var fine in fineCollection.Data.OfType <FineFundamental>()) { var fundamentals = new Fundamentals { Symbol = fine.Symbol, Time = fine.Time, EndTime = fine.EndTime, DataType = fine.DataType, AssetClassification = fine.AssetClassification, CompanyProfile = fine.CompanyProfile, CompanyReference = fine.CompanyReference, EarningReports = fine.EarningReports, EarningRatios = fine.EarningRatios, FinancialStatements = fine.FinancialStatements, OperationRatios = fine.OperationRatios, SecurityReference = fine.SecurityReference, ValuationRatios = fine.ValuationRatios, Market = fine.Symbol.ID.Market }; CoarseFundamental coarse; if (coarseData.TryGetValue(fine.Symbol, out coarse)) { // the only time the coarse data won't exist is if the selection function // doesn't use the data provided, and instead returns a constant list of // symbols -- coupled with a potential hole in the data fundamentals.Value = coarse.Value; fundamentals.Volume = coarse.Volume; fundamentals.DollarVolume = coarse.DollarVolume; fundamentals.HasFundamentalData = coarse.HasFundamentalData; // set the fine fundamental price property to yesterday's closing price fine.Value = coarse.Value; } universeData.Data.Add(fundamentals); } // END -- HACK ATTACK -- END // perform the fine fundamental universe selection selectSymbolsResult = fineFiltered.FineFundamentalUniverse.PerformSelection(dateTimeUtc, fineCollection); } } else { // perform initial filtering and limit the result selectSymbolsResult = universe.PerformSelection(dateTimeUtc, universeData); } // materialize the enumerable into a set for processing var selections = selectSymbolsResult.ToHashSet(); var additions = new List <Security>(); var removals = new List <Security>(); // first check for no pending removals, even if the universe selection // didn't change we might need to remove a security because a position was closed RemoveSecurityFromUniverse(_pendingRemovalsManager.CheckPendingRemovals(selections, universe), removals, dateTimeUtc, algorithmEndDateUtc); // check for no changes second if (ReferenceEquals(selectSymbolsResult, Universe.Unchanged)) { return(SecurityChanges.None); } // determine which data subscriptions need to be removed from this universe foreach (var member in universe.Securities.Values.OrderBy(member => member.Security.Symbol.SecurityType)) { var security = member.Security; // if we've selected this subscription again, keep it if (selections.Contains(security.Symbol)) { continue; } // don't remove if the universe wants to keep it in if (!universe.CanRemoveMember(dateTimeUtc, security)) { continue; } // remove the member - this marks this member as not being // selected by the universe, but it may remain in the universe // until open orders are closed and the security is liquidated removals.Add(security); RemoveSecurityFromUniverse(_pendingRemovalsManager.TryRemoveMember(security, universe), removals, dateTimeUtc, algorithmEndDateUtc); } Dictionary <Symbol, Security> pendingAdditions; if (!_pendingSecurityAdditions.TryGetValue(dateTimeUtc, out pendingAdditions)) { // if the frontier moved forward then we've added these securities to the algorithm _pendingSecurityAdditions.Clear(); // keep track of created securities so we don't create the same security twice, leads to bad things :) pendingAdditions = new Dictionary <Symbol, Security>(); _pendingSecurityAdditions[dateTimeUtc] = pendingAdditions; } // find new selections and add them to the algorithm foreach (var symbol in selections) { if (universe.Securities.ContainsKey(symbol)) { // if it's already part of the universe there's no need to re-add it continue; } // create the new security, the algorithm thread will add this at the appropriate time Security security; if (!pendingAdditions.TryGetValue(symbol, out security) && !_algorithm.Securities.TryGetValue(symbol, out security)) { // For now this is required for retro compatibility with usages of security.Subscriptions var configs = _algorithm.SubscriptionManager.SubscriptionDataConfigService.Add(symbol, universe.UniverseSettings.Resolution, universe.UniverseSettings.FillForward, universe.UniverseSettings.ExtendedMarketHours, dataNormalizationMode: universe.UniverseSettings.DataNormalizationMode); security = _securityService.CreateSecurity(symbol, configs, universe.UniverseSettings.Leverage, (symbol.ID.SecurityType == SecurityType.Option || symbol.ID.SecurityType == SecurityType.FutureOption)); pendingAdditions.Add(symbol, security); } var addedSubscription = false; var dataFeedAdded = false; foreach (var request in universe.GetSubscriptionRequests(security, dateTimeUtc, algorithmEndDateUtc, _algorithm.SubscriptionManager.SubscriptionDataConfigService)) { if (security.Symbol == request.Configuration.Symbol && // Just in case, check it's the same symbol, else AddData will throw. !security.Subscriptions.Contains(request.Configuration)) { // For now this is required for retro compatibility with usages of security.Subscriptions security.AddData(request.Configuration); } var toRemove = _currencySubscriptionDataConfigManager.GetSubscriptionDataConfigToRemove(request.Configuration.Symbol); if (toRemove != null) { Log.Trace($"UniverseSelection.ApplyUniverseSelection(): Removing internal currency data feed {toRemove}"); _dataManager.RemoveSubscription(toRemove); } // 'dataFeedAdded' will help us notify the user for security changes only once per non-internal subscription // for example two universes adding the same configuration, we don't want two notifications dataFeedAdded = _dataManager.AddSubscription(request); // only update our security changes if we actually added data if (!request.IsUniverseSubscription) { addedSubscription = true; _internalSubscriptionManager.AddedSubscriptionRequest(request); } } if (addedSubscription) { var addedMember = universe.AddMember(dateTimeUtc, security); if (addedMember && dataFeedAdded) { additions.Add(security); } } } // return None if there are no changes, otherwise return what we've modified var securityChanges = additions.Count + removals.Count != 0 ? new SecurityChanges(additions, removals) : SecurityChanges.None; // Add currency data feeds that weren't explicitly added in Initialize if (additions.Count > 0) { EnsureCurrencyDataFeeds(securityChanges); } if (securityChanges != SecurityChanges.None && Log.DebuggingEnabled) { // for performance let's not create the message string if debugging is not enabled // this can be executed many times and it's in the algorithm thread Log.Debug("UniverseSelection.ApplyUniverseSelection(): " + dateTimeUtc + ": " + securityChanges); } return(securityChanges); }
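// The Debug warning emitted earlier in this method asks coarse filters to drop symbols without
// fundamental data; a minimal algorithm-side sketch of a coarse/fine universe that complies:
AddUniverse(
    coarse => coarse.Where(c => c.HasFundamentalData)
                    .OrderByDescending(c => c.DollarVolume)
                    .Take(50)
                    .Select(c => c.Symbol),
    fine => fine.Take(10).Select(f => f.Symbol));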
/// <summary> /// Handles the message /// </summary> /// <param name="message">The message to be handled</param> public void Handle(BrokerageMessageEvent message) { // based on message type dispatch to result handler switch (message.Type) { case BrokerageMessageType.Information: _algorithm.Debug($"Brokerage Info: {message.Message}"); break; case BrokerageMessageType.Warning: _algorithm.Error($"Brokerage Warning: {message.Message}"); break; case BrokerageMessageType.Error: _algorithm.Error($"Brokerage Error: {message.Message}"); _algorithm.RunTimeError = new Exception(message.Message); break; case BrokerageMessageType.Disconnect: _connected = false; Log.Trace("DefaultBrokerageMessageHandler.Handle(): Disconnected."); // check to see if any non-custom security exchanges are open within the next x minutes var open = (from kvp in _algorithm.Securities let security = kvp.Value where security.Type != SecurityType.Base let exchange = security.Exchange let localTime = _algorithm.UtcTime.ConvertFromUtc(exchange.TimeZone) where exchange.IsOpenDuringBar( localTime, localTime + _openThreshold, _algorithm.SubscriptionManager.SubscriptionDataConfigService .GetSubscriptionDataConfigs(security.Symbol) .IsExtendedMarketHours()) select security).Any(); // if any are open then we need to kill the algorithm if (open) { Log.Trace("DefaultBrokerageMessageHandler.Handle(): Disconnect when exchanges are open, " + Invariant($"trying to reconnect for {_initialDelay.TotalMinutes} minutes.") ); // wait 15 minutes before killing algorithm StartCheckReconnected(_initialDelay, message); } else { Log.Trace("DefaultBrokerageMessageHandler.Handle(): Disconnect when exchanges are closed, checking back before exchange open."); // if they aren't open, we'll need to check again a little bit before markets open DateTime nextMarketOpenUtc; if (_algorithm.Securities.Count != 0) { nextMarketOpenUtc = (from kvp in _algorithm.Securities let security = kvp.Value where security.Type != SecurityType.Base let exchange = security.Exchange let localTime = _algorithm.UtcTime.ConvertFromUtc(exchange.TimeZone) let marketOpen = exchange.Hours.GetNextMarketOpen(localTime, _algorithm.SubscriptionManager.SubscriptionDataConfigService .GetSubscriptionDataConfigs(security.Symbol) .IsExtendedMarketHours()) let marketOpenUtc = marketOpen.ConvertToUtc(exchange.TimeZone) select marketOpenUtc).Min(); } else { // if we have no securities just make next market open an hour from now nextMarketOpenUtc = DateTime.UtcNow.AddHours(1); } var timeUntilNextMarketOpen = nextMarketOpenUtc - DateTime.UtcNow - _openThreshold; Log.Trace(Invariant($"DefaultBrokerageMessageHandler.Handle(): TimeUntilNextMarketOpen: {timeUntilNextMarketOpen}")); // wake up 5 minutes before market open and check if we've reconnected StartCheckReconnected(timeUntilNextMarketOpen, message); } break; case BrokerageMessageType.Reconnect: _connected = true; Log.Trace("DefaultBrokerageMessageHandler.Handle(): Reconnected."); if (_cancellationTokenSource != null && !_cancellationTokenSource.IsCancellationRequested) { _cancellationTokenSource.Cancel(); } break; } }
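// Handlers like the one above implement IBrokerageMessageHandler; a hedged sketch of a custom
// implementation an algorithm could register via SetBrokerageMessageHandler, assuming the interface
// only requires the Handle method used in this section. The class and its behavior are illustrative.
public class SurfaceErrorsOnlyMessageHandler : IBrokerageMessageHandler
{
    private readonly IAlgorithm _algorithm;

    public SurfaceErrorsOnlyMessageHandler(IAlgorithm algorithm)
    {
        _algorithm = algorithm;
    }

    public void Handle(BrokerageMessageEvent message)
    {
        if (message.Type == BrokerageMessageType.Error)
        {
            _algorithm.Error($"Brokerage Error: {message.Message}");
        }
        // informational and warning messages are intentionally dropped in this sketch
    }
}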
private bool LoadExistingHoldingsAndOrders(LiveNodePacket liveJob, IBrokerage brokerage, IAlgorithm algorithm, SetupHandlerParameters parameters) { string loadExistingHoldings; if (liveJob.BrokerageData.TryGetValue("load-existing-holdings", out loadExistingHoldings) && !bool.Parse(loadExistingHoldings)) { Log.Trace("BrokerageSetupHandler.Setup(): Ignoring brokerage holdings and orders"); algorithm.Debug("Live deployment skipping loading of existing assets from the brokerage per deployment settings."); return(true); } var supportedSecurityTypes = new HashSet <SecurityType> { SecurityType.Equity, SecurityType.Forex, SecurityType.Cfd, SecurityType.Option, SecurityType.Future, SecurityType.FutureOption, SecurityType.Crypto }; Log.Trace("BrokerageSetupHandler.Setup(): Fetching open orders from brokerage..."); try { GetOpenOrders(algorithm, parameters.ResultHandler, parameters.TransactionHandler, brokerage, supportedSecurityTypes); } catch (Exception err) { Log.Error(err); AddInitializationError("Error getting open orders from brokerage: " + err.Message, err); return(false); } Log.Trace("BrokerageSetupHandler.Setup(): Fetching holdings from brokerage..."); try { var utcNow = DateTime.UtcNow; // populate the algorithm with the account's current holdings var holdings = brokerage.GetAccountHoldings(); // add options first to ensure raw data normalization mode is set on the equity underlyings foreach (var holding in holdings.OrderByDescending(x => x.Type)) { Log.Trace("BrokerageSetupHandler.Setup(): Has existing holding: " + holding); // verify existing holding security type if (!supportedSecurityTypes.Contains(holding.Type)) { Log.Error("BrokerageSetupHandler.Setup(): Unsupported security type: " + holding.Type + "-" + holding.Symbol.Value); AddInitializationError("Found unsupported security type in existing brokerage holdings: " + holding.Type + ". " + "QuantConnect currently supports the following security types: " + string.Join(",", supportedSecurityTypes)); // keep aggregating these errors continue; } AddUnrequestedSecurity(algorithm, holding.Symbol); var security = algorithm.Securities[holding.Symbol]; var exchangeTime = utcNow.ConvertFromUtc(security.Exchange.TimeZone); security.Holdings.SetHoldings(holding.AveragePrice, holding.Quantity); security.SetMarketPrice(new TradeBar { Time = exchangeTime, Open = holding.MarketPrice, High = holding.MarketPrice, Low = holding.MarketPrice, Close = holding.MarketPrice, Volume = 0, Symbol = holding.Symbol, DataType = MarketDataType.TradeBar }); } } catch (Exception err) { Log.Error(err); AddInitializationError("Error getting account holdings from brokerage: " + err.Message, err); return(false); } return(true); }
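// A hedged sketch of the deployment-side switch read above: setting 'load-existing-holdings' to
// "false" in the live job's brokerage data makes setup skip existing brokerage holdings and orders.
liveJob.BrokerageData["load-existing-holdings"] = "false";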
private IEnumerable <TimeSlice> Stream(IAlgorithm algorithm, ISynchronizer synchronizer, IResultHandler results, CancellationToken cancellationToken) { var nextWarmupStatusTime = DateTime.MinValue; var warmingUp = algorithm.IsWarmingUp; var warmingUpPercent = 0; if (warmingUp) { nextWarmupStatusTime = DateTime.UtcNow.AddSeconds(1); algorithm.Debug("Algorithm starting warm up..."); results.SendStatusUpdate(AlgorithmStatus.History, $"{warmingUpPercent}"); } else { results.SendStatusUpdate(AlgorithmStatus.Running); // let's be polite and call warmup finished even though there was no warmup period, so algorithms don't have to handle that case themselves. // we trigger this callback here and not internally in the algorithm so that we can go through python if required algorithm.OnWarmupFinished(); } // below we compare with slice.Time which is in UTC var startTimeTicks = algorithm.UtcTime.Ticks; var warmupEndTicks = algorithm.StartDate.ConvertToUtc(algorithm.TimeZone).Ticks; // fulfilling history requirements of volatility models in live mode if (algorithm.LiveMode) { warmupEndTicks = DateTime.UtcNow.Ticks; ProcessVolatilityHistoryRequirements(algorithm); } foreach (var timeSlice in synchronizer.StreamData(cancellationToken)) { if (algorithm.IsWarmingUp) { var now = DateTime.UtcNow; if (now > nextWarmupStatusTime) { // send some status to the user letting them know we're done with history, but still warming up, // catching up to real time data nextWarmupStatusTime = now.AddSeconds(2); var newPercent = (int)(100 * (timeSlice.Time.Ticks - startTimeTicks) / (double)(warmupEndTicks - startTimeTicks)); // if there isn't any progress don't send the same update many times if (newPercent != warmingUpPercent) { warmingUpPercent = newPercent; algorithm.Debug($"Processing algorithm warm-up request {warmingUpPercent}%..."); results.SendStatusUpdate(AlgorithmStatus.History, $"{warmingUpPercent}"); } } } else if (warmingUp) { // warmup finished, send an update warmingUp = false; // we trigger this callback here and not internally in the algorithm so that we can go through python if required algorithm.OnWarmupFinished(); algorithm.Debug("Algorithm finished warming up."); results.SendStatusUpdate(AlgorithmStatus.Running, "100"); } yield return(timeSlice); } }
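// The warm-up Debug/status updates above pair with the algorithm-side warm-up API; a minimal sketch:
public override void Initialize()
{
    SetStartDate(2020, 1, 1);
    AddEquity("SPY", Resolution.Minute);
    SetWarmUp(TimeSpan.FromDays(7));   // requests the warm-up period streamed above
}

public override void OnWarmupFinished()
{
    Debug("Algorithm warm-up finished.");
}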