public void LoadsHoldingsForExpectedMarket()
{
    // Arrange: an Oanda FX symbol restored through an algorithm using the IB brokerage model
    var audUsd = Symbol.Create("AUDUSD", SecurityType.Forex, Market.Oanda);
    var algorithm = new TestAlgorithm();
    algorithm.SetBrokerageModel(BrokerageName.InteractiveBrokersBrokerage);
    algorithm.SetHistoryProvider(new BrokerageTransactionHandlerTests.BrokerageTransactionHandlerTests.EmptyHistoryProvider());

    var job = new LiveNodePacket
    {
        UserId = 1,
        ProjectId = 1,
        DeployId = "1",
        Brokerage = "PaperBrokerage",
        DataQueueHandler = "none"
    };
    // Increasing RAM limit, else the tests fail. This is happening in master,
    // when running all the tests together, locally (not travis).
    job.Controls.RamAllocation = 1024 * 1024 * 1024;

    var resultHandlerMock = new Mock<IResultHandler>();
    var transactionHandlerMock = new Mock<ITransactionHandler>();
    var realTimeHandlerMock = new Mock<IRealTimeHandler>();

    // Connected brokerage reporting a single 100-unit AUDUSD holding
    var brokerageMock = new Mock<IBrokerage>();
    brokerageMock.Setup(x => x.IsConnected).Returns(true);
    brokerageMock.Setup(x => x.GetCashBalance()).Returns(new List<Cash>());
    brokerageMock.Setup(x => x.GetAccountHoldings()).Returns(new List<Holding>
    {
        new Holding { Symbol = audUsd, Type = audUsd.SecurityType, Quantity = 100 }
    });
    brokerageMock.Setup(x => x.GetOpenOrders()).Returns(new List<Order>());

    // Act: brokerage setup restores the holding into the portfolio
    var setupHandler = new BrokerageSetupHandler();
    IBrokerageFactory factory;
    setupHandler.CreateBrokerage(job, algorithm, out factory);
    Assert.IsTrue(setupHandler.Setup(new SetupHandlerParameters(_dataManager.UniverseSelection, algorithm, brokerageMock.Object, job,
        resultHandlerMock.Object, transactionHandlerMock.Object, realTimeHandlerMock.Object)));

    // Assert: the security was registered under the expected (Oanda) market symbol
    Security security;
    Assert.IsTrue(algorithm.Portfolio.Securities.TryGetValue(audUsd, out security));
    Assert.AreEqual(audUsd, security.Symbol);
}
public void Setup()
{
    // Forex-style market hours: opens Sunday 17:00 NY, trades around the clock
    // Monday-Thursday, and closes Friday 17:00 NY.
    var sunday = new LocalMarketHours(DayOfWeek.Sunday, new TimeSpan(17, 0, 0), TimeSpan.FromTicks(Time.OneDay.Ticks - 1));
    var monday = LocalMarketHours.OpenAllDay(DayOfWeek.Monday);
    var tuesday = LocalMarketHours.OpenAllDay(DayOfWeek.Tuesday);
    var wednesday = LocalMarketHours.OpenAllDay(DayOfWeek.Wednesday);
    var thursday = LocalMarketHours.OpenAllDay(DayOfWeek.Thursday);
    var friday = new LocalMarketHours(DayOfWeek.Friday, TimeSpan.Zero, new TimeSpan(17, 0, 0));
    // No early closes or late opens; US holidays are taken from USHoliday.Dates
    var earlyCloses = new Dictionary<DateTime, TimeSpan>();
    var lateOpens = new Dictionary<DateTime, TimeSpan>();
    _exchangeHours = new SecurityExchangeHours(TimeZones.NewYork, USHoliday.Dates.Select(x => x.Date), new[]
    {
        sunday, monday, tuesday, wednesday, thursday, friday
    }.ToDictionary(x => x.DayOfWeek), earlyCloses, lateOpens);

    _liveTradingDataFeed = new TestableLiveTradingDataFeed();

    // Live job routed through the Oanda brokerage with the stub live data queue
    var jobPacket = new LiveNodePacket()
    {
        DeployId = "",
        Brokerage = BrokerageName.OandaBrokerage.ToString(),
        DataQueueHandler = "LiveDataQueue"
    };

    var algo = new TestAlgorithm();
    var marketHoursDatabase = MarketHoursDatabase.FromDataFolder();
    var symbolPropertiesDataBase = SymbolPropertiesDatabase.FromDataFolder();
    var dataPermissionManager = new DataPermissionManager();
    // Wire the data manager (universe selection + security service) into the algorithm
    var dataManager = new DataManager(_liveTradingDataFeed,
        new UniverseSelection(
            algo,
            new SecurityService(algo.Portfolio.CashBook,
                marketHoursDatabase,
                symbolPropertiesDataBase,
                algo,
                RegisteredSecurityDataTypesProvider.Null,
                new SecurityCacheProvider(algo.Portfolio)),
            dataPermissionManager),
        algo,
        algo.TimeKeeper,
        marketHoursDatabase,
        true,
        RegisteredSecurityDataTypesProvider.Null,
        dataPermissionManager);
    algo.SubscriptionManager.SetDataManager(dataManager);

    // The synchronizer is initialized before the feed so the feed can be wired to it
    _liveSynchronizer = new LiveSynchronizer();
    _liveSynchronizer.Initialize(algo, dataManager);

    _liveTradingDataFeed.Initialize(algo, jobPacket, new LiveTradingResultHandler(), new LocalDiskMapFileProvider(),
        null, new DefaultDataProvider(), dataManager, _liveSynchronizer, new DataChannelProvider());

    algo.Initialize();

    _config = SecurityTests.CreateTradeBarConfig();
}
/// <summary>
/// Subscribe to a specific list of symbols
/// </summary>
/// <param name="job">Live job to subscribe with</param>
/// <param name="symbols">List of symbols to subscribe to</param>
public void Subscribe(LiveNodePacket job, IEnumerable<Symbol> symbols)
{
    foreach (var symbol in symbols)
    {
        // Universe placeholder symbols are never subscribed
        if (symbol.Value.Contains("-UNIVERSE-"))
        {
            continue;
        }

        // Only equities and options are supported by this data queue
        var securityType = symbol.ID.SecurityType;
        if (securityType != SecurityType.Equity && securityType != SecurityType.Option)
        {
            continue;
        }

        // Refresh only when the symbol was not already present
        if (_subscriptions.TryAdd(symbol, symbol.Value))
        {
            Refresh();
        }
    }
}
public void SetJob()
{
    // Serialize a single-element data-queue-handler array, as live jobs do
    var handlersJson = Newtonsoft.Json.JsonConvert.SerializeObject(new[] { "FakeDataQueue" });
    var job = new LiveNodePacket
    {
        Brokerage = "ZerodhaBrokerage",
        DataQueueHandler = handlersJson
    };

    // SetJob must accept the array-form handler configuration without throwing
    var manager = new DataQueueHandlerManager();
    manager.SetJob(job);
    manager.Dispose();
}
/// <summary>
/// Adds the specified symbols to the subscription
/// </summary>
/// <param name="job">Job we're subscribing for:</param>
/// <param name="symbols">The symbols to be added keyed by SecurityType</param>
public void Subscribe(LiveNodePacket job, IEnumerable<Symbol> symbols)
{
    foreach (var symbol in symbols)
    {
        // Skip symbols already tracked, then those the brokerage cannot handle
        if (_subscribedSymbols.ContainsKey(symbol.Value) || !CanSubscribe(symbol))
        {
            continue;
        }

        Log.Trace($"AlpacaBrokerage.Subscribe(): {symbol}");
        _natsClient.SubscribeQuote(symbol.Value);
        _natsClient.SubscribeTrade(symbol.Value);
        _subscribedSymbols.TryAdd(symbol.Value, symbol);
    }
}
/// <summary>
/// Adds the specified symbols to the subscription: new IQLevel1WatchItem("IBM", true)
/// </summary>
/// <param name="job">Job we're subscribing for:</param>
/// <param name="symbols">The symbols to be added keyed by SecurityType</param>
public void Subscribe(LiveNodePacket job, IEnumerable<Symbol> symbols)
{
    try
    {
        foreach (var symbol in symbols)
        {
            if (CanSubscribe(symbol))
            {
                lock (_sync)
                {
                    Log.Trace("IQFeed.Subscribe(): Subscribe Request: " + symbol.ToString());

                    if (_symbols.Add(symbol))
                    {
                        // processing canonical option symbol to subscribe to underlying prices
                        var subscribeSymbol = symbol;

                        if (symbol.ID.SecurityType == SecurityType.Option && symbol.IsCanonical())
                        {
                            subscribeSymbol = symbol.Underlying;
                            _underlyings.Add(subscribeSymbol, symbol);
                        }

                        if (symbol.ID.SecurityType == SecurityType.Future && symbol.IsCanonical())
                        {
                            // do nothing for now. Later might add continuous contract symbol.
                            // BUGFIX: was 'return', which silently aborted the subscription of
                            // every remaining symbol in the batch; 'continue' skips only this one.
                            continue;
                        }

                        var ticker = _symbolUniverse.GetBrokerageSymbol(subscribeSymbol);

                        if (!string.IsNullOrEmpty(ticker))
                        {
                            _level1Port.Subscribe(ticker);
                            Log.Trace("IQFeed.Subscribe(): Subscribe Processed: {0} ({1})", symbol.Value, ticker);
                        }
                        else
                        {
                            Log.Error("IQFeed.Subscribe(): Symbol {0} was not found in IQFeed symbol universe", symbol.Value);
                        }
                    }
                }
            }
        }
    }
    catch (Exception err)
    {
        // Subscribe is best-effort: log and swallow so a bad symbol doesn't kill the feed
        Log.Error("IQFeed.Subscribe(): " + err.Message);
    }
}
public void LoadsExistingHoldingsAndOrders(Func<List<Holding>> getHoldings, Func<List<Order>> getOrders, bool expected)
{
    var algorithm = new TestAlgorithm();
    algorithm.SetHistoryProvider(new BrokerageTransactionHandlerTests.BrokerageTransactionHandlerTests.EmptyHistoryProvider());

    var job = new LiveNodePacket
    {
        UserId = 1,
        ProjectId = 1,
        DeployId = "1",
        Brokerage = "PaperBrokerage",
        DataQueueHandler = "none"
    };
    // Increasing RAM limit, else the tests fail. This is happening in master,
    // when running all the tests together, locally (not travis).
    job.Controls.RamAllocation = 1024 * 1024 * 1024;

    var resultHandlerMock = new Mock<IResultHandler>();
    var transactionHandlerMock = new Mock<ITransactionHandler>();
    var realTimeHandlerMock = new Mock<IRealTimeHandler>();
    var objectStoreMock = new Mock<IObjectStore>();

    // Brokerage returns the parameterized holdings/orders for this test case
    var brokerageMock = new Mock<IBrokerage>();
    brokerageMock.Setup(x => x.IsConnected).Returns(true);
    brokerageMock.Setup(x => x.GetCashBalance()).Returns(new List<CashAmount>());
    brokerageMock.Setup(x => x.GetAccountHoldings()).Returns(getHoldings);
    brokerageMock.Setup(x => x.GetOpenOrders()).Returns(getOrders);

    var setupHandler = new BrokerageSetupHandler();
    IBrokerageFactory factory;
    setupHandler.CreateBrokerage(job, algorithm, out factory);

    var result = setupHandler.Setup(new SetupHandlerParameters(_dataManager.UniverseSelection, algorithm, brokerageMock.Object, job,
        resultHandlerMock.Object, transactionHandlerMock.Object, realTimeHandlerMock.Object, objectStoreMock.Object));
    Assert.AreEqual(expected, result);

    // Any restored option position must use raw prices, and so must its underlying
    foreach (var security in algorithm.Securities.Values)
    {
        if (security.Symbol.SecurityType == SecurityType.Option)
        {
            Assert.AreEqual(DataNormalizationMode.Raw, security.DataNormalizationMode);
            var underlyingSecurity = algorithm.Securities[security.Symbol.Underlying];
            Assert.AreEqual(DataNormalizationMode.Raw, underlyingSecurity.DataNormalizationMode);
        }
    }
}
/// <summary>
/// Desktop/Local Get Next Task - Get task from the Algorithm folder of VS Solution.
/// </summary>
/// <param name="location">Output: path of the algorithm assembly that will be executed</param>
/// <returns>A <see cref="LiveNodePacket"/> in live mode, otherwise a <see cref="BacktestNodePacket"/></returns>
public AlgorithmNodePacket NextJob(out string location)
{
    location = AlgorithmLocation;

    //If this isn't a backtesting mode/request, attempt a live job.
    if (_liveMode)
    {
        var liveJob = new LiveNodePacket
        {
            Type = PacketType.LiveNode,
            DataEndpoint = DataFeedEndpoint.LiveTrading,
            RealTimeEndpoint = RealTimeEndpoint.LiveTrading,
            ResultEndpoint = ResultHandlerEndpoint.Console,
            SetupEndpoint = SetupHandlerEndpoint.Brokerage,
            TransactionEndpoint = TransactionHandlerEndpoint.Brokerage,
            Algorithm = File.ReadAllBytes(AlgorithmLocation),
            Brokerage = Config.Get("live-mode-brokerage", PaperBrokerageTypeName),
            Channel = Config.Get("job-channel"),
            UserId = Config.GetInt("job-user-id")
        };

        try
        {
            // import the brokerage data for the configured brokerage
            var brokerageFactory = Composer.Instance.Single<IBrokerageFactory>(factory => factory.BrokerageType.MatchesTypeName(liveJob.Brokerage));
            liveJob.BrokerageData = brokerageFactory.BrokerageData;
        }
        catch (Exception err)
        {
            // Missing brokerage data is non-fatal: log and continue with the packet as-is.
            // (Fixed message typo: "resoliving" -> "resolving".)
            Log.Error(string.Format("JobQueue.NextJob(): Error resolving BrokerageData for live job for brokerage {0}. {1}", liveJob.Brokerage, err.Message));
        }
        return liveJob;
    }

    //Default run a backtesting job.
    var backtestJob = new BacktestNodePacket(0, 0, "", new byte[] {}, 10000, "local")
    {
        Type = PacketType.BacktestNode,
        DataEndpoint = DataFeedEndpoint.FileSystem,
        SetupEndpoint = SetupHandlerEndpoint.Console,
        ResultEndpoint = ResultHandlerEndpoint.Console,
        RealTimeEndpoint = RealTimeEndpoint.Backtesting,
        TransactionEndpoint = TransactionHandlerEndpoint.Backtesting,
        Algorithm = File.ReadAllBytes(AlgorithmLocation)
    };
    return backtestJob;
}
internal static LiveNodePacket GetJob()
{
    // Minimal paper-trading live job shared by these tests
    var packet = new LiveNodePacket
    {
        UserId = 1,
        ProjectId = 1,
        DeployId = "1",
        Brokerage = "PaperBrokerage",
        DataQueueHandler = "none"
    };
    // Increasing RAM limit, else the tests fail. This is happening in master,
    // when running all the tests together, locally (not travis).
    packet.Controls.RamAllocation = 1024 * 1024 * 1024;
    return packet;
}
/// <summary>
/// Removes the specified symbols from the subscription
/// </summary>
/// <param name="job">Job we're processing.</param>
/// <param name="symbols">The symbols to be removed keyed by SecurityType</param>
public void Unsubscribe(LiveNodePacket job, IEnumerable<Symbol> symbols)
{
    foreach (var symbol in symbols)
    {
        // Act only on symbols we actually track, then those the brokerage supports
        if (!_subscribedSymbols.ContainsKey(symbol.Value) || !CanSubscribe(symbol))
        {
            continue;
        }

        Log.Trace($"AlpacaBrokerage.Unsubscribe(): {symbol}");
        _polygonStreamingClient.UnsubscribeQuote(symbol.Value);
        _polygonStreamingClient.UnsubscribeTrade(symbol.Value);

        Symbol removed;
        _subscribedSymbols.TryRemove(symbol.Value, out removed);
    }
}
/// <summary>
/// Initialize the result handler with this result packet.
/// </summary>
/// <param name="job">Algorithm job packet for this result handler; must be a <see cref="LiveNodePacket"/></param>
/// <param name="messagingHandler">Messaging handler used to relay results</param>
/// <param name="api">Api instance</param>
/// <param name="dataFeed">Data feed instance</param>
/// <param name="setupHandler">Setup handler instance</param>
/// <param name="transactionHandler">Transaction handler instance</param>
/// <exception cref="Exception">Thrown when the job is not a live node packet</exception>
public void Initialize(AlgorithmNodePacket job, IMessagingHandler messagingHandler, IApi api, IDataFeed dataFeed, ISetupHandler setupHandler, ITransactionHandler transactionHandler)
{
    _api = api;
    _dataFeed = dataFeed;
    _messagingHandler = messagingHandler;
    _setupHandler = setupHandler;
    _transactionHandler = transactionHandler;

    // BUGFIX: a hard cast would throw InvalidCastException for a wrong packet type,
    // so the intended guard below could never fire; 'as' yields null instead,
    // letting the descriptive exception be thrown as designed.
    _job = job as LiveNodePacket;
    if (_job == null)
    {
        throw new Exception("LiveResultHandler.Constructor(): Submitted Job type invalid.");
    }
    _deployId = _job.DeployId;
    _compileId = _job.CompileId;
}
/// <summary>
/// Constructor for brokerage
/// </summary>
/// <param name="wssUrl">websockets url</param>
/// <param name="websocket">instance of websockets client</param>
/// <param name="restClient">instance of rest client</param>
/// <param name="apiKey">api key</param>
/// <param name="apiSecret">api secret</param>
/// <param name="passPhrase">pass phrase</param>
/// <param name="algorithm">the algorithm instance is required to retrieve account type</param>
/// <param name="priceProvider">The price provider for missing FX conversion rates</param>
/// <param name="aggregator">consolidate ticks</param>
/// <param name="job">The live job packet</param>
public GDAXBrokerage(string wssUrl, IWebSocket websocket, IRestClient restClient, string apiKey, string apiSecret, string passPhrase, IAlgorithm algorithm, IPriceProvider priceProvider, IDataAggregator aggregator, LiveNodePacket job)
    : base(wssUrl, websocket, restClient, apiKey, apiSecret, "GDAX")
{
    _job = job;
    FillSplit = new ConcurrentDictionary<long, GDAXFill>();
    _passPhrase = passPhrase;
    _algorithm = algorithm;
    _priceProvider = priceProvider;
    _aggregator = aggregator;
    // true only when this instance is the derived data-queue-handler type
    _isDataQueueHandler = this is GDAXDataQueueHandler;
    // start the fill monitor task; it is stopped via the _ctsFillMonitor token
    _fillMonitorTask = Task.Factory.StartNew(FillMonitorAction, _ctsFillMonitor.Token);
}
public void AlgorithmTimeIsSetToUtcNowBeforePostInitialize()
{
    // Capture "now" before setup so we can assert the algorithm clock moved past it
    var time = DateTime.UtcNow;
    TestAlgorithm algorithm = null;
    // The callback fires during algorithm initialization: by then UtcTime must
    // already have been advanced beyond the captured start time
    algorithm = new TestAlgorithm(() =>
    {
        Assert.That(algorithm.UtcTime > time);
    });

    // Before setup the algorithm still reports its default start time
    Assert.AreEqual(new DateTime(1998, 1, 1), algorithm.UtcTime);
    algorithm.SetHistoryProvider(new BrokerageTransactionHandlerTests.BrokerageTransactionHandlerTests.EmptyHistoryProvider());

    var job = new LiveNodePacket
    {
        UserId = 1,
        ProjectId = 1,
        DeployId = "1",
        Brokerage = "PaperBrokerage",
        DataQueueHandler = "none",
        Controls = new Controls
        {
            RamAllocation = 4096
        } // no real limit
    };

    // Handler mocks required by SetupHandlerParameters
    var resultHandler = new Mock<IResultHandler>();
    var transactionHandler = new Mock<ITransactionHandler>();
    var realTimeHandler = new Mock<IRealTimeHandler>();
    var brokerage = new Mock<IBrokerage>();
    var objectStore = new Mock<IObjectStore>();

    // Connected USD paper brokerage with no cash, holdings or open orders
    brokerage.Setup(x => x.IsConnected).Returns(true);
    brokerage.Setup(x => x.AccountBaseCurrency).Returns(Currencies.USD);
    brokerage.Setup(x => x.GetCashBalance()).Returns(new List<CashAmount>());
    brokerage.Setup(x => x.GetAccountHoldings()).Returns(new List<Holding>());
    brokerage.Setup(x => x.GetOpenOrders()).Returns(new List<Order>());

    var setupHandler = new BrokerageSetupHandler();
    IBrokerageFactory factory;
    setupHandler.CreateBrokerage(job, algorithm, out factory);
    Assert.IsTrue(setupHandler.Setup(new SetupHandlerParameters(_dataManager.UniverseSelection, algorithm, brokerage.Object, job,
        resultHandler.Object, transactionHandler.Object, realTimeHandler.Object, objectStore.Object)));

    // After setup the algorithm clock has advanced past the captured start time
    Assert.Greater(algorithm.UtcTime, time);
}
/// <summary>
/// Remove the symbol from the subscription list.
/// </summary>
/// <param name="job">Live Job to subscribe with</param>
/// <param name="symbols">List of symbols to unsubscribe from</param>
public void Unsubscribe(LiveNodePacket job, IEnumerable<Symbol> symbols)
{
    // Drop each requested symbol from the subscription map if present
    foreach (var symbol in symbols)
    {
        string removedValue;
        if (!_subscriptions.TryRemove(symbol, out removedValue))
        {
            continue;
        }
        //!!! _optionNameResolver.Remove(symbol);
        //DON'T REMOVE FROM CACHE!! for now keep everything in the cache
    }

    Log.Trace("TradeStationBrokerage.DataQueueHandler: removed symbols.");
    Refresh();
}
/// <summary>
/// Desktop/Local Get Next Task - Get task from the Algorithm folder of VS Solution.
/// </summary>
/// <param name="location">Output: path of the algorithm assembly that will be executed</param>
/// <returns>A <see cref="LiveNodePacket"/> in live mode, otherwise a <see cref="BacktestNodePacket"/></returns>
public AlgorithmNodePacket NextJob(out string location)
{
    location = AlgorithmLocation;
    Log.Trace("JobQueue.NextJob(): Selected " + location);

    //If this isn't a backtesting mode/request, attempt a live job.
    if (_liveMode)
    {
        var liveJob = new LiveNodePacket
        {
            Type = PacketType.LiveNode,
            Algorithm = File.ReadAllBytes(AlgorithmLocation),
            Brokerage = Config.Get("live-mode-brokerage", PaperBrokerageTypeName),
            Channel = Config.Get("job-channel"),
            UserId = Config.GetInt("job-user-id"),
            Version = Constants.Version,
            DeployId = Config.Get("algorithm-type-name"),
            RamAllocation = int.MaxValue
        };

        try
        {
            // import the brokerage data for the configured brokerage
            var brokerageFactory = Composer.Instance.Single<IBrokerageFactory>(factory => factory.BrokerageType.MatchesTypeName(liveJob.Brokerage));
            liveJob.BrokerageData = brokerageFactory.BrokerageData;
        }
        catch (Exception err)
        {
            // Missing brokerage data is non-fatal: log and continue with the packet as-is.
            // (Fixed message typo: "resoliving" -> "resolving".)
            Log.Error(string.Format("JobQueue.NextJob(): Error resolving BrokerageData for live job for brokerage {0}. {1}", liveJob.Brokerage, err.Message));
        }
        return liveJob;
    }

    //Default run a backtesting job.
    var backtestJob = new BacktestNodePacket(0, 0, "", new byte[] {}, 10000, "local")
    {
        Type = PacketType.BacktestNode,
        Algorithm = File.ReadAllBytes(AlgorithmLocation),
        Version = Constants.Version,
        BacktestId = Config.Get("algorithm-type-name"),
        RamAllocation = int.MaxValue,
        Language = (Language)Enum.Parse(typeof(Language), Config.Get("algorithm-language"))
    };
    return backtestJob;
}
/// <summary>
/// Creates a new IBrokerage instance and set ups the environment for the brokerage
/// </summary>
/// <param name="job">The job packet to create the brokerage for</param>
/// <param name="algorithm">The algorithm instance</param>
/// <returns>A new brokerage instance</returns>
public override IBrokerage CreateBrokerage(LiveNodePacket job, IAlgorithm algorithm)
{
    var readErrors = new List<string>();

    // Connection settings come from local config with defaults...
    var ibPort = Config.GetInt("ib-port", 4001);
    var ibHost = Config.Get("ib-host", "127.0.0.1");
    var ibTwsDirectory = Config.Get("ib-tws-dir", "C:\\Jts");
    var ibTwsVersion = Config.Get("ib-version", "974");

    // ...while account credentials come from the job's brokerage data
    var account = Read<string>(job.BrokerageData, "ib-account", readErrors);
    var userId = Read<string>(job.BrokerageData, "ib-user-name", readErrors);
    var password = Read<string>(job.BrokerageData, "ib-password", readErrors);
    var tradingMode = Read<string>(job.BrokerageData, "ib-trading-mode", readErrors);
    var agentDescription = Read<string>(job.BrokerageData, "ib-agent-description", readErrors);

    if (readErrors.Count != 0)
    {
        // if we had errors then we can't create the instance
        throw new Exception(string.Join(Environment.NewLine, readErrors));
    }

    if (tradingMode.IsNullOrEmpty())
    {
        throw new Exception("No trading mode selected. Please select either 'paper' or 'live' trading.");
    }

    var brokerage = new InteractiveBrokersBrokerage(
        algorithm,
        algorithm.Transactions,
        algorithm.Portfolio,
        Composer.Instance.GetExportedValueByTypeName<IDataAggregator>(Config.Get("data-aggregator", "QuantConnect.Lean.Engine.DataFeeds.AggregationManager")),
        Composer.Instance.GetExportedValueByTypeName<IMapFileProvider>(Config.Get("map-file-provider", "QuantConnect.Data.Auxiliary.LocalDiskMapFileProvider")),
        account,
        ibHost,
        ibPort,
        ibTwsDirectory,
        ibTwsVersion,
        userId,
        password,
        tradingMode,
        agentDescription);

    // register the brokerage as the data queue handler as well
    Composer.Instance.AddPart<IDataQueueHandler>(brokerage);

    return brokerage;
}
private IDataFeed RunDataFeed(out FuncDataQueueHandler dataQueueHandler, Func<FuncDataQueueHandler, IEnumerable<BaseData>> getNextTicksFunction = null, Resolution resolution = Resolution.Second, List<string> equities = null, List<string> forex = null)
{
    _algorithm.SetStartDate(_startDate);

    var lastTime = _manualTimeProvider.GetUtcNow();
    // Default tick generator: emits one tick per subscribed non-universe symbol each
    // time the manual clock advances, and nothing while time stands still.
    // NOTE: 'lastTime' is captured and mutated by this closure.
    getNextTicksFunction = getNextTicksFunction ?? (fdqh =>
    {
        var time = _manualTimeProvider.GetUtcNow();
        if (time == lastTime)
        {
            return Enumerable.Empty<BaseData>();
        }
        lastTime = time;
        return fdqh.Subscriptions.Where(symbol => !_algorithm.UniverseManager.ContainsKey(symbol)) // its not a universe
            .Select(symbol => new Tick(lastTime.ConvertFromUtc(TimeZones.NewYork), symbol, 1, 2)
            {
                Quantity = 1,
                // Symbol could not be in the Securities collections for the custom Universe tests.
                // AlgorithmManager is in charge of adding them, and we are not executing that code here.
                TickType = _algorithm.Securities.ContainsKey(symbol) ? _algorithm.Securities[symbol].SubscriptionDataConfig.TickType : TickType.Trade
            });
    });

    // job is used to send into DataQueueHandler
    var job = new LiveNodePacket();
    // result handler is used due to dependency in SubscriptionDataReader
    var resultHandler = new BacktestingResultHandler();

    dataQueueHandler = new FuncDataQueueHandler(getNextTicksFunction);

    var feed = new TestableLiveTradingDataFeed(dataQueueHandler, _manualTimeProvider);
    var mapFileProvider = new LocalDiskMapFileProvider();
    var fileProvider = new DefaultDataProvider();
    var dataManager = new DataManager(feed, new UniverseSelection(feed, _algorithm), _algorithm.Settings, _algorithm.TimeKeeper);
    _algorithm.SubscriptionManager.SetDataManager(dataManager);
    _algorithm.AddSecurities(resolution, equities, forex);
    feed.Initialize(_algorithm, job, resultHandler, mapFileProvider, new LocalDiskFactorFileProvider(mapFileProvider), fileProvider, dataManager);

    _algorithm.PostInitialize();

    Thread.Sleep(150); // small handicap for the data to be pumped so TimeSlices have data of all subscriptions
    return (feed);
}
public void FastExitsDoNotThrowUnhandledExceptions()
{
    DataManager dataManager;
    var algorithm = new AlgorithmStub(out dataManager, Resolution.Tick, Enumerable.Range(0, 20).Select(x => x.ToString()).ToList());
    var ticks = Enumerable.Range(0, 20).Select(x => new Tick { Symbol = SymbolCache.GetSymbol(x.ToString()) }).ToList();

    // job is used to send into DataQueueHandler
    var job = new LiveNodePacket();
    // result handler is used due to dependency in SubscriptionDataReader
    var resultHandler = new BacktestingResultHandler();

    var dataQueueHandler = new FuncDataQueueHandler(handler => ticks);
    var feed = new TestableLiveTradingDataFeed(dataQueueHandler);
    var mapFileProvider = new LocalDiskMapFileProvider();
    var dataProvider = new DefaultDataProvider();
    feed.Initialize(algorithm, job, resultHandler, mapFileProvider, new LocalDiskFactorFileProvider(mapFileProvider), dataProvider, dataManager);

    var feedThreadStarted = new ManualResetEvent(false);
    var sawUnhandledException = false;
    Task.Run(() =>
    {
        try
        {
            feedThreadStarted.Set();
            feed.Run();
        }
        catch (Exception ex)
        {
            QuantConnect.Logging.Log.Error(ex.ToString());
            sawUnhandledException = true;
        }
    });

    // Exit immediately once the feed thread is known to be running
    feedThreadStarted.WaitOne();
    feed.Exit();

    Thread.Sleep(1000);
    Assert.IsFalse(sawUnhandledException);
}
/// <summary>
/// Initializes the data feed for the specified job and algorithm
/// </summary>
public void Initialize(IAlgorithm algorithm, AlgorithmNodePacket job, IResultHandler resultHandler, IMapFileProvider mapFileProvider, IFactorFileProvider factorFileProvider, IDataProvider dataProvider, IDataFeedSubscriptionManager subscriptionManager, IDataFeedTimeProvider dataFeedTimeProvider, IDataChannelProvider dataChannelProvider)
{
    // live feed requires the live packet type; fail fast otherwise
    if (!(job is LiveNodePacket))
    {
        throw new ArgumentException("The LiveTradingDataFeed requires a LiveNodePacket.");
    }

    _cancellationTokenSource = new CancellationTokenSource();

    _job = (LiveNodePacket)job;
    _timeProvider = dataFeedTimeProvider.TimeProvider;
    _dataProvider = dataProvider;
    _mapFileProvider = mapFileProvider;
    _factorFileProvider = factorFileProvider;
    _channelProvider = dataChannelProvider;
    _frontierTimeProvider = dataFeedTimeProvider.FrontierTimeProvider;
    _customExchange = new BaseDataExchange("CustomDataExchange") { SleepInterval = 10 };
    _subscriptions = subscriptionManager.DataFeedSubscriptions;

    // data queue handler may be absent; only forward the job when one exists
    _dataQueueHandler = GetDataQueueHandler();
    _dataQueueHandler?.SetJob(_job);

    // run the custom data exchange
    var manualEvent = new ManualResetEventSlim(false);
    Task.Factory.StartNew(() =>
    {
        manualEvent.Set();
        _customExchange.Start(_cancellationTokenSource.Token);
    }, TaskCreationOptions.LongRunning);
    // wait until the exchange task has actually begun running before continuing
    manualEvent.Wait();
    manualEvent.DisposeSafely();

    IsActive = true;
}
public void SubscribeReturnsNotNull()
{
    // Live job configured with a single fake data queue handler
    var handlersJson = Newtonsoft.Json.JsonConvert.SerializeObject(new[] { "FakeDataQueue" });
    var job = new LiveNodePacket
    {
        Brokerage = "OandaBrokerage",
        DataQueueHandler = handlersJson
    };

    var manager = new DataQueueHandlerManager();
    manager.SetJob(job);

    // Subscribing must always hand back a usable enumerator
    var enumerator = manager.Subscribe(GetConfig(), (_, _) => {});
    Assert.NotNull(enumerator);

    manager.Dispose();
    enumerator.Dispose();
}
/********************************************************
* CONSTRUCTOR
*********************************************************/
/// <summary>
/// Initialize the live trading result handler
/// </summary>
/// <param name="job">Live trading job</param>
public LiveTradingResultHandler(LiveNodePacket job)
{
    // Job identity
    _job = job;
    _deployId = job.DeployId;
    _compileId = job.CompileId;

    // Result containers
    _charts = new ConcurrentDictionary<string, Chart>();
    _messages = new ConcurrentQueue<Packet>();
    _runtimeStatistics = new Dictionary<string, string>();
    //Store log and debug messages sorted by time.
    _logStore = new List<LogEntry>();

    // Timing defaults
    _resamplePeriod = TimeSpan.FromSeconds(1);
    _notificationPeriod = TimeSpan.FromSeconds(1);
    _startTime = DateTime.Now;

    _isActive = true;
}
/// <summary>
/// Sets the job we're subscribing for
/// </summary>
/// <param name="job">Job we're subscribing for</param>
public void SetJob(LiveNodePacket job)
{
    // Pull Zerodha credentials out of the job's brokerage data
    var tradingSegment = job.BrokerageData["zerodha-trading-segment"];
    var productType = job.BrokerageData["zerodha-product-type"];
    var apiKey = job.BrokerageData["zerodha-api-key"];
    var accessToken = job.BrokerageData["zerodha-access-token"];
    var aggregator = Composer.Instance.GetExportedValueByTypeName<IDataAggregator>(
        Config.Get("data-aggregator", "QuantConnect.Lean.Engine.DataFeeds.AggregationManager"));

    Initialize(
        tradingSegment,
        productType,
        apiKey,
        accessToken,
        null,
        null,
        aggregator
    );

    // Ensure we end up connected after the job is applied
    if (!IsConnected)
    {
        Connect();
    }
}
/// <summary>
/// Initializes the data feed for the specified job and algorithm
/// </summary>
public void Initialize(IAlgorithm algorithm, AlgorithmNodePacket job, IResultHandler resultHandler, IMapFileProvider mapFileProvider, IFactorFileProvider factorFileProvider, IDataProvider dataProvider, IDataFeedSubscriptionManager subscriptionManager, IDataFeedTimeProvider dataFeedTimeProvider)
{
    // live feed requires the live packet type; fail fast otherwise
    if (!(job is LiveNodePacket))
    {
        throw new ArgumentException("The LiveTradingDataFeed requires a LiveNodePacket.");
    }

    _cancellationTokenSource = new CancellationTokenSource();

    _algorithm = algorithm;
    _job = (LiveNodePacket)job;
    _timeProvider = dataFeedTimeProvider.TimeProvider;
    _dataQueueHandler = GetDataQueueHandler();
    _dataProvider = dataProvider;
    _channelProvider = GetDataChannelProvider();
    _frontierTimeProvider = dataFeedTimeProvider.FrontierTimeProvider;

    _customExchange = new BaseDataExchange("CustomDataExchange") { SleepInterval = 10 };
    // sleep is controlled on this exchange via the GetNextTicksEnumerator
    _exchange = new BaseDataExchange("DataQueueExchange") { SleepInterval = 0 };
    // the data queue's enumerator must be registered before the exchange starts
    _exchange.AddEnumerator(DataQueueHandlerSymbol, GetNextTicksEnumerator());
    _subscriptions = subscriptionManager.DataFeedSubscriptions;
    _universeSelection = subscriptionManager.UniverseSelection;

    // run the exchanges
    Task.Run(() => _exchange.Start(_cancellationTokenSource.Token));
    Task.Run(() => _customExchange.Start(_cancellationTokenSource.Token));

    IsActive = true;
}
/// <summary>
/// Creates a new IBrokerage instance and set ups the environment for the brokerage
/// </summary>
/// <param name="job">The job packet to create the brokerage for</param>
/// <param name="algorithm">The algorithm instance</param>
/// <returns>A new brokerage instance</returns>
public override IBrokerage CreateBrokerage(LiveNodePacket job, IAlgorithm algorithm)
{
    var readErrors = new List<string>();

    // Connection settings come from local config with defaults...
    var ibPort = Config.GetInt("ib-port", 4000);
    var ibHost = Config.Get("ib-host", "127.0.0.1");
    var ibTwsDirectory = Config.Get("ib-tws-dir", "~/IBJts");
    var ibTwsVersion = Config.Get("ib-version", "978");

    // ...while account credentials come from the job's brokerage data
    var account = Read<string>(job.BrokerageData, "ib-account", readErrors);
    var userId = Read<string>(job.BrokerageData, "ib-user-name", readErrors);
    var password = Read<string>(job.BrokerageData, "ib-password", readErrors);
    var tradingMode = Read<string>(job.BrokerageData, "ib-trading-mode", readErrors);
    var agentDescription = Read<string>(job.BrokerageData, "ib-agent-description", readErrors);

    if (readErrors.Count != 0)
    {
        // if we had errors then we can't create the instance
        throw new Exception(string.Join(Environment.NewLine, readErrors));
    }

    if (tradingMode.IsNullOrEmpty())
    {
        throw new Exception("No trading mode selected. Please select either 'paper' or 'live' trading.");
    }

    var brokerage = new InteractiveBrokersBrokerage(
        algorithm,
        algorithm.Transactions,
        algorithm.Portfolio,
        account,
        ibHost,
        ibPort,
        ibTwsDirectory,
        ibTwsVersion,
        userId,
        password,
        tradingMode,
        agentDescription);

    // register the brokerage as the data queue handler as well
    Composer.Instance.AddPart<IDataQueueHandler>(brokerage);

    return brokerage;
}
/// <summary>
/// Adds the specified symbols to the subscription
/// </summary>
/// <param name="job">Job we're subscribing for:</param>
/// <param name="symbols">The symbols to be added keyed by SecurityType</param>
public void Subscribe(LiveNodePacket job, IEnumerable<Symbol> symbols)
{
    // Snapshot the input, then upsert each symbol into the concurrent map
    var requested = symbols.ToList();
    foreach (var requestedSymbol in requested)
    {
        var current = requestedSymbol;
        _symbols.AddOrUpdate(current, current, (key, existing) => current);
        QuantConnect.Logging.Log.Trace("TEBBrokerage.Subscribe(): {0}", current);
    }
}
public void SubscribeReturnsNotNull()
{
    // Live job configured with a single fake data queue handler
    var handlersJson = Newtonsoft.Json.JsonConvert.SerializeObject(new[] { "FakeDataQueue" });
    var job = new LiveNodePacket
    {
        Brokerage = "ZerodhaBrokerage",
        DataQueueHandler = handlersJson
    };

    var composite = new CompositeDataQueueHandler();
    composite.SetJob(job);

    // Minute trade-bar subscription on SPY
    var dataConfig = new SubscriptionDataConfig(typeof(TradeBar), Symbols.SPY, Resolution.Minute,
        TimeZones.NewYork, TimeZones.NewYork, false, false, false, false, TickType.Trade, false);

    // Subscribing must always hand back a usable enumerator
    var enumerator = composite.Subscribe(dataConfig, (_, _) => {});
    Assert.NotNull(enumerator);

    composite.Dispose();
    enumerator.Dispose();
}
/// <summary>
/// Adds the specified symbols to the subscription
/// </summary>
/// <param name="job">Job we're subscribing for:</param>
/// <param name="symbols">The symbols to be added keyed by SecurityType</param>
public void Subscribe(LiveNodePacket job, IEnumerable<Symbol> symbols)
{
    // Only request symbols that are both new and supported by this brokerage
    var symbolsToSubscribe = (from symbol in symbols
                              where !_subscribedSymbols.Contains(symbol) && CanSubscribe(symbol)
                              select symbol).ToList();
    if (symbolsToSubscribe.Count == 0)
    {
        return;
    }
    Log.Trace("FxcmBrokerage.Subscribe(): {0}", string.Join(",", symbolsToSubscribe));

    // Build a single market-data request covering all mapped instruments
    var request = new MarketDataRequest();
    foreach (var symbol in symbolsToSubscribe)
    {
        TradingSecurity fxcmSecurity;
        if (_fxcmInstruments.TryGetValue(_symbolMapper.GetBrokerageSymbol(symbol), out fxcmSecurity))
        {
            request.addRelatedSymbol(fxcmSecurity);

            // cache exchange time zone for symbol
            DateTimeZone exchangeTimeZone;
            if (!_symbolExchangeTimeZones.TryGetValue(symbol, out exchangeTimeZone))
            {
                exchangeTimeZone = MarketHoursDatabase.FromDataFolder().GetExchangeHours(Market.FXCM, symbol, symbol.SecurityType).TimeZone;
                _symbolExchangeTimeZones.Add(symbol, exchangeTimeZone);
            }
        }
    }
    request.setSubscriptionRequestType(SubscriptionRequestTypeFactory.SUBSCRIBE);
    request.setMDEntryTypeSet(MarketDataRequest.MDENTRYTYPESET_ALL);

    // NOTE(review): the lock presumably serializes access to the gateway connection — confirm
    lock (_locker)
    {
        _gateway.sendMessage(request);
    }

    // record the new subscriptions only after the request has been sent
    foreach (var symbol in symbolsToSubscribe)
    {
        _subscribedSymbols.Add(symbol);
    }
}
public void NotificationRoundTrip()
{
    // One notification of each kind, with headers where the type supports them
    var sourceEmail = new NotificationEmail("*****@*****.**", "crypto", null, null,
        new Dictionary<string, string> { { "header-key", "header-value" } });
    var packet = new LiveNodePacket
    {
        NotificationTargets = new List<Notification>
        {
            sourceEmail,
            new NotificationSms("123", null),
            new NotificationWeb("www.pupu.com", headers: new Dictionary<string, string> { { "header-key", "header-value" } })
        }
    };

    // Round-trip the packet through JSON
    var json = JsonConvert.SerializeObject(packet);
    var roundTripped = JsonConvert.DeserializeObject<LiveNodePacket>(json);

    // Email notification survives with all of its fields intact
    var email = roundTripped.NotificationTargets[0] as NotificationEmail;
    Assert.IsNotNull(email);
    Assert.AreEqual(sourceEmail.Address, email.Address);
    Assert.AreEqual(sourceEmail.Subject, email.Subject);
    Assert.AreEqual(sourceEmail.Message, email.Message);
    Assert.AreEqual(sourceEmail.Data, email.Data);
    Assert.AreEqual(sourceEmail.Headers, email.Headers);

    // SMS notification survives
    var sms = roundTripped.NotificationTargets[1] as NotificationSms;
    Assert.IsNotNull(sms);
    Assert.AreEqual("123", sms.PhoneNumber);
    Assert.AreEqual(null, sms.Message);

    // Web notification survives, including headers
    var web = roundTripped.NotificationTargets[2] as NotificationWeb;
    Assert.IsNotNull(web);
    Assert.AreEqual("www.pupu.com", web.Address);
    Assert.AreEqual(null, web.Data);
    Assert.AreEqual((packet.NotificationTargets[2] as NotificationWeb).Headers, web.Headers);
}
public void InitializesInstanceFromComposer()
{
    var composer = Composer.Instance;
    // Resolve the single registered IB brokerage factory from the composer
    using (var factory = composer.Single<IBrokerageFactory>(instance => instance.BrokerageType == typeof(InteractiveBrokersBrokerage)))
    {
        Assert.IsNotNull(factory);

        var liveJob = new LiveNodePacket { BrokerageData = factory.BrokerageData };
        var brokerage = factory.CreateBrokerage(liveJob, AlgorithmDependency);
        Assert.IsNotNull(brokerage);
        Assert.IsInstanceOf<InteractiveBrokersBrokerage>(brokerage);

        // The created brokerage must be able to connect
        brokerage.Connect();
        Assert.IsTrue(brokerage.IsConnected);
    }
}
/// <summary>
/// Removes the specified symbols to the subscription
/// </summary>
/// <param name="job">Job we're processing.</param>
/// <param name="symbols">The symbols to be removed keyed by SecurityType</param>
public void Unsubscribe(LiveNodePacket job, IEnumerable<Symbol> symbols)
{
    lock (_lockerSubscriptions)
    {
        // Only act on symbols we are actually subscribed to
        var removals = symbols.Where(symbol => _subscribedSymbols.Contains(symbol)).ToList();
        if (removals.Count == 0)
        {
            return;
        }
        Log.Trace($"CoinApiDataQueueHandler.Unsubscribe(): {string.Join(",", removals.Select(x => x.Value))}");

        // CoinAPI requires at least 5 seconds between subscription requests so we need to batch them
        _subscribedSymbols = _subscribedSymbols.Where(x => !removals.Contains(x)).ToHashSet();
        ProcessSubscriptionRequest();
    }
}