/******************************************************** 
* CLASS CONSTRUCTOR
*********************************************************/
/// <summary>
/// Create an instance of the base datafeed.
/// </summary>
/// <param name="algorithm">Algorithm instance supplying the subscriptions and securities</param>
/// <param name="job">Backtest job packet supplying the period start/finish dates</param>
public BaseDataFeed(IAlgorithm algorithm, BacktestNodePacket job)
{
    //Save the data subscriptions
    Subscriptions = algorithm.SubscriptionManager.Subscriptions;
    _subscriptions = Subscriptions.Count;

    //Public Properties:
    DataFeed = DataFeedEndpoint.FileSystem;
    IsActive = true;
    Bridge = new ConcurrentQueue<List<BaseData>>[_subscriptions];
    EndOfBridge = new bool[_subscriptions];
    SubscriptionReaderManagers = new SubscriptionDataReader[_subscriptions];
    RealtimePrices = new List<decimal>(_subscriptions);
    _frontierTime = new DateTime[_subscriptions];

    //Class Privates:
    _job = job;
    _algorithm = algorithm;
    _endOfStreams = false;
    // Guard against a DivideByZeroException when the algorithm has no subscriptions.
    _bridgeMax = _bridgeMax / Math.Max(1, _subscriptions);

    //Initialize arrays: one queue, frontier and reader per subscription.
    for (var i = 0; i < _subscriptions; i++)
    {
        _frontierTime[i] = job.PeriodStart;
        EndOfBridge[i] = false;
        Bridge[i] = new ConcurrentQueue<List<BaseData>>();
        // NOTE(review): readers are created against DataFeedEndpoint.Database even though
        // this feed reports DataFeedEndpoint.FileSystem — confirm this is intentional.
        SubscriptionReaderManagers[i] = new SubscriptionDataReader(Subscriptions[i], algorithm.Securities[Subscriptions[i].Symbol], DataFeedEndpoint.Database, job.PeriodStart, job.PeriodFinish);
    }
}
/******************************************************** 
* CLASS CONSTRUCTOR
*********************************************************/
/// <summary>
/// Create a new backtesting data feed.
/// </summary>
/// <param name="algorithm">Instance of the algorithm</param>
/// <param name="job">Algorithm work task</param>
public FileSystemDataFeed(IAlgorithm algorithm, BacktestNodePacket job)
{
    Subscriptions = algorithm.SubscriptionManager.Subscriptions;
    _subscriptions = Subscriptions.Count;

    //Public Properties:
    DataFeed = DataFeedEndpoint.FileSystem;
    IsActive = true;
    Bridge = new ConcurrentQueue<List<BaseData>>[_subscriptions];
    EndOfBridge = new bool[_subscriptions];
    SubscriptionReaders = new SubscriptionDataReader[_subscriptions];
    FillForwardFrontiers = new DateTime[_subscriptions];
    RealtimePrices = new List<decimal>(_subscriptions);

    //Class Privates:
    _job = job;
    _algorithm = algorithm;
    _endOfStreams = false;
    // Guard against a DivideByZeroException when the algorithm has no subscriptions.
    _bridgeMax = _bridgeMax / Math.Max(1, _subscriptions);

    //Set the bridge maximum count: create a queue, reader and frontier per subscription.
    for (var i = 0; i < _subscriptions; i++)
    {
        //Create a new instance in the dictionary:
        Bridge[i] = new ConcurrentQueue<List<BaseData>>();
        EndOfBridge[i] = false;
        SubscriptionReaders[i] = new SubscriptionDataReader(Subscriptions[i], _algorithm.Securities[Subscriptions[i].Symbol], DataFeed, _job.PeriodStart, _job.PeriodFinish);
        FillForwardFrontiers[i] = new DateTime();
    }
}
/******************************************************** 
* CLASS CONSTRUCTOR
*********************************************************/
/// <summary>
/// Create a new backtesting data feed.
/// </summary>
/// <param name="algorithm">Instance of the algorithm</param>
/// <param name="job">Algorithm work task</param>
public FileSystemDataFeed(IAlgorithm algorithm, BacktestNodePacket job)
{
    Subscriptions = algorithm.SubscriptionManager.Subscriptions;
    _subscriptions = Subscriptions.Count;

    //Public Properties:
    DataFeed = DataFeedEndpoint.FileSystem;
    IsActive = true;
    Bridge = new ConcurrentQueue<List<BaseData>>[_subscriptions];
    EndOfBridge = new bool[_subscriptions];
    SubscriptionReaderManagers = new SubscriptionDataReader[_subscriptions];
    FillForwardFrontiers = new DateTime[_subscriptions];
    RealtimePrices = new List<decimal>(_subscriptions);

    //Class Privates:
    _job = job;
    _algorithm = algorithm;
    _endOfStreams = false;
    // Guard against a DivideByZeroException when the algorithm has no subscriptions.
    _bridgeMax = _bridgeMax / Math.Max(1, _subscriptions);

    //Set the bridge maximum count:
    // NOTE(review): the per-subscription arrays are allocated here but their elements
    // (Bridge[i], SubscriptionReaderManagers[i]) are not initialized in this constructor —
    // presumably populated elsewhere before use; verify against the Run/Initialize path.
}
/// <summary>
/// Benchmark the file system data feed: stream every time slice from the feed
/// and time the full enumeration, logging the first slice of each new month.
/// </summary>
public void TestsFileSystemDataFeedSpeed()
{
    // Wire up the feed with local disk providers and a benchmark algorithm.
    var jobPacket = new BacktestNodePacket();
    var results = new BacktestingResultHandler();
    var mapFiles = new LocalDiskMapFileProvider();
    var factorFiles = new LocalDiskFactorFileProvider(mapFiles);
    var dataFiles = new DefaultDataFileProvider();
    var benchmarkAlgorithm = new BenchmarkTest();
    var dataFeed = new FileSystemDataFeed();
    dataFeed.Initialize(benchmarkAlgorithm, jobPacket, results, mapFiles, factorFiles, dataFiles);
    benchmarkAlgorithm.Initialize();

    // Run the feed on a background task; block until the task has started.
    var started = new ManualResetEvent(false);
    Task.Factory.StartNew(() =>
    {
        started.Set();
        dataFeed.Run();
    });
    started.WaitOne();

    var timer = Stopwatch.StartNew();
    var previousMonth = -1;
    var sliceCount = 0;
    foreach (var slice in dataFeed)
    {
        // Log once per calendar month so progress is visible without flooding the console.
        if (slice.Time.Month != previousMonth)
        {
            Console.WriteLine(DateTime.Now + " - Time: " + slice.Time);
            previousMonth = slice.Time.Month;
        }
        sliceCount++;
    }
    Console.WriteLine("Count: " + sliceCount);
    timer.Stop();
    Console.WriteLine("Elapsed time: " + timer.Elapsed);
}
/// <summary>
/// Compose result data packet - with tradable dates from the backtest job task and the partial result packet.
/// </summary>
/// <param name="job">Job that started this request</param>
/// <param name="results">Results class for the Backtest job</param>
/// <param name="progress">Progress of the packet. For the packet we assume progess of 100%.</param>
public BacktestResultPacket(BacktestNodePacket job, BacktestResult results, decimal progress = 1m)
    : base(PacketType.BacktestResult)
{
    try
    {
        Progress = Math.Round(progress, 3);
        SessionId = job.SessionId;
        PeriodFinish = job.PeriodFinish;
        PeriodStart = job.PeriodStart;
        CompileId = job.CompileId;
        Channel = job.Channel;
        BacktestId = job.BacktestId;
        Results = results;
        Name = job.Name;
        UserId = job.UserId;
        ProjectId = job.ProjectId;
        // (removed duplicate SessionId assignment — it was set twice from the same source)
        TradeableDates = job.TradeableDates;
    }
    catch (Exception err)
    {
        // Best effort: a malformed job should not prevent packet construction.
        Log.Error(err);
    }
}
/// <summary>
/// Desktop/Local Get Next Task - Get task from the Algorithm folder of VS Solution.
/// </summary>
/// <param name="location">Output: path of the algorithm assembly that was selected</param>
/// <returns>A live node packet when in live mode, otherwise a local backtest packet.</returns>
public AlgorithmNodePacket NextJob(out string location)
{
    location = AlgorithmLocation;
    Log.Trace("JobQueue.NextJob(): Selected " + location);

    // Pull optional algorithm parameters from the config.
    var jobParameters = new Dictionary<string, string>();
    var rawParameters = Config.Get("parameters");
    if (rawParameters != string.Empty)
    {
        jobParameters = JsonConvert.DeserializeObject<Dictionary<string, string>>(rawParameters);
    }

    //If this isn't a backtesting mode/request, attempt a live job.
    if (_liveMode)
    {
        var live = new LiveNodePacket
        {
            Type = PacketType.LiveNode,
            Algorithm = File.ReadAllBytes(AlgorithmLocation),
            Brokerage = Config.Get("live-mode-brokerage", PaperBrokerageTypeName),
            Channel = AccessToken,
            UserId = UserId,
            ProjectId = ProjectId,
            Version = Globals.Version,
            DeployId = AlgorithmTypeName,
            RamAllocation = int.MaxValue,
            Parameters = jobParameters,
            Language = Language,
        };

        try
        {
            // import the brokerage data for the configured brokerage
            var factoryForBrokerage = Composer.Instance.Single<IBrokerageFactory>(factory => factory.BrokerageType.MatchesTypeName(live.Brokerage));
            live.BrokerageData = factoryForBrokerage.BrokerageData;
        }
        catch (Exception err)
        {
            Log.Error(err, string.Format("Error resolving BrokerageData for live job for brokerage {0}:", live.Brokerage));
        }

        return live;
    }

    //Default run a backtesting job.
    var backtest = new BacktestNodePacket(0, 0, "", new byte[] {}, 10000, "local")
    {
        Type = PacketType.BacktestNode,
        Algorithm = File.ReadAllBytes(AlgorithmLocation),
        Channel = AccessToken,
        UserId = UserId,
        ProjectId = ProjectId,
        Version = Globals.Version,
        BacktestId = AlgorithmTypeName,
        RamAllocation = int.MaxValue,
        Language = Language,
        Parameters = jobParameters
    };
    return backtest;
}
/******************************************************** 
* CONSTRUCTOR
*********************************************************/
/// <summary>
/// Backtesting result handler constructor.
/// </summary>
/// <remarks>Setup the default sampling and notification periods based on the backtest length.</remarks>
/// <param name="job">Backtest job packet whose period defines the sampling resolution</param>
public BacktestingResultHandler(BacktestNodePacket job)
{
    _job = job;
    _exitTriggered = false;
    _compileId = job.CompileId;
    _backtestId = job.BacktestId;
    _timeRequested = DateTime.Now;

    //Get the resample period: target ~4000 samples across the backtest, never
    //sampling more often than once every 4 minutes.
    const double targetSamples = 4000;
    const double minimumPeriodMinutes = 4;
    var backtestMinutes = (job.PeriodFinish - job.PeriodStart).TotalMinutes;
    var samplePeriodMinutes = (backtestMinutes < (minimumPeriodMinutes * targetSamples))
        ? minimumPeriodMinutes
        : (backtestMinutes / targetSamples);

    // Space out the sampling every
    _resamplePeriod = TimeSpan.FromMinutes(samplePeriodMinutes);
    Log.Trace("BacktestingResultHandler(): Sample Period Set: " + samplePeriodMinutes.ToString("00.00"));

    //Notification Period for Browser Pushes:
    _notificationPeriod = TimeSpan.FromSeconds(2);

    //Initialize Properties:
    _messages = new ConcurrentQueue<Packet>();
    _charts = new ConcurrentDictionary<string, Chart>();
    _chartLock = new Object();
    _isActive = true;

    //Set the start time for the algorithm
    _startTime = DateTime.Now;

    //Default charts:
    Charts.AddOrUpdate("Strategy Equity", new Chart("Strategy Equity", ChartType.Stacked));
    Charts["Strategy Equity"].Series.Add("Equity", new Series("Equity", SeriesType.Candle));
    Charts["Strategy Equity"].Series.Add("Daily Performance", new Series("Daily Performance", SeriesType.Bar, "%"));
}
/// <summary>
/// Create a console status handler for the given backtest job.
/// </summary>
/// <param name="job">Backtest job packet whose status will be reported</param>
public BacktestConsoleStatusHandler(BacktestNodePacket job)
{
    // Renamed parameter from "_job" to "job": the underscore prefix is reserved for
    // private fields and forced a confusing "this._job = _job" assignment.
    _job = job;
}
/// <summary>
/// Initialize the result handler with this result packet.
/// </summary>
/// <param name="job">Algorithm job packet for this result handler</param>
/// <param name="messagingHandler">The handler responsible for communicating messages to listeners</param>
/// <param name="api">The api instance used for handling logs</param>
/// <param name="dataFeed"></param>
/// <param name="setupHandler"></param>
/// <param name="transactionHandler"></param>
public void Initialize(AlgorithmNodePacket job, IMessagingHandler messagingHandler, IApi api, IDataFeed dataFeed, ISetupHandler setupHandler, ITransactionHandler transactionHandler)
{
    _api = api;
    _messagingHandler = messagingHandler;
    _transactionHandler = transactionHandler;
    // Soft cast: a direct cast would throw InvalidCastException for a non-backtest
    // packet before the descriptive check below could ever run.
    _job = job as BacktestNodePacket;
    if (_job == null) throw new Exception("BacktestingResultHandler.Constructor(): Submitted Job type invalid.");
    _compileId = _job.CompileId;
    _backtestId = _job.BacktestId;
}
/// <summary>
/// Verify a new backtest job packet is published as a single-frame message whose
/// payload is the JSON serialization of the packet, on the expected route.
/// </summary>
public void MessageHandler_WillSend_NewBackTestJob_ToCorrectRoute()
{
    var backtestPacket = new BacktestNodePacket();

    using (var receiver = new PullSocket(">tcp://localhost:" + _port))
    {
        _messageHandler.SetAuthentication(backtestPacket);

        var received = receiver.ReceiveMultipartMessage();
        var body = received[0].ConvertToString();
        var deserialized = JsonConvert.DeserializeObject<Packet>(body);

        Assert.IsTrue(received.FrameCount == 1);
        Assert.IsTrue(PacketType.BacktestNode == deserialized.Type);
        Assert.IsTrue(body == JsonConvert.SerializeObject(backtestPacket));
    }
}
/// <summary>
/// Desktop/Local Get Next Task - Get task from the Algorithm folder of VS Solution.
/// </summary>
/// <param name="location">Output: path of the algorithm assembly that was selected</param>
/// <returns>A live node packet when in live mode, otherwise a local backtest packet.</returns>
public AlgorithmNodePacket NextJob(out string location)
{
    location = AlgorithmLocation;

    Log.Trace("JobQueue.NextJob(): Selected " + location);

    //If this isn't a backtesting mode/request, attempt a live job.
    if (_liveMode)
    {
        var liveJob = new LiveNodePacket
        {
            Type = PacketType.LiveNode,
            Algorithm = File.ReadAllBytes(AlgorithmLocation),
            Brokerage = Config.Get("live-mode-brokerage", PaperBrokerageTypeName),
            Channel = Config.Get("job-channel"),
            UserId = Config.GetInt("job-user-id"),
            Version = Constants.Version,
            DeployId = Config.Get("algorithm-type-name"),
            RamAllocation = int.MaxValue
        };

        try
        {
            // import the brokerage data for the configured brokerage
            var brokerageFactory = Composer.Instance.Single<IBrokerageFactory>(factory => factory.BrokerageType.MatchesTypeName(liveJob.Brokerage));
            liveJob.BrokerageData = brokerageFactory.BrokerageData;
        }
        catch (Exception err)
        {
            // Fixed typo in log message: "resoliving" -> "resolving".
            Log.Error(string.Format("JobQueue.NextJob(): Error resolving BrokerageData for live job for brokerage {0}. {1}", liveJob.Brokerage, err.Message));
        }

        return liveJob;
    }

    //Default run a backtesting job.
    var backtestJob = new BacktestNodePacket(0, 0, "", new byte[] {}, 10000, "local")
    {
        Type = PacketType.BacktestNode,
        Algorithm = File.ReadAllBytes(AlgorithmLocation),
        Version = Constants.Version,
        BacktestId = Config.Get("algorithm-type-name"),
        RamAllocation = int.MaxValue
    };
    return backtestJob;
}
/******************************************************** 
* CLASS VARIABLES
*********************************************************/
/******************************************************** 
* CLASS PROPERTIES
*********************************************************/
/******************************************************** 
* CLASS CONSTRUCTOR
*********************************************************/
/// <summary>
/// Pass through the backtesting datafeed to the underlying file system datafeed
/// implementation, re-tagging the endpoint as backtesting.
/// </summary>
/// <param name="algorithm">Algorithm we're operating with</param>
/// <param name="job">Algorithm worker job</param>
public BacktestingDataFeed(IAlgorithm algorithm, BacktestNodePacket job)
    : base(algorithm, job)
{
    // Base constructor does the real setup; only the endpoint marker differs.
    DataFeed = DataFeedEndpoint.Backtesting;
}
/// <summary>
/// Initialize the result handler with this result packet.
/// </summary>
/// <param name="job">Algorithm job packet for this result handler</param>
/// <param name="messagingHandler">The handler responsible for communicating messages to listeners</param>
/// <param name="api">The api instance used for handling logs</param>
/// <param name="dataFeed"></param>
/// <param name="setupHandler"></param>
/// <param name="transactionHandler"></param>
public void Initialize(AlgorithmNodePacket job, IMessagingHandler messagingHandler, IApi api, IDataFeed dataFeed, ISetupHandler setupHandler, ITransactionHandler transactionHandler)
{
    _api = api;
    _messagingHandler = messagingHandler;
    _transactionHandler = transactionHandler;
    // Soft cast: a direct cast would throw InvalidCastException for a non-backtest
    // packet before the descriptive check below could ever run.
    _job = job as BacktestNodePacket;
    if (_job == null) throw new Exception("BacktestingResultHandler.Constructor(): Submitted Job type invalid.");
    _compileId = _job.CompileId;
    _backtestId = _job.BacktestId;

    //Get the resample period:
    var totalMinutes = (_job.PeriodFinish - _job.PeriodStart).TotalMinutes;
    var resampleMinutes = (totalMinutes < (_minimumSamplePeriod * _samples)) ? _minimumSamplePeriod : (totalMinutes / _samples);

    // Space out the sampling every
    _resamplePeriod = TimeSpan.FromMinutes(resampleMinutes);
    Log.Trace("BacktestingResultHandler(): Sample Period Set: " + resampleMinutes.ToString("00.00"));
}
/// <summary>
/// Desktop/Local Get Next Task - Get task from the Algorithm folder of VS Solution.
/// </summary>
/// <param name="location">Output: path of the algorithm assembly that was selected</param>
/// <returns>A live node packet when not backtesting, otherwise a local backtest packet.</returns>
public AlgorithmNodePacket NextJob(out string location)
{
    location = AlgorithmLocation;
    var algorithmBytes = File.ReadAllBytes(AlgorithmLocation);

    //If this isn't a backtesting mode/request, attempt a live job.
    if (!BacktestingMode)
    {
        return new LiveNodePacket
        {
            ResultEndpoint = ResultHandlerEndpoint.LiveTrading,
            SetupEndpoint = SetupHandlerEndpoint.PaperTrading,
            DataEndpoint = DataFeedEndpoint.LiveTrading,
            TransactionEndpoint = TransactionHandlerEndpoint.Backtesting,
            RealTimeEndpoint = RealTimeEndpoint.LiveTrading,
            Type = PacketType.LiveNode,
            Algorithm = algorithmBytes
        };
    }

    //Default run a backtesting job.
    return new BacktestNodePacket(0, 0, "", new byte[] {}, 10000, "local")
    {
        ResultEndpoint = ResultHandlerEndpoint.Console,
        SetupEndpoint = SetupHandlerEndpoint.Console,
        DataEndpoint = DataFeedEndpoint.FileSystem,
        TransactionEndpoint = TransactionHandlerEndpoint.Backtesting,
        RealTimeEndpoint = RealTimeEndpoint.Backtesting,
        Type = PacketType.BacktestNode,
        Algorithm = algorithmBytes
    };
}
/// <summary>
/// Compose result data packet - with tradable dates from the backtest job task and the partial result packet.
/// </summary>
/// <param name="job">Job that started this request</param>
/// <param name="results">Results class for the Backtest job</param>
/// <param name="progress">Progress of the packet. For the packet we assume progess of 100%.</param>
public BacktestResultPacket(BacktestNodePacket job, BacktestResult results, decimal progress = 1m)
    : base(PacketType.BacktestResult)
{
    try
    {
        Progress = Math.Round(progress, 3);
        SessionId = job.SessionId;
        PeriodFinish = job.PeriodFinish;
        PeriodStart = job.PeriodStart;
        CompileId = job.CompileId;
        Channel = job.Channel;
        BacktestId = job.BacktestId;
        Results = results;
        Name = job.Name;
        UserId = job.UserId;
        ProjectId = job.ProjectId;
        // (removed duplicate SessionId assignment — it was set twice from the same source)
        TradeableDates = job.TradeableDates;
    }
    catch (Exception err)
    {
        // Best effort: a malformed job should not prevent packet construction.
        Log.Error("BacktestResultPacket.Constructor: " + err.Message);
    }
}
/// <summary>
/// Desktop/Local Get Next Task - Get task from the Algorithm folder of VS Solution.
/// </summary>
/// <param name="location">Output: path of the algorithm assembly that was selected</param>
/// <returns>A live node packet when in live mode, otherwise a local backtest packet.</returns>
public AlgorithmNodePacket NextJob(out string location)
{
    location = AlgorithmLocation;

    //If this isn't a backtesting mode/request, attempt a live job.
    if (_liveMode)
    {
        var liveJob = new LiveNodePacket
        {
            Type = PacketType.LiveNode,
            DataEndpoint = DataFeedEndpoint.LiveTrading,
            RealTimeEndpoint = RealTimeEndpoint.LiveTrading,
            ResultEndpoint = ResultHandlerEndpoint.Console,
            SetupEndpoint = SetupHandlerEndpoint.Brokerage,
            TransactionEndpoint = TransactionHandlerEndpoint.Brokerage,
            Algorithm = File.ReadAllBytes(AlgorithmLocation),
            Brokerage = Config.Get("live-mode-brokerage", PaperBrokerageTypeName),
            Channel = Config.Get("job-channel"),
            UserId = Config.GetInt("job-user-id"),
            Version = Constants.Version
        };

        try
        {
            // import the brokerage data for the configured brokerage
            var brokerageFactory = Composer.Instance.Single<IBrokerageFactory>(factory => factory.BrokerageType.MatchesTypeName(liveJob.Brokerage));
            liveJob.BrokerageData = brokerageFactory.BrokerageData;

            // if we're doing paper select the correct transaction handler
            if (liveJob.Brokerage == "PaperBrokerage")
            {
                liveJob.TransactionEndpoint = TransactionHandlerEndpoint.Backtesting;
            }
        }
        catch (Exception err)
        {
            // Fixed typo in log message: "resoliving" -> "resolving".
            Log.Error(string.Format("JobQueue.NextJob(): Error resolving BrokerageData for live job for brokerage {0}. {1}", liveJob.Brokerage, err.Message));
        }

        return liveJob;
    }

    //Default run a backtesting job.
    var backtestJob = new BacktestNodePacket(0, 0, "", new byte[] {}, 10000, "local")
    {
        Type = PacketType.BacktestNode,
        DataEndpoint = DataFeedEndpoint.FileSystem,
        SetupEndpoint = SetupHandlerEndpoint.Console,
        ResultEndpoint = ResultHandlerEndpoint.Console,
        RealTimeEndpoint = RealTimeEndpoint.Backtesting,
        TransactionEndpoint = TransactionHandlerEndpoint.Backtesting,
        Algorithm = File.ReadAllBytes(AlgorithmLocation),
        Version = Constants.Version
    };
    return backtestJob;
}