/// <summary>
/// Initializes a new instance of the <see cref="DataPlanner{T}"/> class.
/// </summary>
/// <param name="algorithm">The algorithm this planner operates on.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="algorithm"/> is null.</exception>
public DataPlanner(IAlgorithm algorithm)
{
    // nameof keeps the parameter name refactor-safe (was a hard-coded string)
    if (algorithm == null) throw new ArgumentNullException(nameof(algorithm));
    m_Algorithm = algorithm;
}
/// <summary>
/// Initializes the data feed: validates that the algorithm has something to subscribe to,
/// builds the subscription/bridge plumbing, and registers one subscription per universe.
/// </summary>
/// <param name="algorithm">The algorithm whose subscriptions and universes drive the feed.</param>
/// <param name="job">The job packet for this run.</param>
/// <param name="resultHandler">Handler used to surface results and errors.</param>
/// <param name="mapFileProvider">Provider of symbol map files.</param>
/// <exception cref="InvalidOperationException">Thrown when the algorithm has neither subscriptions nor universes.</exception>
public void Initialize(IAlgorithm algorithm, AlgorithmNodePacket job, IResultHandler resultHandler, IMapFileProvider mapFileProvider)
{
    if (algorithm.SubscriptionManager.Subscriptions.Count == 0 && algorithm.Universes.IsNullOrEmpty())
    {
        // was: throw new Exception(...) -- the specific type is more informative
        // and still derives from Exception, so existing catch blocks keep working
        throw new InvalidOperationException("No subscriptions registered and no universe defined.");
    }

    _algorithm = algorithm;
    _resultHandler = resultHandler;
    _mapFileProvider = mapFileProvider;
    _subscriptions = new ConcurrentDictionary<Symbol, Subscription>();
    _cancellationTokenSource = new CancellationTokenSource();
    IsActive = true;
    Bridge = new BusyBlockingCollection<TimeSlice>(100);

    // Ref wrapper lets the fill-forward resolution be read/updated by reference elsewhere
    var ffres = Time.OneSecond;
    _fillForwardResolution = Ref.Create(() => ffres, res => ffres = res);

    // find the minimum resolution, ignoring ticks
    ffres = ResolveFillForwardResolution(algorithm);

    // add each universe selection subscription to the feed
    // (start/end are loop-invariant, so compute them once instead of per universe)
    var startTimeUtc = _algorithm.StartDate.ConvertToUtc(_algorithm.TimeZone);
    var endTimeUtc = _algorithm.EndDate.ConvertToUtc(_algorithm.TimeZone);
    foreach (var universe in _algorithm.Universes)
    {
        AddUniverseSubscription(universe, startTimeUtc, endTimeUtc);
    }
}
/// <summary>
/// Creates a named run item for the given algorithm.
/// </summary>
/// <param name="name">Display name of the run.</param>
/// <param name="algorithm">The algorithm to initialize this run from.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="algorithm"/> is null.</exception>
public Run(string name, IAlgorithm algorithm)
    : base(name)
{
    // include the parameter name in the exception (was a bare ArgumentNullException)
    if (algorithm == null) throw new ArgumentNullException(nameof(algorithm));
    color = Color.Black;
    description = ItemDescription;
    Initialize(algorithm);
}
/********************************************************
* CLASS CONSTRUCTOR
*********************************************************/
/// <summary>
/// Live trading datafeed handler provides a base implementation of a live trading datafeed. Derived types
/// need only implement the GetNextTicks() function to return unprocessed ticks from a data source.
/// This creates a new data feed with a DataFeedEndpoint of LiveTrading.
/// </summary>
/// <param name="algorithm">Algorithm requesting data</param>
protected LiveTradingDataFeed(IAlgorithm algorithm)
{
    //Subscription Count:
    _subscriptions = algorithm.SubscriptionManager.Subscriptions;

    //Set Properties:
    _dataFeed = DataFeedEndpoint.LiveTrading;
    _isActive = true;
    // one bridge queue, end-of-stream flag and reader per subscription
    _bridge = new ConcurrentQueue<List<BaseData>>[Subscriptions.Count];
    _endOfBridge = new bool[Subscriptions.Count];
    _subscriptionManagers = new SubscriptionDataReader[Subscriptions.Count];
    _realtimePrices = new List<decimal>();

    //Class Privates:
    _algorithm = algorithm;

    //Setup the arrays:
    for (var i = 0; i < Subscriptions.Count; i++)
    {
        _endOfBridge[i] = false;
        _bridge[i] = new ConcurrentQueue<List<BaseData>>();

        //This is quantconnect data source, store here for speed/ease of access
        _isDynamicallyLoadedData.Add(algorithm.Securities[_subscriptions[i].Symbol].IsDynamicallyLoadedData);

        //Subscription managers for downloading user data:
        // MinValue/MaxValue give the reader an unbounded period for live mode
        _subscriptionManagers[i] = new SubscriptionDataReader(_subscriptions[i], algorithm.Securities[_subscriptions[i].Symbol], DataFeedEndpoint.LiveTrading, DateTime.MinValue, DateTime.MaxValue);

        //Set up the source file for today:
        _subscriptionManagers[i].RefreshSource(DateTime.Now.Date);

        // placeholder realtime price until the first tick arrives
        _realtimePrices.Add(0);
    }
}
/// <summary>
/// Finds every point that has no other point within the rectangle's MaxDistance
/// and stores them in <c>s.Singles</c>. Brute-force pairwise scan: O(n^2).
/// </summary>
/// <param name="s">Algorithm holding the point set, rectangle and Singles output.</param>
/// <returns>Elapsed wall-clock milliseconds for the scan.</returns>
public long UpdateSingles(IAlgorithm s)
{
    var stopwatch = Stopwatch.StartNew();
    s.Singles.Clear();
    var count = s.Points.Count;
    for (var i = 0; i < count; i++)
    {
        var candidate = s.Points[i];
        var isSingle = true;
        for (var j = 0; j < count; j++)
        {
            if (j == i)
            {
                continue;
            }
            var other = s.Points[j];
            // any neighbor at or inside MaxDistance disqualifies the candidate
            if (candidate.Distance(other.X, other.Y) <= s.Rectangle.MaxDistance)
            {
                isSingle = false;
                break;
            }
        }
        if (isSingle)
        {
            s.Singles.Add(candidate);
        }
    }
    stopwatch.Stop();
    return stopwatch.ElapsedMilliseconds;
}
/// <summary>
/// Builds the processing dialog view model: wires the parameter-load command,
/// resolves all registered algorithms, and restores the recently-used list.
/// </summary>
public ProcessingDialogViewModel()
{
    LoadParameters = new DelegateCommand<MassSpecStudio.Core.Domain.Algorithm>(OnLoadParameters);
    // all IAlgorithm implementations registered with the service locator
    _algorithms = ServiceLocator.Current.GetAllInstances<IAlgorithm>().ToList();
    // default selection: first registered algorithm (null when none are registered)
    _selectedAlgorithm = _algorithms.FirstOrDefault();
    _recentAlgorithmsUsed = RecentAlgorithms.Read();
}
/// <summary>
/// Creates a new BacktestingBrokerage for the specified algorithm
/// </summary>
/// <param name="algorithm">The algorithm instance</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="algorithm"/> is null.</exception>
public BacktestingBrokerage(IAlgorithm algorithm)
    : base("Backtesting Brokerage")
{
    // fail fast with a clear exception instead of a NullReferenceException
    // when _algorithm.Transactions is dereferenced below
    if (algorithm == null) throw new ArgumentNullException(nameof(algorithm));
    _algorithm = algorithm;
    _orders = _algorithm.Transactions.Orders;
    _pending = new ConcurrentDictionary<int, Order>();
}
/********************************************************
* CLASS CONSTRUCTOR
*********************************************************/
/// <summary>
/// Create an instance of the base datafeed.
/// </summary>
/// <param name="algorithm">Algorithm whose subscriptions and securities seed the feed.</param>
/// <param name="job">Backtest job supplying the period start/finish dates.</param>
public BaseDataFeed(IAlgorithm algorithm, BacktestNodePacket job)
{
    //Save the data subscriptions
    Subscriptions = algorithm.SubscriptionManager.Subscriptions;
    _subscriptions = Subscriptions.Count;

    //Public Properties:
    DataFeed = DataFeedEndpoint.FileSystem;
    IsActive = true;
    // one queue, end-of-stream flag, reader and frontier time per subscription
    Bridge = new ConcurrentQueue<List<BaseData>>[_subscriptions];
    EndOfBridge = new bool[_subscriptions];
    SubscriptionReaderManagers = new SubscriptionDataReader[_subscriptions];
    RealtimePrices = new List<decimal>(_subscriptions);
    _frontierTime = new DateTime[_subscriptions];

    //Class Privates:
    _job = job;
    _algorithm = algorithm;
    _endOfStreams = false;
    // NOTE(review): throws DivideByZeroException when there are zero subscriptions --
    // confirm callers guarantee at least one subscription before construction.
    _bridgeMax = _bridgeMax / _subscriptions;

    //Initialize arrays:
    for (var i = 0; i < _subscriptions; i++)
    {
        _frontierTime[i] = job.PeriodStart;
        EndOfBridge[i] = false;
        Bridge[i] = new ConcurrentQueue<List<BaseData>>();
        // NOTE(review): reader endpoint is Database while DataFeed above is FileSystem -- confirm intentional
        SubscriptionReaderManagers[i] = new SubscriptionDataReader(Subscriptions[i], algorithm.Securities[Subscriptions[i].Symbol], DataFeedEndpoint.Database, job.PeriodStart, job.PeriodFinish);
    }
}
/// <summary>
/// Creates a new BacktestingBrokerage for the specified algorithm
/// </summary>
/// <param name="algorithm">The algorithm instance</param>
/// <param name="name">The name of the brokerage</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="algorithm"/> is null.</exception>
protected BacktestingBrokerage(IAlgorithm algorithm, string name)
    : base(name)
{
    // fail fast with a clear exception instead of a NullReferenceException
    // when _algorithm.Transactions is dereferenced below
    if (algorithm == null) throw new ArgumentNullException(nameof(algorithm));
    _algorithm = algorithm;
    _orders = _algorithm.Transactions.Orders;
    _pending = new ConcurrentDictionary<int, Order>();
}
/********************************************************
* PUBLIC CONSTRUCTOR
*********************************************************/
/// <summary>
/// Setup the algorithm data, cash, job start end date etc.
/// </summary>
/// <param name="algorithm">The algorithm instance.</param>
/// <param name="job">The job packet describing this run.</param>
public BacktestingRealTimeHandler(IAlgorithm algorithm, AlgorithmNodePacket job)
{
    //Initialize:
    _algorithm = algorithm;
    // events are registered later; start with an empty schedule
    _events = new List<RealTimeEvent>();
    _job = job;
}
/// <summary>
/// Creates a new IBrokerage instance and set ups the environment for the brokerage
/// </summary>
/// <param name="job">The job packet to create the brokerage for</param>
/// <param name="algorithm">The algorithm instance</param>
/// <returns>A new brokerage instance</returns>
public override IBrokerage CreateBrokerage(LiveNodePacket job, IAlgorithm algorithm)
{
    var readErrors = new List<string>();

    // connection settings come from local configuration
    var useTws = Config.GetBool("ib-use-tws");
    var port = Config.GetInt("ib-port", 4001);
    var host = Config.Get("ib-host", "127.0.0.1");
    var twsDirectory = Config.Get("ib-tws-dir", "C:\\Jts");
    var ibControllerDirectory = Config.Get("ib-controller-dir", "C:\\IBController");

    // credentials come from the job's brokerage data; failures accumulate in readErrors
    var account = Read<string>(job.BrokerageData, "ib-account", readErrors);
    var userID = Read<string>(job.BrokerageData, "ib-user-name", readErrors);
    var password = Read<string>(job.BrokerageData, "ib-password", readErrors);
    var agentDescription = Read<AgentDescription>(job.BrokerageData, "ib-agent-description", readErrors);

    if (readErrors.Count != 0)
    {
        // if we had errors then we can't create the instance
        throw new Exception(string.Join(Environment.NewLine, readErrors));
    }

    // launch the IB gateway
    InteractiveBrokersGatewayRunner.Start(ibControllerDirectory, twsDirectory, userID, password, useTws);

    var brokerage = new InteractiveBrokersBrokerage(algorithm.Transactions, algorithm.Portfolio, account, host, port, agentDescription);
    // register the brokerage as a data queue handler as well
    Composer.Instance.AddPart<IDataQueueHandler>(brokerage);

    return brokerage;
}
/// <summary>
/// Runs this command against the specified algorithm instance
/// </summary>
/// <param name="algorithm">The algorithm to run this command against</param>
public CommandResultPacket Run(IAlgorithm algorithm)
{
    var ticket = algorithm.Transactions.CancelOrder(OrderId);
    // a non-null CancelRequest means the cancellation was accepted
    var accepted = ticket.CancelRequest != null;
    return new Result(this, accepted, ticket.QuantityFilled);
}
/********************************************************
* CLASS CONSTRUCTOR
*********************************************************/
/// <summary>
/// Create a new backtesting data feed.
/// </summary>
/// <param name="algorithm">Instance of the algorithm</param>
/// <param name="job">Algorithm work task</param>
public FileSystemDataFeed(IAlgorithm algorithm, BacktestNodePacket job)
{
    Subscriptions = algorithm.SubscriptionManager.Subscriptions;
    _subscriptions = Subscriptions.Count;

    //Public Properties:
    DataFeed = DataFeedEndpoint.FileSystem;
    IsActive = true;
    // one queue, end-of-stream flag, reader and fill-forward frontier per subscription
    Bridge = new ConcurrentQueue<List<BaseData>>[_subscriptions];
    EndOfBridge = new bool[_subscriptions];
    SubscriptionReaders = new SubscriptionDataReader[_subscriptions];
    FillForwardFrontiers = new DateTime[_subscriptions];
    RealtimePrices = new List<decimal>(_subscriptions);

    //Class Privates:
    _job = job;
    _algorithm = algorithm;
    _endOfStreams = false;
    // NOTE(review): throws DivideByZeroException when there are zero subscriptions --
    // confirm callers guarantee at least one subscription before construction.
    _bridgeMax = _bridgeMax / _subscriptions;

    //Set the bridge maximum count:
    for (var i = 0; i < _subscriptions; i++)
    {
        //Create a new instance in the dictionary:
        Bridge[i] = new ConcurrentQueue<List<BaseData>>();
        EndOfBridge[i] = false;
        SubscriptionReaders[i] = new SubscriptionDataReader(Subscriptions[i], _algorithm.Securities[Subscriptions[i].Symbol], DataFeed, _job.PeriodStart, _job.PeriodFinish);
        // default(DateTime) frontier: no fill-forward emitted yet for this subscription
        FillForwardFrontiers[i] = new DateTime();
    }
}
/// <summary>
/// Stores the collaborating components used to build a tag cloud.
/// </summary>
/// <param name="reader">Word source.</param>
/// <param name="normalizer">Word normalizer.</param>
/// <param name="filter">Word filter.</param>
/// <param name="algorithm">Cloud layout algorithm.</param>
public TagCloudBuilder(IWordsReader reader, IWordNormalizer normalizer, IWordFilter filter, IAlgorithm algorithm)
{
    this.algorithm = algorithm;
    this.filter = filter;
    this.normalizer = normalizer;
    this.reader = reader;
}
/// <summary>
/// Creates an execution manager coordinating the given algorithm and data manager.
/// </summary>
/// <param name="algorithm">The algorithm to execute.</param>
/// <param name="dataManager">The data manager supplying data to the algorithm.</param>
public ExecutionManager(IAlgorithm algorithm, IDataManager dataManager)
{
    this.algorithm = algorithm;
    this.dataManager = dataManager;
    // forward the algorithm's diagnostics to this manager's internal handler
    // NOTE(review): no matching unsubscribe is visible here -- confirm lifetime to avoid a handler leak
    algorithm.DiagnosticsEvent += OnInternalDiagnosticsEvent;
}
/// <summary>
/// Describes a benchmark approach: an algorithm plus its warm-up and measured round counts.
/// </summary>
/// <param name="title">Display title for this approach.</param>
/// <param name="warmupRounds">Number of warm-up rounds.</param>
/// <param name="benchmarkRounds">Number of benchmark rounds.</param>
/// <param name="algorithm">The algorithm under test.</param>
public Approach(string title, int warmupRounds, int benchmarkRounds, IAlgorithm algorithm)
{
    Algorithm = algorithm;
    Title = title;
    BenchmarkRounds = benchmarkRounds;
    WarmupRounds = warmupRounds;
}
/// <summary>
/// Stores the collaborating components used to build a tag cloud.
/// </summary>
/// <param name="reader">Word source.</param>
/// <param name="algorithm">Cloud layout algorithm.</param>
/// <param name="writer">Image output writer.</param>
/// <param name="filter">Word filter.</param>
public TagCloudBuilder(IWordsReader reader, IAlgorithm algorithm, IImageWriter writer, IWordsFilter filter)
{
    this.reader = reader;
    this.algorithm = algorithm;
    this.writer = writer;
    this.filter = filter;
}
/// <summary>
/// Creates the home controller with its algorithm and backing repositories.
/// </summary>
/// <param name="algorithm">The algorithm instance.</param>
/// <param name="arrayRepository">Repository of input arrays.</param>
/// <param name="parametersRepository">Repository of run parameters.</param>
/// <param name="resultRepository">Repository of results.</param>
public HomeController(IAlgorithm algorithm, IArrayRepository arrayRepository, IParametersRepository parametersRepository, IResultRepository resultRepository)
{
    _resultRepository = resultRepository;
    _parametersRepository = parametersRepository;
    _arrayRepository = arrayRepository;
    Algorithm = algorithm;
}
/// <summary>
/// Creates a test live trading data feed with the specified fast forward factor
/// </summary>
/// <param name="algorithm">The algorithm under analysis</param>
/// <param name="job">The job for the algorithm</param>
public TestLiveTradingDataFeed(IAlgorithm algorithm, LiveNodePacket job)
    : base(algorithm, job)
{
    // capture construction time as both the start and current clock
    // (two separate DateTime.Now reads; the values may differ by ticks)
    _start = DateTime.Now;
    _current = DateTime.Now;
    // one-second spacing -- presumably the interval between simulated ticks; confirm in the tick generator
    _tickResolution = TimeSpan.FromSeconds(1);
}
/// <summary>
/// Creates a new <see cref="ScheduledEvent"/> that will fire before market close by the specified time
/// </summary>
/// <param name="algorithm">The algorithm instance the event is for</param>
/// <param name="resultHandler">The result handler, used to communicate run time errors</param>
/// <param name="start">The date to start the events</param>
/// <param name="end">The date to end the events</param>
/// <param name="endOfDayDelta">The time difference between the market close and the event, positive time will fire before market close</param>
/// <param name="currentUtcTime">Specifies the current time in UTC, before which, no events will be scheduled. Specify null to skip this filter.</param>
/// <returns>The new <see cref="ScheduledEvent"/> that will fire near market close each tradeable day</returns>
/// <exception cref="ArgumentException">Thrown when <paramref name="endOfDayDelta"/> is a day or longer.</exception>
public static ScheduledEvent EveryAlgorithmEndOfDay(IAlgorithm algorithm, IResultHandler resultHandler, DateTime start, DateTime end, TimeSpan endOfDayDelta, DateTime? currentUtcTime = null)
{
    if (endOfDayDelta >= Time.OneDay)
    {
        // nameof keeps the parameter reference refactor-safe (was a hard-coded string)
        throw new ArgumentException("Delta must be less than a day", nameof(endOfDayDelta));
    }

    // set up an event to fire every tradeable date for the algorithm as a whole
    var eodEventTime = Time.OneDay.Subtract(endOfDayDelta);

    // create enumerable of end of day in algorithm's time zone
    var times =
        // for every date any exchange is open in the algorithm
        from date in Time.EachTradeableDay(algorithm.Securities.Values, start, end)
        // define the time of day we want the event to fire, a little before midnight
        let eventTime = date + eodEventTime
        // convert the event time into UTC
        let eventUtcTime = eventTime.ConvertToUtc(algorithm.TimeZone)
        // perform filter to verify it's not before the current time
        where !currentUtcTime.HasValue || eventUtcTime > currentUtcTime.Value
        select eventUtcTime;

    return new ScheduledEvent(CreateEventName("Algorithm", "EndOfDay"), times, (name, triggerTime) =>
    {
        try
        {
            algorithm.OnEndOfDay();
        }
        catch (Exception err)
        {
            // surface user-code failures as runtime errors and log them
            resultHandler.RuntimeError(string.Format("Runtime error in {0} event: {1}", name, err.Message), err.StackTrace);
            Log.Error(err, string.Format("ScheduledEvent.{0}:", name));
        }
    });
}
/// <summary>
/// K-nearest-neighbor search via the KD-tree.
/// Missing mapping to P objects
/// KdTree could be refactored to use P object instead of Math.Net
///
/// O(k * log n)
/// </summary>
/// <param name="s">Algorithm holding the KNN result container to fill.</param>
/// <param name="origin">Query point whose nearest neighbors are sought.</param>
/// <param name="conf">Search configuration; a default instance is used when null.</param>
/// <returns>Elapsed wall-clock milliseconds for the search.</returns>
public long UpdateKnn(IAlgorithm s, IP origin, KnnConfiguration conf)
{
    if (conf == null) conf = new KnnConfiguration();
    // these options are not supported by the KD-tree path yet
    if (conf.SameTypeOnly) throw new NotImplementedException();
    if (conf.MaxDistance.HasValue) throw new NotImplementedException();

    var sw = new Stopwatch();
    sw.Start();

    // query the KD-tree with the origin as a 2-component vector
    var vector = new DenseVector(new[] { origin.X, origin.Y });
    var nn = Tree.FindNearestNNeighbors(vector, conf.K).ToList();

    // reset and repopulate the result container
    s.Knn.Clear();
    s.Knn.Origin = origin;
    s.Knn.K = conf.K;
    foreach (var i in nn)
    {
        // i[0]/i[1] are the neighbor's X/Y coordinates
        var p = new P { X = i[0], Y = i[1] };
        var dist = origin.Distance(p.X,p.Y);
        s.Knn.NNs.Add(new PDist {Point = p, Distance = dist});
    }
    sw.Stop();
    return sw.ElapsedMilliseconds;
}
/// <summary>
/// Initializes the calculating thread for an algorithm.
/// </summary>
/// <param name="alg">The algorithm to run.</param>
/// <param name="tasks">The task packages to process.</param>
/// <param name="function">The black-box function being optimized.</param>
public CalculatingThread(IAlgorithm alg, List<ITaskPackage> tasks, BlackBoxFunction function)
{
    this.alg = alg;
    this.tasks = tasks;
    this.function = function;
}
/********************************************************
* PUBLIC CONSTRUCTOR
*********************************************************/
/// <summary>
/// Setup the algorithm data, cash, job start end date etc.
/// </summary>
/// <param name="algorithm">The algorithm instance.</param>
/// <param name="job">The job packet describing this run.</param>
public BacktestingRealTimeHandler(IAlgorithm algorithm, AlgorithmNodePacket job)
{
    //Initialize:
    _algorithm = algorithm;
    // events are registered later; start with an empty schedule
    _events = new List<RealTimeEvent>();
    _job = job;
    // per-security-type market hours cache, populated on demand
    _today = new Dictionary<SecurityType, MarketToday>();
}
/// <summary>
/// Creates an analysis controller over the given sample, constructing
/// the FFT and IDFT algorithm instances it will use.
/// </summary>
/// <param name="currentSample">The sample to analyze.</param>
/// <param name="sampleRate">Sample rate of the sample -- presumably in Hz; confirm units at call sites.</param>
public AnalysisController(Sample currentSample, int sampleRate)
{
    this.currentSample = currentSample;
    this.sampleRate = sampleRate;
    // NOTE(review): field name "fftAlgoritm" is misspelled (missing 'h') -- rename at class level when convenient
    fftAlgoritm = new FFTAlgorithm();
    idftAlgorithm = new IDFTAlgorithm();
}
/// <summary>
/// Creates a run item for the given algorithm, deriving the display name
/// from the algorithm's name and execution time.
/// </summary>
/// <param name="algorithm">The algorithm to initialize this run from.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="algorithm"/> is null.</exception>
public Run(IAlgorithm algorithm)
    : base()
{
    // include the parameter name in the exception (was a bare ArgumentNullException)
    if (algorithm == null) throw new ArgumentNullException(nameof(algorithm));
    // interpolation replaces the previous string concatenation; output is identical
    name = $"{algorithm.Name} Run ({algorithm.ExecutionTime})";
    description = ItemDescription;
    color = Color.Black;
    Initialize(algorithm);
}
// K nearest neighbor
/// <summary>
/// Grid-based K-nearest-neighbor search: expands square "rings" around the
/// origin, carrying candidates that fall beyond the current ring over to the
/// next iteration, until K neighbors are collected or the rings are exhausted.
/// </summary>
/// <param name="s">Algorithm holding the grid container, rectangle and KNN result.</param>
/// <param name="max">Maximum number of rings to expand.</param>
/// <param name="conf">Search options (same-type filter, optional max distance).</param>
protected void UpdateKnnGridStrategy(IAlgorithm s, int max, KnnConfiguration conf)
{
    var g = s.GridContainer;
    var nn = s.Knn;
    var square = s.Rectangle.Square;
    var currRing = new List<IPDist>();
    var nextRing = new List<IPDist>();
    for (var i = 1; i <= max; i++)
    {
        // promote carried-over candidates that now fall inside ring i
        var temp = new List<IPDist>();
        foreach (var p in nextRing)
        {
            if (p.Distance < i * square) currRing.Add(p);
            else temp.Add(p);
        }
        nextRing.Clear();
        nextRing.AddRange(temp);

        var list = g.GetRing(nn.Origin, i);

        // First 9 squares, dont include origin
        if (i == 1) list.AddRange(g.GetSet(nn.Origin).Where(a => !a.Equals(nn.Origin)).ToList());

        // Only NN on same type if set
        if (conf.SameTypeOnly) list = list.Where(a => a.Type == nn.Origin.Type).ToList();

        var dataWasAdded = false;
        foreach (var p in list)
        {
            var dist = nn.Origin.Distance(p.X, p.Y);
            // lifted nullable comparison: when MaxDistance is null this is false, so no point is skipped
            if(dist >= conf.MaxDistance) continue; // not within max distance
            // points inside ring i are candidates now; farther points wait for a later ring
            if (dist < i * square) currRing.Add(new PDist { Point = p, Distance = dist });
            else nextRing.Add(new PDist { Point = p, Distance = dist });
            dataWasAdded = true;
        }

        if(conf.MaxDistance.HasValue && !dataWasAdded) break; // max distance used and no new data was added, then we are done
        if (currRing.Count >= nn.K) break; // enough neighbors? then done
    }
    if (currRing.Count < nn.K)
    {
        // not enough neighbors from closed rings: top up from carried-over candidates
        // Only NN on same type if set
        currRing.AddRange(conf.SameTypeOnly ? nextRing.Where(a => a.Point.Type == nn.Origin.Type).ToList() : nextRing);
        if (conf.MaxDistance.HasValue) currRing = currRing.Where(i => i.Distance < conf.MaxDistance.Value).ToList();
    }
    // sort by distance, then keep at most K results
    currRing.Sort();
    nn.NNs = currRing.Count > nn.K ? currRing.Take(nn.K).ToList() : currRing.ToList();
}
/********************************************************
* PUBLIC CONSTRUCTOR
*********************************************************/
/// <summary>
/// Initialize the realtime event handler with all information required for triggering daily events.
/// </summary>
/// <param name="algorithm">The running algorithm.</param>
/// <param name="feed">The live data feed.</param>
/// <param name="results">Result handler for surfacing output.</param>
/// <param name="brokerage">Brokerage instance. NOTE(review): not stored or used in this constructor -- confirm whether it is needed.</param>
/// <param name="job">Job packet. NOTE(review): also unused in this constructor.</param>
public LiveTradingRealTimeHandler(IAlgorithm algorithm, IDataFeed feed, IResultHandler results, IBrokerage brokerage, AlgorithmNodePacket job)
{
    //Initialize:
    _algorithm = algorithm;
    // events are registered later; start with an empty schedule
    _events = new List<RealTimeEvent>();
    // per-security-type market hours cache, populated on demand
    _today = new Dictionary<SecurityType, MarketToday>();
    _feed = feed;
    _results = results;
}
/// <summary>
/// Initializes a new instance of the <see cref="DefaultBrokerageMessageHandler"/> class
/// </summary>
/// <param name="algorithm">The running algorithm</param>
/// <param name="job">The job that produced the algorithm</param>
/// <param name="api">The api for the algorithm</param>
/// <param name="initialDelay">Optional delay override; falls back to <c>DefaultInitialDelay</c> when null</param>
/// <param name="openThreshold">Defines how long before market open to re-check for brokerage reconnect message; falls back to <c>DefaultOpenThreshold</c> when null</param>
public DefaultBrokerageMessageHandler(IAlgorithm algorithm, AlgorithmNodePacket job, IApi api, TimeSpan? initialDelay = null, TimeSpan? openThreshold = null)
{
    _api = api;
    _job = job;
    _algorithm = algorithm;
    // start in the connected state
    _connected = true;
    _openThreshold = openThreshold ?? DefaultOpenThreshold;
    _initialDelay = initialDelay ?? DefaultInitialDelay;
}
/// <summary>
/// Creates the chart window for an algorithm's results.
/// </summary>
/// <param name="algorithm">The algorithm whose data is charted.</param>
/// <param name="iterations">Total number of iterations.</param>
/// <param name="name">Display name of the algorithm.</param>
/// <param name="itergap">Gap between charted iterations.</param>
/// <param name="filename">Source data file name, shown in the window title.</param>
public Chart(IAlgorithm algorithm, int iterations, String name, int itergap, String filename)
{
    this.algorithm = algorithm;
    this.algorithmName = name;
    this.iterationNumber = iterations;
    this.iterGap = itergap;
    InitializeComponent();
    // window title is Polish: "Chart for data loaded from file <filename>"
    this.Text = "Wykres dla danych pobranych z pliku " + filename;
}
/// <summary>
/// Builds an analysis layer from a storage layer by decompiling the
/// algorithm's "ProcessCell" method back into source text.
/// </summary>
/// <param name="layer">Storage layer whose algorithm is decompiled.</param>
public AnalysisLayer(StorageLayer layer)
{
    AstBuilder astBuilder;
    // decompile the algorithm's ProcessCell method, receiving the AST builder via out
    var method = DecompileUtil.GetMethodCode(layer.Algorithm.GetType(), out astBuilder, "ProcessCell");
    this.Name = layer.Algorithm.GetType().Name;
    this.Code = method.Body.GetTrackedText();
    this.Algorithm = layer.Algorithm;
    this.AstBuilder = astBuilder;
}
/// <summary>
/// Produces the initial vector picture for the given algorithm.
/// Implemented by derived classes.
/// </summary>
/// <param name="algorithm">The algorithm the picture is generated for.</param>
/// <returns>The initial picture.</returns>
public abstract VectorPicture GetInitialPicture(IAlgorithm algorithm);
/// <summary>
/// No-op: this implementation ignores the supplied algorithm.
/// </summary>
/// <param name="algorithm">Ignored.</param>
public void SetAlgorithm(IAlgorithm algorithm)
{
}
/// <summary>
/// Creates a stub data manager for the given algorithm, backed by a null data
/// feed and a New York time keeper anchored at the current UTC time.
/// </summary>
/// <param name="algorithm">The algorithm the stub serves.</param>
public DataManagerStub(IAlgorithm algorithm)
    : this(new NullDataFeed(), algorithm, new TimeKeeper(DateTime.UtcNow, TimeZones.NewYork))
{
}
/// <summary>
/// Setup the algorithm cash, dates and portfolio as desired.
/// </summary>
/// <param name="algorithm">Existing algorithm instance</param>
/// <param name="brokerage">New brokerage instance</param>
/// <param name="baseJob">Backtesting job</param>
/// <returns>Boolean true on successfully setting up the console.</returns>
public bool Setup(IAlgorithm algorithm, out IBrokerage brokerage, AlgorithmNodePacket baseJob)
{
    var initializeComplete = false;
    // the out parameter is always assigned, even when setup fails below
    brokerage = new BacktestingBrokerage(algorithm);
    try
    {
        //Set common variables for console programs:
        if (baseJob.Type == PacketType.BacktestNode)
        {
            var backtestJob = baseJob as BacktestNodePacket;

            //Set the limits on the algorithm assets (for local no limits)
            algorithm.SetAssetLimits(999, 999, 999);

            //Setup Base Algorithm:
            algorithm.Initialize();

            //Construct the backtest job packet:
            backtestJob.PeriodStart = algorithm.StartDate;
            backtestJob.PeriodFinish = algorithm.EndDate;
            backtestJob.BacktestId = "LOCALHOST";
            backtestJob.UserId = 1001;
            backtestJob.Type = PacketType.BacktestNode;

            //Endpoints:
            backtestJob.TransactionEndpoint = TransactionHandlerEndpoint.Backtesting;
            backtestJob.ResultEndpoint = ResultHandlerEndpoint.Console;
            backtestJob.DataEndpoint = DataFeedEndpoint.FileSystem;
            backtestJob.RealTimeEndpoint = RealTimeEndpoint.Backtesting;
            backtestJob.SetupEndpoint = SetupHandlerEndpoint.Console;

            //Backtest Specific Parameters:
            StartingDate = backtestJob.PeriodStart;
            StartingCapital = algorithm.Portfolio.Cash;
        }
        else
        {
            var liveJob = baseJob as LiveNodePacket;

            //Live Job Parameters:
            // NOTE(review): self-assignment -- has no effect unless the property setter has side effects; confirm intent
            liveJob.UserId = liveJob.UserId;
            liveJob.DeployId = "LOCALHOST";
            liveJob.IssuedAt = DateTime.Now.Subtract(TimeSpan.FromSeconds(86399 - 60)); //For testing, first access token expires in 60 sec. refresh.
            liveJob.LifeTime = TimeSpan.FromSeconds(86399);
            liveJob.AccessToken = "123456";
            liveJob.AccountId = 123456;
            liveJob.RefreshToken = "";
            liveJob.Type = PacketType.LiveNode;

            //Endpoints:
            liveJob.TransactionEndpoint = TransactionHandlerEndpoint.Backtesting;
            liveJob.ResultEndpoint = ResultHandlerEndpoint.LiveTrading;
            // route market data through the test feed when test mode is configured
            bool testLiveTradingEnabled = Config.GetBool("test-live-trading-enabled", defaultValue: false);
            liveJob.DataEndpoint = testLiveTradingEnabled ? DataFeedEndpoint.Test : DataFeedEndpoint.LiveTrading;
            liveJob.RealTimeEndpoint = RealTimeEndpoint.LiveTrading;
            liveJob.SetupEndpoint = SetupHandlerEndpoint.Console;

            //Call in the paper trading setup:
            var setup = new PaperTradingSetupHandler();
            setup.Setup(algorithm, out brokerage, baseJob);

            //Live Specific Parameters:
            StartingDate = DateTime.Now;
            StartingCapital = algorithm.Portfolio.Cash;
        }
    }
    catch (Exception err)
    {
        // best-effort: failures are logged; success is reported via the Errors collection below
        Log.Error("ConsoleSetupHandler().Setup(): " + err.Message);
    }
    if (Errors.Count == 0)
    {
        initializeComplete = true;
    }
    return(initializeComplete);
}
/// <summary>
/// Reads a named result from the algorithm and unwraps its double payload.
/// </summary>
/// <param name="a">Algorithm whose results are queried.</param>
/// <param name="resultName">Name of the result entry to read.</param>
/// <returns>The result's value as a double.</returns>
public static double GetDoubleResult(IAlgorithm a, string resultName)
{
    var resultValue = a.Results[resultName].Value;
    return ((DoubleValue)resultValue).Value;
}
/// <summary>
/// No-op: this implementation requires no initialization.
/// </summary>
/// <param name="job">Ignored.</param>
/// <param name="algorithm">Ignored.</param>
/// <param name="messagingHandler">Ignored.</param>
/// <param name="api">Ignored.</param>
public void Initialize(AlgorithmNodePacket job, IAlgorithm algorithm, IMessagingHandler messagingHandler, IApi api)
{
}
/// <summary>
/// No-op: this implementation requires no initialization.
/// </summary>
/// <param name="algorithm">Ignored.</param>
/// <param name="brokerage">Ignored.</param>
/// <param name="resultHandler">Ignored.</param>
public void Initialize(IAlgorithm algorithm, IBrokerage brokerage, IResultHandler resultHandler)
{
}
/// <summary>
/// Loads the algorithm instance obtained from the Mahjong factory.
/// </summary>
public void Loading()
{
    _algorithm = Mahjong.GetAlgorithm();
}
/// <summary>
/// No-op hook: nothing to do after the algorithm is initialized.
/// </summary>
/// <param name="algorithm">The initialized algorithm (unused).</param>
public void OnAfterAlgorithmInitialized(IAlgorithm algorithm)
{
}
/// <summary> /// Launch the algorithm manager to run this strategy /// </summary> /// <param name="job">Algorithm job</param> /// <param name="algorithm">Algorithm instance</param> /// <param name="feed">Datafeed object</param> /// <param name="transactions">Transaction manager object</param> /// <param name="results">Result handler object</param> /// <param name="realtime">Realtime processing object</param> /// <param name="token">Cancellation token</param> /// <remarks>Modify with caution</remarks> public void Run(AlgorithmNodePacket job, IAlgorithm algorithm, IDataFeed feed, ITransactionHandler transactions, IResultHandler results, IRealTimeHandler realtime, CancellationToken token) { //Initialize: _dataPointCount = 0; var startingPortfolioValue = algorithm.Portfolio.TotalPortfolioValue; var backtestMode = (job.Type == PacketType.BacktestNode); var methodInvokers = new Dictionary <Type, MethodInvoker>(); var marginCallFrequency = TimeSpan.FromMinutes(5); var nextMarginCallTime = DateTime.MinValue; //Initialize Properties: _algorithmId = job.AlgorithmId; _algorithmState = AlgorithmStatus.Running; _previousTime = algorithm.StartDate.Date; //Create the method accessors to push generic types into algorithm: Find all OnData events: // Algorithm 2.0 data accessors var hasOnDataTradeBars = AddMethodInvoker <TradeBars>(algorithm, methodInvokers); var hasOnDataTicks = AddMethodInvoker <Ticks>(algorithm, methodInvokers); // dividend and split events var hasOnDataDividends = AddMethodInvoker <Dividends>(algorithm, methodInvokers); var hasOnDataSplits = AddMethodInvoker <Splits>(algorithm, methodInvokers); // Algorithm 3.0 data accessors var hasOnDataSlice = algorithm.GetType().GetMethods() .Where(x => x.Name == "OnData" && x.GetParameters().Length == 1 && x.GetParameters()[0].ParameterType == typeof(Slice)) .FirstOrDefault(x => x.DeclaringType == algorithm.GetType()) != null; //Go through the subscription types and create invokers to trigger the event handlers for each 
custom type: foreach (var config in feed.Subscriptions) { //If type is a tradebar, combine tradebars and ticks into unified array: if (config.Type.Name != "TradeBar" && config.Type.Name != "Tick") { //Get the matching method for this event handler - e.g. public void OnData(Quandl data) { .. } var genericMethod = (algorithm.GetType()).GetMethod("OnData", new[] { config.Type }); //If we already have this Type-handler then don't add it to invokers again. if (methodInvokers.ContainsKey(config.Type)) { continue; } //If we couldnt find the event handler, let the user know we can't fire that event. if (genericMethod == null && !hasOnDataSlice) { algorithm.RunTimeError = new Exception("Data event handler not found, please create a function matching this template: public void OnData(" + config.Type.Name + " data) { }"); _algorithmState = AlgorithmStatus.RuntimeError; return; } if (genericMethod != null) { methodInvokers.Add(config.Type, genericMethod.DelegateForCallMethod()); } } } //Loop over the queues: get a data collection, then pass them all into relevent methods in the algorithm. Log.Trace("AlgorithmManager.Run(): Begin DataStream - Start: " + algorithm.StartDate + " Stop: " + algorithm.EndDate); foreach (var timeSlice in feed.Bridge.GetConsumingEnumerable(token)) { // reset our timer on each loop _currentTimeStepTime = DateTime.UtcNow; //Check this backtest is still running: if (_algorithmState != AlgorithmStatus.Running) { Log.Error(string.Format("AlgorithmManager.Run(): Algorthm state changed to {0} at {1}", _algorithmState, timeSlice.Time)); break; } //Execute with TimeLimit Monitor: if (token.IsCancellationRequested) { Log.Error("AlgorithmManager.Run(): CancellationRequestion at " + timeSlice.Time); return; } var time = timeSlice.Time; var newData = timeSlice.Data; //If we're in backtest mode we need to capture the daily performance. 
We do this here directly //before updating the algorithm state with the new data from this time step, otherwise we'll //produce incorrect samples (they'll take into account this time step's new price values) if (backtestMode) { //On day-change sample equity and daily performance for statistics calculations if (_previousTime.Date != time.Date) { //Sample the portfolio value over time for chart. results.SampleEquity(_previousTime, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4)); //Check for divide by zero if (startingPortfolioValue == 0m) { results.SamplePerformance(_previousTime.Date, 0); } else { results.SamplePerformance(_previousTime.Date, Math.Round((algorithm.Portfolio.TotalPortfolioValue - startingPortfolioValue) * 100 / startingPortfolioValue, 10)); } startingPortfolioValue = algorithm.Portfolio.TotalPortfolioValue; } } //Update algorithm state after capturing performance from previous day //Set the algorithm and real time handler's time algorithm.SetDateTime(time); realtime.SetTime(time); //On each time step push the real time prices to the cashbook so we can have updated conversion rates algorithm.Portfolio.CashBook.Update(newData); //Update the securities properties: first before calling user code to avoid issues with data algorithm.Securities.Update(time, newData); // process fill models on the updated data before entering algorithm, applies to all non-market orders transactions.ProcessSynchronousEvents(); //Check if the user's signalled Quit: loop over data until day changes. if (algorithm.GetQuit()) { _algorithmState = AlgorithmStatus.Quit; Log.Trace("AlgorithmManager.Run(): Algorithm quit requested."); break; } if (algorithm.RunTimeError != null) { _algorithmState = AlgorithmStatus.RuntimeError; Log.Trace(string.Format("AlgorithmManager.Run(): Algorithm encountered a runtime error at {0}. 
Error: {1}", timeSlice.Time, algorithm.RunTimeError)); break; } // perform margin calls, in live mode we can also use realtime to emit these if (time >= nextMarginCallTime || (_liveMode && nextMarginCallTime > DateTime.Now)) { // determine if there are possible margin call orders to be executed bool issueMarginCallWarning; var marginCallOrders = algorithm.Portfolio.ScanForMarginCall(out issueMarginCallWarning); if (marginCallOrders.Count != 0) { try { // tell the algorithm we're about to issue the margin call algorithm.OnMarginCall(marginCallOrders); } catch (Exception err) { algorithm.RunTimeError = err; _algorithmState = AlgorithmStatus.RuntimeError; Log.Error("AlgorithmManager.Run(): RuntimeError: OnMarginCall: " + err.Message + " STACK >>> " + err.StackTrace); return; } // execute the margin call orders var executedTickets = algorithm.Portfolio.MarginCallModel.ExecuteMarginCall(marginCallOrders); foreach (var ticket in executedTickets) { algorithm.Error(string.Format("{0} - Executed MarginCallOrder: {1} - Quantity: {2} @ {3}", algorithm.Time, ticket.Symbol, ticket.Quantity, ticket.OrderEvents.Last().FillPrice)); } } // we didn't perform a margin call, but got the warning flag back, so issue the warning to the algorithm else if (issueMarginCallWarning) { try { algorithm.OnMarginCallWarning(); } catch (Exception err) { algorithm.RunTimeError = err; _algorithmState = AlgorithmStatus.RuntimeError; Log.Error("AlgorithmManager.Run(): RuntimeError: OnMarginCallWarning: " + err.Message + " STACK >>> " + err.StackTrace); } } nextMarginCallTime = time + marginCallFrequency; } //Trigger the data events: Invoke the types we have data for: var newBars = new TradeBars(time); var newTicks = new Ticks(time); var newDividends = new Dividends(time); var newSplits = new Splits(time); //Invoke all non-tradebars, non-ticks methods and build up the TradeBars and Ticks dictionaries // --> i == Subscription Configuration Index, so we don't need to compare types. 
foreach (var i in newData.Keys) { //Data point and config of this point: var dataPoints = newData[i]; var config = feed.Subscriptions[i]; //Keep track of how many data points we've processed _dataPointCount += dataPoints.Count; //We don't want to pump data that we added just for currency conversions if (config.IsInternalFeed) { continue; } //Create TradeBars Unified Data --> OR --> invoke generic data event. One loop. // Aggregate Dividends and Splits -- invoke portfolio application methods foreach (var dataPoint in dataPoints) { var dividend = dataPoint as Dividend; if (dividend != null) { Log.Trace("AlgorithmManager.Run(): Applying Dividend for " + dividend.Symbol); // if this is a dividend apply to portfolio algorithm.Portfolio.ApplyDividend(dividend); if (hasOnDataDividends) { // and add to our data dictionary to pump into OnData(Dividends data) newDividends.Add(dividend); } continue; } var split = dataPoint as Split; if (split != null) { Log.Trace("AlgorithmManager.Run(): Applying Split for " + split.Symbol); // if this is a split apply to portfolio algorithm.Portfolio.ApplySplit(split); if (hasOnDataSplits) { // and add to our data dictionary to pump into OnData(Splits data) newSplits.Add(split); } continue; } //Update registered consolidators for this symbol index try { for (var j = 0; j < config.Consolidators.Count; j++) { config.Consolidators[j].Update(dataPoint); } } catch (Exception err) { algorithm.RunTimeError = err; _algorithmState = AlgorithmStatus.RuntimeError; Log.Error("AlgorithmManager.Run(): RuntimeError: Consolidators update: " + err.Message); return; } // TRADEBAR -- add to our dictionary if (dataPoint.DataType == MarketDataType.TradeBar) { var bar = dataPoint as TradeBar; if (bar != null) { newBars[bar.Symbol] = bar; continue; } } // TICK -- add to our dictionary if (dataPoint.DataType == MarketDataType.Tick) { var tick = dataPoint as Tick; if (tick != null) { List <Tick> ticks; if (!newTicks.TryGetValue(tick.Symbol, out ticks)) { ticks = new 
List <Tick>(3); newTicks.Add(tick.Symbol, ticks); } ticks.Add(tick); continue; } } // if it was nothing else then it must be custom data // CUSTOM DATA -- invoke on data method //Send data into the generic algorithm event handlers try { MethodInvoker methodInvoker; if (methodInvokers.TryGetValue(config.Type, out methodInvoker)) { methodInvoker(algorithm, dataPoint); } } catch (Exception err) { algorithm.RunTimeError = err; _algorithmState = AlgorithmStatus.RuntimeError; Log.Error("AlgorithmManager.Run(): RuntimeError: Custom Data: " + err.Message + " STACK >>> " + err.StackTrace); return; } } } try { // fire off the dividend and split events before pricing events if (hasOnDataDividends && newDividends.Count != 0) { methodInvokers[typeof(Dividends)](algorithm, newDividends); } if (hasOnDataSplits && newSplits.Count != 0) { methodInvokers[typeof(Splits)](algorithm, newSplits); } } catch (Exception err) { algorithm.RunTimeError = err; _algorithmState = AlgorithmStatus.RuntimeError; Log.Error("AlgorithmManager.Run(): RuntimeError: Dividends/Splits: " + err.Message + " STACK >>> " + err.StackTrace); return; } //After we've fired all other events in this second, fire the pricing events: try { if (hasOnDataTradeBars && newBars.Count > 0) { methodInvokers[typeof(TradeBars)](algorithm, newBars); } if (hasOnDataTicks && newTicks.Count > 0) { methodInvokers[typeof(Ticks)](algorithm, newTicks); } } catch (Exception err) { algorithm.RunTimeError = err; _algorithmState = AlgorithmStatus.RuntimeError; Log.Error("AlgorithmManager.Run(): RuntimeError: New Style Mode: " + err.Message + " STACK >>> " + err.StackTrace); return; } // EVENT HANDLER v3.0 -- all data in a single event var slice = new Slice(time, newData.Values.SelectMany(x => x), newBars.Count == 0 ? null : newBars, newTicks.Count == 0 ? null : newTicks, newSplits.Count == 0 ? null : newSplits, newDividends.Count == 0 ? 
null : newDividends ); algorithm.OnData(slice); //If its the historical/paper trading models, wait until market orders have been "filled" // Manually trigger the event handler to prevent thread switch. transactions.ProcessSynchronousEvents(); //Save the previous time for the sample calculations _previousTime = time; // Process any required events of the results handler such as sampling assets, equity, or stock prices. results.ProcessSynchronousEvents(); } // End of ForEach feed.Bridge.GetConsumingEnumerable // stop timing the loops _currentTimeStepTime = DateTime.MinValue; //Stream over:: Send the final packet and fire final events: Log.Trace("AlgorithmManager.Run(): Firing On End Of Algorithm..."); try { algorithm.OnEndOfAlgorithm(); } catch (Exception err) { _algorithmState = AlgorithmStatus.RuntimeError; algorithm.RunTimeError = new Exception("Error running OnEndOfAlgorithm(): " + err.Message, err.InnerException); Log.Error("AlgorithmManager.OnEndOfAlgorithm(): " + err.Message + " STACK >>> " + err.StackTrace); return; } // Process any required events of the results handler such as sampling assets, equity, or stock prices. results.ProcessSynchronousEvents(forceProcess: true); //Liquidate Holdings for Calculations: if (_algorithmState == AlgorithmStatus.Liquidated && _liveMode) { Log.Trace("AlgorithmManager.Run(): Liquidating algorithm holdings..."); algorithm.Liquidate(); results.LogMessage("Algorithm Liquidated"); results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Liquidated); } //Manually stopped the algorithm if (_algorithmState == AlgorithmStatus.Stopped) { Log.Trace("AlgorithmManager.Run(): Stopping algorithm..."); results.LogMessage("Algorithm Stopped"); results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Stopped); } //Backtest deleted. 
if (_algorithmState == AlgorithmStatus.Deleted) { Log.Trace("AlgorithmManager.Run(): Deleting algorithm..."); results.DebugMessage("Algorithm Id:(" + job.AlgorithmId + ") Deleted by request."); results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Deleted); } //Algorithm finished, send regardless of commands: results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Completed); //Take final samples: results.SampleRange(algorithm.GetChartUpdates()); results.SampleEquity(_previousTime, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4)); results.SamplePerformance(_previousTime, Math.Round((algorithm.Portfolio.TotalPortfolioValue - startingPortfolioValue) * 100 / startingPortfolioValue, 10)); } // End of Run();
/// <summary>
/// Launch the algorithm manager to run this strategy
/// </summary>
/// <param name="job">Algorithm job</param>
/// <param name="algorithm">Algorithm instance</param>
/// <param name="feed">Datafeed object</param>
/// <param name="transactions">Transaction manager object</param>
/// <param name="results">Result handler object</param>
/// <param name="realtime">Realtime processing object</param>
/// <param name="commands">The command queue for relaying external commands to the algorithm</param>
/// <param name="token">Cancellation token</param>
/// <remarks>Modify with caution</remarks>
public void Run(AlgorithmNodePacket job, IAlgorithm algorithm, IDataFeed feed, ITransactionHandler transactions, IResultHandler results, IRealTimeHandler realtime, ICommandQueueHandler commands, CancellationToken token)
{
    //Initialize:
    _dataPointCount = 0;
    _algorithm = algorithm;
    // snapshot the portfolio value at the start of each sampled day; used for daily performance %
    var portfolioValue = algorithm.Portfolio.TotalPortfolioValue;
    var backtestMode = (job.Type == PacketType.BacktestNode);
    var methodInvokers = new Dictionary<Type, MethodInvoker>();
    var marginCallFrequency = TimeSpan.FromMinutes(5);
    var nextMarginCallTime = DateTime.MinValue;
    var settlementScanFrequency = TimeSpan.FromMinutes(30);
    var nextSettlementScanTime = DateTime.MinValue;
    // order tickets submitted for delisted securities; polled each loop until filled
    var delistingTickets = new List<OrderTicket>();

    //Initialize Properties:
    _algorithmId = job.AlgorithmId;
    _algorithm.Status = AlgorithmStatus.Running;
    _previousTime = algorithm.StartDate.Date;

    //Create the method accessors to push generic types into algorithm: Find all OnData events:

    // Algorithm 2.0 data accessors
    var hasOnDataTradeBars = AddMethodInvoker<TradeBars>(algorithm, methodInvokers);
    var hasOnDataTicks = AddMethodInvoker<Ticks>(algorithm, methodInvokers);

    // dividend and split events
    var hasOnDataDividends = AddMethodInvoker<Dividends>(algorithm, methodInvokers);
    var hasOnDataSplits = AddMethodInvoker<Splits>(algorithm, methodInvokers);
    var hasOnDataDelistings = AddMethodInvoker<Delistings>(algorithm, methodInvokers);
    var hasOnDataSymbolChangedEvents = AddMethodInvoker<SymbolChangedEvents>(algorithm, methodInvokers);

    // Algorithm 3.0 data accessors: detect an OnData(Slice) override declared directly on the user's type
    var hasOnDataSlice = algorithm.GetType().GetMethods()
        .Where(x => x.Name == "OnData" && x.GetParameters().Length == 1 && x.GetParameters()[0].ParameterType == typeof(Slice))
        .FirstOrDefault(x => x.DeclaringType == algorithm.GetType()) != null;

    //Go through the subscription types and create invokers to trigger the event handlers for each custom type:
    foreach (var config in algorithm.SubscriptionManager.Subscriptions)
    {
        //If type is a tradebar, combine tradebars and ticks into unified array:
        if (config.Type.Name != "TradeBar" && config.Type.Name != "Tick" && !config.IsInternalFeed)
        {
            //Get the matching method for this event handler - e.g. public void OnData(Quandl data) { .. }
            var genericMethod = (algorithm.GetType()).GetMethod("OnData", new[] { config.Type });

            //If we already have this Type-handler then don't add it to invokers again.
            if (methodInvokers.ContainsKey(config.Type)) { continue; }

            //If we couldnt find the event handler, let the user know we can't fire that event.
            if (genericMethod == null && !hasOnDataSlice)
            {
                algorithm.RunTimeError = new Exception("Data event handler not found, please create a function matching this template: public void OnData(" + config.Type.Name + " data) {  }");
                _algorithm.Status = AlgorithmStatus.RuntimeError;
                return;
            }
            if (genericMethod != null)
            {
                methodInvokers.Add(config.Type, genericMethod.DelegateForCallMethod());
            }
        }
    }

    //Loop over the queues: get a data collection, then pass them all into relevent methods in the algorithm.
    Log.Trace("AlgorithmManager.Run(): Begin DataStream - Start: " + algorithm.StartDate + " Stop: " + algorithm.EndDate);
    foreach (var timeSlice in Stream(job, algorithm, feed, results, token))
    {
        // reset our timer on each loop
        _currentTimeStepTime = DateTime.UtcNow;

        //Check this backtest is still running:
        if (_algorithm.Status != AlgorithmStatus.Running)
        {
            Log.Error(string.Format("AlgorithmManager.Run(): Algorithm state changed to {0} at {1}", _algorithm.Status, timeSlice.Time));
            break;
        }

        //Execute with TimeLimit Monitor:
        if (token.IsCancellationRequested)
        {
            Log.Error("AlgorithmManager.Run(): CancellationRequestion at " + timeSlice.Time);
            return;
        }

        // before doing anything, check our command queue
        foreach (var command in commands.GetCommands())
        {
            if (command == null) { continue; }
            Log.Trace("AlgorithmManager.Run(): Executing {0}", command);
            CommandResultPacket result;
            try
            {
                result = command.Run(algorithm);
            }
            catch (Exception err)
            {
                // a failing command must not kill the algorithm loop; report and continue
                Log.Error(err);
                algorithm.Error(string.Format("{0} Error: {1}", command.GetType().Name, err.Message));
                result = new CommandResultPacket(command, false);
            }

            // send the result of the command off to the result handler
            results.Messages.Enqueue(result);
        }

        var time = timeSlice.Time;
        _dataPointCount += timeSlice.DataPointCount;

        //If we're in backtest mode we need to capture the daily performance. We do this here directly
        //before updating the algorithm state with the new data from this time step, otherwise we'll
        //produce incorrect samples (they'll take into account this time step's new price values)
        if (backtestMode)
        {
            //On day-change sample equity and daily performance for statistics calculations
            if (_previousTime.Date != time.Date)
            {
                SampleBenchmark(algorithm, results, _previousTime.Date);

                //Sample the portfolio value over time for chart.
                results.SampleEquity(_previousTime, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4));

                //Check for divide by zero
                if (portfolioValue == 0m)
                {
                    results.SamplePerformance(_previousTime.Date, 0);
                }
                else
                {
                    results.SamplePerformance(_previousTime.Date, Math.Round((algorithm.Portfolio.TotalPortfolioValue - portfolioValue) * 100 / portfolioValue, 10));
                }
                portfolioValue = algorithm.Portfolio.TotalPortfolioValue;
            }
        }
        else
        {
            // live mode continously sample the benchmark
            SampleBenchmark(algorithm, results, time);
        }

        //Update algorithm state after capturing performance from previous day

        //Set the algorithm and real time handler's time
        algorithm.SetDateTime(time);

        if (timeSlice.Slice.SymbolChangedEvents.Count != 0)
        {
            if (hasOnDataSymbolChangedEvents)
            {
                methodInvokers[typeof(SymbolChangedEvents)](algorithm, timeSlice.Slice.SymbolChangedEvents);
            }
            foreach (var symbol in timeSlice.Slice.SymbolChangedEvents.Keys)
            {
                // cancel all orders for the old symbol
                foreach (var ticket in transactions.GetOrderTickets(x => x.Status.IsOpen() && x.Symbol == symbol))
                {
                    ticket.Cancel("Open order cancelled on symbol changed event");
                }
            }
        }

        if (timeSlice.SecurityChanges != SecurityChanges.None)
        {
            foreach (var security in timeSlice.SecurityChanges.AddedSecurities)
            {
                if (!algorithm.Securities.ContainsKey(security.Symbol))
                {
                    // add the new security
                    algorithm.Securities.Add(security);
                }
            }
        }

        //On each time step push the real time prices to the cashbook so we can have updated conversion rates
        foreach (var kvp in timeSlice.CashBookUpdateData)
        {
            kvp.Key.Update(kvp.Value);
        }

        //Update the securities properties: first before calling user code to avoid issues with data
        foreach (var kvp in timeSlice.SecuritiesUpdateData)
        {
            kvp.Key.SetMarketPrice(kvp.Value);

            // Send market price updates to the TradeBuilder
            if (kvp.Value != null)
            {
                algorithm.TradeBuilder.SetMarketPrice(kvp.Key.Symbol, kvp.Value.Price);
            }
        }

        // fire real time events after we've updated based on the new data
        realtime.SetTime(timeSlice.Time);

        // process fill models on the updated data before entering algorithm, applies to all non-market orders
        transactions.ProcessSynchronousEvents();

        if (delistingTickets.Count != 0)
        {
            for (int i = 0; i < delistingTickets.Count; i++)
            {
                var ticket = delistingTickets[i];
                if (ticket.Status == OrderStatus.Filled)
                {
                    // once the liquidation order fills, drop the delisted security entirely
                    algorithm.Securities.Remove(ticket.Symbol);
                    delistingTickets.RemoveAt(i--);
                    Log.Trace("AlgorithmManager.Run(): Delisted Security removed: " + ticket.Symbol.ToString());
                }
            }
        }

        //Check if the user's signalled Quit: loop over data until day changes.
        if (algorithm.Status == AlgorithmStatus.Stopped)
        {
            Log.Trace("AlgorithmManager.Run(): Algorithm quit requested.");
            break;
        }
        if (algorithm.RunTimeError != null)
        {
            _algorithm.Status = AlgorithmStatus.RuntimeError;
            Log.Trace(string.Format("AlgorithmManager.Run(): Algorithm encountered a runtime error at {0}. Error: {1}", timeSlice.Time, algorithm.RunTimeError));
            break;
        }

        // perform margin calls, in live mode we can also use realtime to emit these
        if (time >= nextMarginCallTime || (_liveMode && nextMarginCallTime > DateTime.UtcNow))
        {
            // determine if there are possible margin call orders to be executed
            bool issueMarginCallWarning;
            var marginCallOrders = algorithm.Portfolio.ScanForMarginCall(out issueMarginCallWarning);
            if (marginCallOrders.Count != 0)
            {
                var executingMarginCall = false;
                try
                {
                    // tell the algorithm we're about to issue the margin call
                    algorithm.OnMarginCall(marginCallOrders);

                    executingMarginCall = true;

                    // execute the margin call orders
                    var executedTickets = algorithm.Portfolio.MarginCallModel.ExecuteMarginCall(marginCallOrders);
                    foreach (var ticket in executedTickets)
                    {
                        algorithm.Error(string.Format("{0} - Executed MarginCallOrder: {1} - Quantity: {2} @ {3}", algorithm.Time, ticket.Symbol, ticket.Quantity, ticket.AverageFillPrice));
                    }
                }
                catch (Exception err)
                {
                    algorithm.RunTimeError = err;
                    _algorithm.Status = AlgorithmStatus.RuntimeError;
                    // executingMarginCall tells us which of the two user hooks threw
                    var locator = executingMarginCall ? "Portfolio.MarginCallModel.ExecuteMarginCall" : "OnMarginCall";
                    Log.Error(string.Format("AlgorithmManager.Run(): RuntimeError: {0}: ", locator) + err);
                    return;
                }
            }
            // we didn't perform a margin call, but got the warning flag back, so issue the warning to the algorithm
            else if (issueMarginCallWarning)
            {
                try
                {
                    algorithm.OnMarginCallWarning();
                }
                catch (Exception err)
                {
                    algorithm.RunTimeError = err;
                    _algorithm.Status = AlgorithmStatus.RuntimeError;
                    Log.Error("AlgorithmManager.Run(): RuntimeError: OnMarginCallWarning: " + err);
                    return;
                }
            }

            nextMarginCallTime = time + marginCallFrequency;
        }

        // perform check for settlement of unsettled funds
        if (time >= nextSettlementScanTime || (_liveMode && nextSettlementScanTime > DateTime.UtcNow))
        {
            algorithm.Portfolio.ScanForCashSettlement(algorithm.UtcTime);

            nextSettlementScanTime = time + settlementScanFrequency;
        }

        // before we call any events, let the algorithm know about universe changes
        if (timeSlice.SecurityChanges != SecurityChanges.None)
        {
            try
            {
                algorithm.OnSecuritiesChanged(timeSlice.SecurityChanges);
            }
            catch (Exception err)
            {
                algorithm.RunTimeError = err;
                _algorithm.Status = AlgorithmStatus.RuntimeError;
                Log.Error("AlgorithmManager.Run(): RuntimeError: OnSecuritiesChanged event: " + err);
                return;
            }
        }

        // apply dividends
        foreach (var dividend in timeSlice.Slice.Dividends.Values)
        {
            Log.Trace("AlgorithmManager.Run(): Applying Dividend for " + dividend.Symbol.ToString(), true);
            algorithm.Portfolio.ApplyDividend(dividend);
        }

        // apply splits
        foreach (var split in timeSlice.Slice.Splits.Values)
        {
            try
            {
                Log.Trace("AlgorithmManager.Run(): Applying Split for " + split.Symbol.ToString(), true);
                algorithm.Portfolio.ApplySplit(split);

                // apply the split to open orders as well in raw mode, all other modes are split adjusted
                if (_liveMode || algorithm.Securities[split.Symbol].SubscriptionDataConfig.DataNormalizationMode == DataNormalizationMode.Raw)
                {
                    // in live mode we always want to have our order match the order at the brokerage, so apply the split to the orders
                    var openOrders = transactions.GetOrderTickets(ticket => ticket.Status.IsOpen() && ticket.Symbol == split.Symbol);
                    algorithm.BrokerageModel.ApplySplit(openOrders.ToList(), split);
                }
            }
            catch (Exception err)
            {
                algorithm.RunTimeError = err;
                _algorithm.Status = AlgorithmStatus.RuntimeError;
                Log.Error("AlgorithmManager.Run(): RuntimeError: Split event: " + err);
                return;
            }
        }

        //Update registered consolidators for this symbol index
        try
        {
            foreach (var kvp in timeSlice.ConsolidatorUpdateData)
            {
                var consolidators = kvp.Key.Consolidators;
                foreach (var dataPoint in kvp.Value)
                {
                    foreach (var consolidator in consolidators)
                    {
                        consolidator.Update(dataPoint);
                    }
                }
            }
        }
        catch (Exception err)
        {
            algorithm.RunTimeError = err;
            _algorithm.Status = AlgorithmStatus.RuntimeError;
            Log.Error("AlgorithmManager.Run(): RuntimeError: Consolidators update: " + err);
            return;
        }

        // fire custom event handlers
        foreach (var kvp in timeSlice.CustomData)
        {
            MethodInvoker methodInvoker;
            var type = kvp.Key.SubscriptionDataConfig.Type;
            if (!methodInvokers.TryGetValue(type, out methodInvoker))
            {
                continue;
            }
            try
            {
                foreach (var dataPoint in kvp.Value)
                {
                    if (type.IsInstanceOfType(dataPoint))
                    {
                        methodInvoker(algorithm, dataPoint);
                    }
                }
            }
            catch (Exception err)
            {
                algorithm.RunTimeError = err;
                _algorithm.Status = AlgorithmStatus.RuntimeError;
                Log.Error("AlgorithmManager.Run(): RuntimeError: Custom Data: " + err);
                return;
            }
        }

        try
        {
            // fire off the dividend and split events before pricing events
            if (hasOnDataDividends && timeSlice.Slice.Dividends.Count != 0)
            {
                methodInvokers[typeof(Dividends)](algorithm, timeSlice.Slice.Dividends);
            }
            if (hasOnDataSplits && timeSlice.Slice.Splits.Count != 0)
            {
                methodInvokers[typeof(Splits)](algorithm, timeSlice.Slice.Splits);
            }
            if (hasOnDataDelistings && timeSlice.Slice.Delistings.Count != 0)
            {
                methodInvokers[typeof(Delistings)](algorithm, timeSlice.Slice.Delistings);
            }
        }
        catch (Exception err)
        {
            algorithm.RunTimeError = err;
            _algorithm.Status = AlgorithmStatus.RuntimeError;
            Log.Error("AlgorithmManager.Run(): RuntimeError: Dividends/Splits/Delistings: " + err);
            return;
        }

        // run the delisting logic after firing delisting events
        HandleDelistedSymbols(algorithm, timeSlice.Slice.Delistings, delistingTickets);

        //After we've fired all other events in this second, fire the pricing events:
        try
        {
            if (hasOnDataTradeBars && timeSlice.Slice.Bars.Count > 0)
            {
                methodInvokers[typeof(TradeBars)](algorithm, timeSlice.Slice.Bars);
            }
            if (hasOnDataTicks && timeSlice.Slice.Ticks.Count > 0)
            {
                methodInvokers[typeof(Ticks)](algorithm, timeSlice.Slice.Ticks);
            }
        }
        catch (Exception err)
        {
            algorithm.RunTimeError = err;
            _algorithm.Status = AlgorithmStatus.RuntimeError;
            Log.Error("AlgorithmManager.Run(): RuntimeError: New Style Mode: " + err);
            return;
        }

        try
        {
            if (timeSlice.Slice.HasData)
            {
                // EVENT HANDLER v3.0 -- all data in a single event
                algorithm.OnData(timeSlice.Slice);
            }
        }
        catch (Exception err)
        {
            algorithm.RunTimeError = err;
            _algorithm.Status = AlgorithmStatus.RuntimeError;
            Log.Error("AlgorithmManager.Run(): RuntimeError: Slice: " + err);
            return;
        }

        //If its the historical/paper trading models, wait until market orders have been "filled"
        // Manually trigger the event handler to prevent thread switch.
        transactions.ProcessSynchronousEvents();

        //Save the previous time for the sample calculations
        _previousTime = time;

        // Process any required events of the results handler such as sampling assets, equity, or stock prices.
        results.ProcessSynchronousEvents();
    } // End of ForEach feed.Bridge.GetConsumingEnumerable

    // stop timing the loops
    _currentTimeStepTime = DateTime.MinValue;

    //Stream over:: Send the final packet and fire final events:
    Log.Trace("AlgorithmManager.Run(): Firing On End Of Algorithm...");
    try
    {
        algorithm.OnEndOfAlgorithm();
    }
    catch (Exception err)
    {
        _algorithm.Status = AlgorithmStatus.RuntimeError;
        algorithm.RunTimeError = new Exception("Error running OnEndOfAlgorithm(): " + err.Message, err.InnerException);
        Log.Error("AlgorithmManager.OnEndOfAlgorithm(): " + err);
        return;
    }

    // Process any required events of the results handler such as sampling assets, equity, or stock prices.
    results.ProcessSynchronousEvents(forceProcess: true);

    //Liquidate Holdings for Calculations:
    if (_algorithm.Status == AlgorithmStatus.Liquidated && _liveMode)
    {
        Log.Trace("AlgorithmManager.Run(): Liquidating algorithm holdings...");
        algorithm.Liquidate();
        results.LogMessage("Algorithm Liquidated");
        results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Liquidated);
    }

    //Manually stopped the algorithm
    if (_algorithm.Status == AlgorithmStatus.Stopped)
    {
        Log.Trace("AlgorithmManager.Run(): Stopping algorithm...");
        results.LogMessage("Algorithm Stopped");
        results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Stopped);
    }

    //Backtest deleted.
    if (_algorithm.Status == AlgorithmStatus.Deleted)
    {
        Log.Trace("AlgorithmManager.Run(): Deleting algorithm...");
        results.DebugMessage("Algorithm Id:(" + job.AlgorithmId + ") Deleted by request.");
        results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Deleted);
    }

    //Algorithm finished, send regardless of commands:
    results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Completed);

    //Take final samples:
    results.SampleRange(algorithm.GetChartUpdates());
    results.SampleEquity(_previousTime, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4));
    SampleBenchmark(algorithm, results, _previousTime);
    results.SamplePerformance(_previousTime, Math.Round((algorithm.Portfolio.TotalPortfolioValue - portfolioValue) * 100 / portfolioValue, 10));
} // End of Run();
/// <summary>
/// Produces the sequence of <see cref="TimeSlice"/> objects consumed by Run():
/// first replays any requested warmup history as synthesized time slices, then yields
/// slices from the live/backtest data feed, sending "catching up" status updates
/// while the algorithm is still warming up.
/// </summary>
/// <param name="job">Algorithm job, used for status updates keyed by algorithm id</param>
/// <param name="algorithm">Algorithm instance supplying warmup requests, securities and time zone</param>
/// <param name="feed">The data feed enumerated after warmup completes</param>
/// <param name="results">Result handler receiving status updates</param>
/// <param name="cancellationToken">Cancellation token (currently unused inside the iterator)</param>
private IEnumerable<TimeSlice> Stream(AlgorithmNodePacket job, IAlgorithm algorithm, IDataFeed feed, IResultHandler results, CancellationToken cancellationToken)
{
    bool setStartTime = false;
    var timeZone = algorithm.TimeZone;
    var history = algorithm.HistoryProvider;

    // get the required history job from the algorithm
    DateTime? lastHistoryTimeUtc = null;
    var historyRequests = algorithm.GetWarmupHistoryRequests().ToList();

    // initialize variables for progress computation
    var start = DateTime.UtcNow.Ticks;
    var nextStatusTime = DateTime.UtcNow.AddSeconds(1);
    var minimumIncrement = algorithm.UniverseManager
        .Select(x => x.Value.Configuration.Resolution.ToTimeSpan())
        .DefaultIfEmpty(Time.OneSecond)
        .Min();

    minimumIncrement = minimumIncrement == TimeSpan.Zero ? Time.OneSecond : minimumIncrement;

    if (historyRequests.Count != 0)
    {
        // rewrite internal feed requests to use the coarsest user-facing resolution
        var subscriptions = algorithm.SubscriptionManager.Subscriptions.Where(x => !x.IsInternalFeed).ToList();
        var minResolution = subscriptions.Count > 0 ? subscriptions.Min(x => x.Resolution) : Resolution.Second;
        foreach (var request in historyRequests)
        {
            Security security;
            if (algorithm.Securities.TryGetValue(request.Symbol, out security) && security.SubscriptionDataConfig.IsInternalFeed)
            {
                if (request.Resolution < minResolution)
                {
                    request.Resolution = minResolution;
                    request.FillForwardResolution = request.FillForwardResolution.HasValue ? minResolution : (Resolution?)null;
                }
            }
        }

        // rewrite all to share the same fill forward resolution
        if (historyRequests.Any(x => x.FillForwardResolution.HasValue))
        {
            minResolution = historyRequests.Where(x => x.FillForwardResolution.HasValue).Min(x => x.FillForwardResolution.Value);
            foreach (var request in historyRequests.Where(x => x.FillForwardResolution.HasValue))
            {
                request.FillForwardResolution = minResolution;
            }
        }

        foreach (var request in historyRequests)
        {
            // 'start' becomes the earliest warmup start, used below for progress percentage
            start = Math.Min(request.StartTimeUtc.Ticks, start);
            Log.Trace(string.Format("AlgorithmManager.Stream(): WarmupHistoryRequest: {0}: Start: {1} End: {2} Resolution: {3}", request.Symbol, request.StartTimeUtc, request.EndTimeUtc, request.Resolution));
        }

        // make the history request and build time slices
        foreach (var slice in history.GetHistory(historyRequests, timeZone))
        {
            TimeSlice timeSlice;
            try
            {
                // we need to recombine this slice into a time slice
                var paired = new List<KeyValuePair<Security, List<BaseData>>>();
                foreach (var symbol in slice.Keys)
                {
                    var security = algorithm.Securities[symbol];
                    var data = slice[symbol];
                    var list = new List<BaseData>();
                    var ticks = data as List<Tick>;
                    if (ticks != null)
                    {
                        list.AddRange(ticks);
                    }
                    else
                    {
                        list.Add(data);
                    }
                    paired.Add(new KeyValuePair<Security, List<BaseData>>(security, list));
                }
                timeSlice = TimeSlice.Create(slice.Time.ConvertToUtc(timeZone), timeZone, algorithm.Portfolio.CashBook, paired, SecurityChanges.None);
            }
            catch (Exception err)
            {
                // a failure converting history ends the stream; Run() surfaces the runtime error
                Log.Error(err);
                algorithm.RunTimeError = err;
                yield break;
            }

            if (timeSlice != null)
            {
                if (!setStartTime)
                {
                    setStartTime = true;
                    _previousTime = timeSlice.Time;
                    algorithm.Debug("Algorithm warming up...");
                }
                if (DateTime.UtcNow > nextStatusTime)
                {
                    // send some status to the user letting them know we're done history, but still warming up,
                    // catching up to real time data
                    nextStatusTime = DateTime.UtcNow.AddSeconds(1);
                    var percent = (int)(100 * (timeSlice.Time.Ticks - start) / (double)(DateTime.UtcNow.Ticks - start));
                    results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.History, string.Format("Catching up to realtime {0}%...", percent));
                }
                yield return(timeSlice);
                lastHistoryTimeUtc = timeSlice.Time;
            }
        }
    }

    // if we're not live or didn't event request warmup, then set us as not warming up
    if (!algorithm.LiveMode || historyRequests.Count == 0)
    {
        algorithm.SetFinishedWarmingUp();
        results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Running);
        if (historyRequests.Count != 0)
        {
            algorithm.Debug("Algorithm finished warming up.");
            Log.Trace("AlgorithmManager.Stream(): Finished warmup");
        }
    }

    foreach (var timeSlice in feed)
    {
        if (!setStartTime)
        {
            setStartTime = true;
            _previousTime = timeSlice.Time;
        }
        if (algorithm.LiveMode && algorithm.IsWarmingUp)
        {
            // this is hand-over logic, we spin up the data feed first and then request
            // the history for warmup, so there will be some overlap between the data
            if (lastHistoryTimeUtc.HasValue)
            {
                // make sure there's no historical data, this only matters for the handover
                var hasHistoricalData = false;
                foreach (var data in timeSlice.Slice.Ticks.Values.SelectMany(x => x).Concat<BaseData>(timeSlice.Slice.Bars.Values))
                {
                    // check if any ticks in the list are on or after our last warmup point, if so, skip this data
                    if (data.EndTime.ConvertToUtc(algorithm.Securities[data.Symbol].Exchange.TimeZone) >= lastHistoryTimeUtc)
                    {
                        hasHistoricalData = true;
                        break;
                    }
                }
                if (hasHistoricalData)
                {
                    continue;
                }

                // prevent us from doing these checks every loop
                lastHistoryTimeUtc = null;
            }

            // in live mode wait to mark us as finished warming up when
            // the data feed has caught up to now within the min increment
            if (timeSlice.Time > DateTime.UtcNow.Subtract(minimumIncrement))
            {
                algorithm.SetFinishedWarmingUp();
                results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Running);
                algorithm.Debug("Algorithm finished warming up.");
                Log.Trace("AlgorithmManager.Stream(): Finished warmup");
            }
            else if (DateTime.UtcNow > nextStatusTime)
            {
                // send some status to the user letting them know we're done history, but still warming up,
                // catching up to real time data
                nextStatusTime = DateTime.UtcNow.AddSeconds(1);
                var percent = (int)(100 * (timeSlice.Time.Ticks - start) / (double)(DateTime.UtcNow.Ticks - start));
                results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.History, string.Format("Catching up to realtime {0}%...", percent));
            }
        }
        yield return(timeSlice);
    }
}
/// <summary>
/// Initializes a cut command that can operate without an HMM algorithm.
/// </summary>
/// <param name="dagGenerator">DAG generator forwarded to the base command.</param>
/// <param name="routeGenerator">Route generator forwarded to the base command.</param>
/// <param name="algorithm">Algorithm to use; when null, a <see cref="NoHmmAlgorithm"/> is substituted as a safe default.</param>
public CutWithoutHmmCommand(IDagGenerator dagGenerator, IRouteGenerator routeGenerator, IAlgorithm algorithm)
    : base(dagGenerator, routeGenerator, algorithm ?? new NoHmmAlgorithm())
{
}
/// <summary>
/// No-op cash synchronization: performs no work and always reports success.
/// </summary>
/// <param name="algorithm">The algorithm instance (unused).</param>
/// <param name="currentTimeUtc">The current UTC time (unused).</param>
/// <param name="getTimeSinceLastFill">Delegate returning the elapsed time since the last fill (unused).</param>
/// <returns>Always true.</returns>
public bool PerformCashSync(IAlgorithm algorithm, DateTime currentTimeUtc, Func<TimeSpan> getTimeSinceLastFill)
{
    return(true);
}
/// <summary>
/// No-op initialization: this implementation ignores all arguments and keeps no state.
/// </summary>
/// <param name="job">The algorithm job packet (unused).</param>
/// <param name="algorithm">The algorithm instance (unused).</param>
/// <param name="messagingHandler">The messaging handler (unused).</param>
/// <param name="api">The api instance (unused).</param>
/// <param name="transactionHandler">The transaction handler (unused).</param>
public void Initialize(AlgorithmNodePacket job, IAlgorithm algorithm, IMessagingHandler messagingHandler, IApi api, ITransactionHandler transactionHandler)
{
}
/// <summary>
/// Primary entry point to setup a new algorithm
/// </summary>
/// <param name="algorithm">Algorithm instance</param>
/// <param name="brokerage">New brokerage output instance</param>
/// <param name="job">Algorithm job task</param>
/// <param name="resultHandler">The configured result handler</param>
/// <param name="transactionHandler">The configured transaction handler</param>
/// <param name="realTimeHandler">The configured real time handler</param>
/// <returns>True on successfully setting up the algorithm state, or false on error.</returns>
public bool Setup(IAlgorithm algorithm, IBrokerage brokerage, AlgorithmNodePacket job, IResultHandler resultHandler, ITransactionHandler transactionHandler, IRealTimeHandler realTimeHandler)
{
    // verify we were given the correct job packet type
    var liveJob = job as LiveNodePacket;
    if (liveJob == null)
    {
        AddInitializationError("BrokerageSetupHandler requires a LiveNodePacket");
        return(false);
    }

    algorithm.Name = liveJob.GetAlgorithmName();

    // verify the brokerage was specified
    if (string.IsNullOrWhiteSpace(liveJob.Brokerage))
    {
        AddInitializationError("A brokerage must be specified");
        return(false);
    }

    // attach to the message event to relay brokerage specific initialization messages
    EventHandler<BrokerageMessageEvent> brokerageOnMessage = (sender, args) =>
    {
        if (args.Type == BrokerageMessageType.Error)
        {
            AddInitializationError($"Brokerage Error Code: {args.Code} - {args.Message}");
        }
    };

    try
    {
        Log.Trace("BrokerageSetupHandler.Setup(): Initializing algorithm...");

        resultHandler.SendStatusUpdate(AlgorithmStatus.Initializing, "Initializing algorithm...");

        //Execute the initialize code:
        var controls = job.Controls;
        var isolator = new Isolator();
        var initializeComplete = isolator.ExecuteWithTimeLimit(TimeSpan.FromSeconds(300), () =>
        {
            try
            {
                //Set the default brokerage model before initialize
                algorithm.SetBrokerageModel(_factory.BrokerageModel);
                //Margin calls are disabled by default in live mode
                algorithm.Portfolio.MarginCallModel = MarginCallModel.Null;
                //Set our parameters
                algorithm.SetParameters(job.Parameters);
                algorithm.SetAvailableDataTypes(GetConfiguredDataFeeds());
                //Algorithm is live, not backtesting:
                algorithm.SetLiveMode(true);
                //Initialize the algorithm's starting date
                algorithm.SetDateTime(DateTime.UtcNow);
                //Set the source impl for the event scheduling
                algorithm.Schedule.SetEventSchedule(realTimeHandler);
                // set the option chain provider
                algorithm.SetOptionChainProvider(new CachingOptionChainProvider(new LiveOptionChainProvider()));
                // set the future chain provider
                algorithm.SetFutureChainProvider(new CachingFutureChainProvider(new LiveFutureChainProvider()));

                // If we're going to receive market data from IB,
                // set the default subscription limit to 100,
                // algorithms can override this setting in the Initialize method
                if (brokerage is InteractiveBrokersBrokerage && liveJob.DataQueueHandler.EndsWith("InteractiveBrokersBrokerage"))
                {
                    algorithm.Settings.DataSubscriptionLimit = 100;
                }

                //Initialise the algorithm, get the required data:
                algorithm.Initialize();

                if (liveJob.Brokerage != "PaperBrokerage")
                {
                    //Zero the CashBook - we'll populate directly from brokerage
                    foreach (var kvp in algorithm.Portfolio.CashBook)
                    {
                        kvp.Value.SetAmount(0);
                    }
                }
            }
            catch (Exception err)
            {
                AddInitializationError(err.ToString(), err);
            }
        }, controls.RamAllocation, sleepIntervalMillis: 50); // entire system is waiting on this, so be as fast as possible

        if (!initializeComplete)
        {
            AddInitializationError("Initialization timed out.");
            return(false);
        }

        // let the world know what we're doing since logging in can take a minute
        resultHandler.SendStatusUpdate(AlgorithmStatus.LoggingIn, "Logging into brokerage...");

        brokerage.Message += brokerageOnMessage;

        Log.Trace("BrokerageSetupHandler.Setup(): Connecting to brokerage...");
        try
        {
            // this can fail for various reasons, such as already being logged in somewhere else
            brokerage.Connect();
        }
        catch (Exception err)
        {
            Log.Error(err);
            AddInitializationError(
                $"Error connecting to brokerage: {err.Message}. " +
                "This may be caused by incorrect login credentials or an unsupported account type.", err);
            return(false);
        }

        if (!brokerage.IsConnected)
        {
            // if we're reporting that we're not connected, bail
            AddInitializationError("Unable to connect to brokerage.");
            return(false);
        }

        Log.Trace("BrokerageSetupHandler.Setup(): Fetching cash balance from brokerage...");
        try
        {
            // set the algorithm's cash balance for each currency
            var cashBalance = brokerage.GetCashBalance();
            foreach (var cash in cashBalance)
            {
                Log.Trace("BrokerageSetupHandler.Setup(): Setting " + cash.Symbol + " cash to " + cash.Amount);
                algorithm.Portfolio.SetCash(cash.Symbol, cash.Amount, cash.ConversionRate);
            }
        }
        catch (Exception err)
        {
            Log.Error(err);
            AddInitializationError("Error getting cash balance from brokerage: " + err.Message, err);
            return(false);
        }

        var supportedSecurityTypes = new HashSet<SecurityType>
        {
            SecurityType.Equity, SecurityType.Forex, SecurityType.Cfd, SecurityType.Option, SecurityType.Future, SecurityType.Crypto
        };
        // lazily computed: only needed when unrequested securities must be added for existing orders/holdings
        var minResolution = new Lazy<Resolution>(() => algorithm.Securities.Select(x => x.Value.Resolution).DefaultIfEmpty(Resolution.Second).Min());

        Log.Trace("BrokerageSetupHandler.Setup(): Fetching open orders from brokerage...");
        try
        {
            GetOpenOrders(algorithm, resultHandler, transactionHandler, brokerage, supportedSecurityTypes, minResolution.Value);
        }
        catch (Exception err)
        {
            Log.Error(err);
            AddInitializationError("Error getting open orders from brokerage: " + err.Message, err);
            return(false);
        }

        Log.Trace("BrokerageSetupHandler.Setup(): Fetching holdings from brokerage...");
        try
        {
            // populate the algorithm with the account's current holdings
            var holdings = brokerage.GetAccountHoldings();

            // add options first to ensure raw data normalization mode is set on the equity underlyings
            foreach (var holding in holdings.OrderByDescending(x => x.Type))
            {
                Log.Trace("BrokerageSetupHandler.Setup(): Has existing holding: " + holding);

                // verify existing holding security type
                if (!supportedSecurityTypes.Contains(holding.Type))
                {
                    Log.Error("BrokerageSetupHandler.Setup(): Unsupported security type: " + holding.Type + "-" + holding.Symbol.Value);
                    AddInitializationError("Found unsupported security type in existing brokerage holdings: " + holding.Type + ". " +
                        "QuantConnect currently supports the following security types: " + string.Join(",", supportedSecurityTypes));

                    // keep aggregating these errors
                    continue;
                }

                AddUnrequestedSecurity(algorithm, holding.Symbol, minResolution.Value);

                algorithm.Portfolio[holding.Symbol].SetHoldings(holding.AveragePrice, holding.Quantity);
                algorithm.Securities[holding.Symbol].SetMarketPrice(new TradeBar
                {
                    Time = DateTime.Now,
                    Open = holding.MarketPrice,
                    High = holding.MarketPrice,
                    Low = holding.MarketPrice,
                    Close = holding.MarketPrice,
                    Volume = 0,
                    Symbol = holding.Symbol,
                    DataType = MarketDataType.TradeBar
                });
            }
        }
        catch (Exception err)
        {
            Log.Error(err);
            AddInitializationError("Error getting account holdings from brokerage: " + err.Message, err);
            return(false);
        }

        //Finalize Initialization
        algorithm.PostInitialize();

        //Set the starting portfolio value for the strategy to calculate performance:
        StartingPortfolioValue = algorithm.Portfolio.TotalPortfolioValue;
        StartingDate = DateTime.Now;
    }
    catch (Exception err)
    {
        AddInitializationError(err.ToString(), err);
    }
    finally
    {
        if (brokerage != null)
        {
            // always detach the relay handler so this setup doesn't leak a subscription
            brokerage.Message -= brokerageOnMessage;
        }
    }

    return(Errors.Count == 0);
}
/// <summary>
/// Creates a new data instance and computes its hash.
/// </summary>
/// <param name="content">The raw content to store.</param>
/// <param name="algorithm">The hashing algorithm used to compute <see cref="Hash"/>.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="algorithm"/> is null.</exception>
public Data(string content, IAlgorithm algorithm)
{
    // Fail fast on a null algorithm (consistent with the other IAlgorithm-accepting
    // constructors in this codebase) rather than failing inside GetHash.
    if (algorithm == null) throw new ArgumentNullException(nameof(algorithm));
    Content = content;
    // The hash is computed once, at construction time.
    Hash = this.GetHash(algorithm);
}
/// <summary>
/// Accepts the specified algorithm and applies it to this instance.
/// NOTE(review): the Accept(IAlgorithm) shape suggests visitor-style double dispatch
/// in derived types — confirm against the concrete implementations.
/// </summary>
/// <param name="algorithm">The algorithm to apply.</param>
public abstract void Accept(IAlgorithm algorithm);
/// <summary>
/// Performs handler setup. This implementation is intentionally a no-op —
/// presumably a stub/test double that requires no initialization; confirm with callers.
/// </summary>
/// <param name="algorithm">The algorithm instance (unused).</param>
/// <param name="job">The job packet (unused).</param>
/// <param name="resultHandler">The result handler (unused).</param>
/// <param name="api">The API instance (unused).</param>
public void Setup(IAlgorithm algorithm, AlgorithmNodePacket job, IResultHandler resultHandler, IApi api) { }
/// <summary>
/// Initializes a new instance of the <see cref="AlgorithmSecurityValuesProvider"/> class
/// </summary>
/// <param name="algorithm">The wrapped algorithm instance</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="algorithm"/> is null</exception>
public AlgorithmSecurityValuesProvider(IAlgorithm algorithm)
{
    // Guard against null so failures surface at construction rather than on first
    // use of the wrapped algorithm, matching the DataPlanner constructor's validation.
    if (algorithm == null) throw new ArgumentNullException(nameof(algorithm));
    _algorithm = algorithm;
}
/// <summary>
/// Looks up the named result on the algorithm and returns its integer payload.
/// </summary>
/// <param name="a">The algorithm whose results collection is queried.</param>
/// <param name="resultName">The key of the result to read.</param>
/// <returns>The integer value stored in the named result.</returns>
public static int GetIntResult(IAlgorithm a, string resultName)
{
    var intValue = (IntValue)a.Results[resultName].Value;
    return intValue.Value;
}
/// <summary>
/// Performs handler setup. This implementation is intentionally a no-op —
/// presumably a stub/test double that requires no initialization; confirm with callers.
/// </summary>
/// <param name="algorithm">The algorithm instance (unused).</param>
/// <param name="job">The job packet (unused).</param>
/// <param name="resultHandler">The result handler (unused).</param>
/// <param name="api">The API instance (unused).</param>
/// <param name="isolatorLimitProvider">The isolator limit result provider (unused).</param>
public void Setup(IAlgorithm algorithm, AlgorithmNodePacket job, IResultHandler resultHandler, IApi api, IIsolatorLimitResultProvider isolatorLimitProvider) { }
/// <summary>
/// Creates a new portfolio target for the requested percent of total portfolio value.
/// </summary>
/// <param name="algorithm">The algorithm instance, used for getting total portfolio value and current security price</param>
/// <param name="symbol">The symbol the target is for</param>
/// <param name="percent">The requested target percent of total portfolio value</param>
/// <returns>A portfolio target for the specified symbol/percent</returns>
public static IPortfolioTarget Percent(IAlgorithm algorithm, Symbol symbol, double percent)
{
    // Convert the double to decimal first, then delegate to the decimal overload.
    var percentAsDecimal = percent.SafeDecimalCast();
    return Percent(algorithm, symbol, percentAsDecimal);
}
/// <summary>
/// Associates the algorithm and starting portfolio value with this handler.
/// This implementation is intentionally a no-op — presumably a stub/test double; confirm with callers.
/// </summary>
/// <param name="algorithm">The algorithm instance (unused).</param>
/// <param name="startingPortfolioValue">The starting portfolio value (unused).</param>
public void SetAlgorithm(IAlgorithm algorithm, decimal startingPortfolioValue) { }
/// <summary>
/// Creates a new <c>DataManagerStub</c> for the given feed and algorithm, delegating to the
/// three-argument overload with a default <c>TimeKeeper</c> anchored at the current UTC time
/// in the New York time zone.
/// </summary>
/// <param name="dataFeed">The data feed to stub against.</param>
/// <param name="algorithm">The algorithm instance.</param>
public DataManagerStub(IDataFeed dataFeed, IAlgorithm algorithm) : this(dataFeed, algorithm, new TimeKeeper(DateTime.UtcNow, TimeZones.NewYork)) { }
/// <summary>
/// Runs the optimization algorithm selected by <c>SelectedTab</c> against the loaded
/// test matrices <c>A</c>/<c>B</c> and opens a chart window tracking its progress.
/// Shows a message box and returns early if no data is loaded or no tab matched.
/// </summary>
public void Run()
{
    // Guard: test data must be loaded before an algorithm can run.
    if (A == null)
    {
        MessageBox.Show("Załaduj najpierw dane");
        return;
    }

    IAlgorithm algorithm = null;
    string name = "Algorytm";
    int iterations = 0;

    // Select and configure the algorithm matching the active tab.
    switch (SelectedTab)
    {
        case 0: // ant colony
            algorithm = new AntColony(_antColony.Ants, _antColony.MaxAssigns, _antColony.Alpha, _antColony.Beta, _antColony.Rho, _antColony.q, _antColony.Q0, _antColony.T0, _antColony.Q);
            name = "AlgorytmMrówkowy";
            iterations = _antColony.MaxAssigns;
            break;

        case 1: // bees algorithm
            algorithm = new BeesAlgorithm()
            {
                M = BeeAlgorithm.M,
                Imax = BeeAlgorithm.Imax,
                E = beeAlgorithm.E,
                Ngh = beeAlgorithm.Ngh,
                Nsp = beeAlgorithm.Nsp,
                Nb = beeAlgorithm.Nb,
                Nep = beeAlgorithm.Nep
            };
            name = "AlgorytmPszczeli";
            iterations = BeeAlgorithm.Imax;
            break;

        case 2: // firefly algorithm
            algorithm = new FireflyAlgorithm()
            {
                M = FireflyAlgorithm.M,
                Imax = FireflyAlgorithm.Imax,
                Gamma = FireflyAlgorithm.Gamma,
                Alfa = FireflyAlgorithm.Alfa
            };
            name = "AlgorytmŚwietlikowy";
            iterations = FireflyAlgorithm.Imax;
            break;

        case 3: // bee algorithm (SBC variant)
            algorithm = new BeeAlgorithm(this.beeAlgorithmSBC.TotalNumberBees, this.beeAlgorithmSBC.NumberScout, this.beeAlgorithmSBC.MaxNumberVisits, this.beeAlgorithmSBC.MaxNumberCycles, this.beeAlgorithmSBC.ProbPersuasion, this.beeAlgorithmSBC.ProbMistake);
            name = "AlgorytmPszczeliBSC";
            // BUG FIX: this previously read FireflyAlgorithm.Imax — a copy-paste
            // from case 2. Use this variant's own cycle count, the same value
            // passed to the BeeAlgorithm constructor above.
            iterations = this.beeAlgorithmSBC.MaxNumberCycles;
            break;
    }

    // Guard: an unhandled tab index leaves algorithm null.
    if (algorithm == null)
    {
        MessageBox.Show("Nie załadowano algorytmu.");
        return;
    }

    // Hand the algorithm cloned copies of the matrices so the run cannot mutate the originals.
    algorithm.SetTestData((int[,])A.Clone(), (int[,])B.Clone(), size);

    var chart = new Chart(algorithm, iterations, name, this.iterationGap, filename);
    chart.Show();
}
/// <summary>
/// Creates a new BacktestingBrokerage for the specified algorithm
/// </summary>
/// <param name="algorithm">The algorithm instance</param>
public BacktestingBrokerage(IAlgorithm algorithm)
    : base("Backtesting Brokerage")
{
    // Pending-order map must exist before any order is submitted.
    _pending = new ConcurrentDictionary<int, Order>();
    Algorithm = algorithm;
}
/// <summary>
/// Creates a new <c>DataManagerStub</c> with an explicit time keeper, delegating to the
/// five-argument overload with the market-hours and symbol-properties databases loaded
/// from the data folder.
/// </summary>
/// <param name="dataFeed">The data feed to stub against.</param>
/// <param name="algorithm">The algorithm instance.</param>
/// <param name="timeKeeper">The time keeper to use.</param>
public DataManagerStub(IDataFeed dataFeed, IAlgorithm algorithm, ITimeKeeper timeKeeper) : this(dataFeed, algorithm, timeKeeper, MarketHoursDatabase.FromDataFolder(), SymbolPropertiesDatabase.FromDataFolder()) { }
/// <summary>
/// Creates a new <c>DataManagerStub</c> with no real data feed, delegating to the
/// three-argument overload with a <c>NullDataFeed</c>.
/// </summary>
/// <param name="algorithm">The algorithm instance.</param>
/// <param name="timeKeeper">The time keeper to use.</param>
public DataManagerStub(IAlgorithm algorithm, ITimeKeeper timeKeeper) : this(new NullDataFeed(), algorithm, timeKeeper) { }
/// <summary>
/// Creates a new BacktestingBrokerage for the specified algorithm
/// </summary>
/// <param name="algorithm">The algorithm instance</param>
/// <param name="name">The name of the brokerage</param>
protected BacktestingBrokerage(IAlgorithm algorithm, string name)
    : base(name)
{
    // Pending-order map must exist before any order is submitted.
    _pending = new ConcurrentDictionary<int, Order>();
    Algorithm = algorithm;
}