/// <summary>
/// Create a new lean data writer to this base data directory.
/// </summary>
/// <param name="dataDirectory">Base data directory</param>
/// <param name="resolution">Resolution of the desired output data</param>
/// <param name="securityType">The security type</param>
/// <param name="tickType">The tick type</param>
/// <param name="dataCacheProvider">The data cache provider to use; defaults to a disk-backed provider</param>
/// <param name="writePolicy">The file write policy to use; when null, merge is used for Hour resolution and above, overwrite otherwise</param>
public LeanDataWriter(string dataDirectory, Resolution resolution, SecurityType securityType, TickType tickType,
    IDataCacheProvider dataCacheProvider = null, WritePolicy? writePolicy = null)
{
    _tickType = tickType;
    _resolution = resolution;
    _securityType = securityType;
    _dataDirectory = dataDirectory;

    // default policy depends on resolution: Hour and above merge into existing files, finer resolutions overwrite
    _writePolicy = writePolicy ?? (resolution >= Resolution.Hour ? WritePolicy.Merge : WritePolicy.Overwrite);

    _dataCacheProvider = dataCacheProvider ?? new DiskDataCacheProvider();
}
/// <summary>
/// Initializes a new instance of the <see cref="TextSubscriptionDataSourceReader"/> class
/// </summary>
/// <param name="dataCacheProvider">This provider caches files if needed</param>
/// <param name="config">The subscription's configuration</param>
/// <param name="date">The date this factory was produced to read data for</param>
/// <param name="isLiveMode">True if we're in live mode, false for backtesting</param>
public TextSubscriptionDataSourceReader(IDataCacheProvider dataCacheProvider, SubscriptionDataConfig config, DateTime date, bool isLiveMode)
    : base(dataCacheProvider, isLiveMode)
{
    _date = date;
    _config = config;

    // only cache parsed data points for non-custom, infrequent (Hour+) data backed by a persistent cache provider
    _shouldCacheDataPoints = !_config.IsCustomData && _config.Resolution >= Resolution.Hour
        && _config.Type != typeof(FineFundamental) && _config.Type != typeof(CoarseFundamental)
        && !DataCacheProvider.IsDataEphemeral;

    // we know these types implement the StreamReader-based 'Reader' overload, so we can
    // avoid the reflection lookup for them (consistent with the sibling constructor variant)
    if (_config.Type == typeof(TradeBar) || _config.Type == typeof(QuoteBar) || _config.Type == typeof(Tick))
    {
        _implementsStreamReader = true;
    }
    else
    {
        // detect whether the data type declares its own StreamReader-based 'Reader' overload
        var method = _config.Type.GetMethod("Reader",
            new[] { typeof(SubscriptionDataConfig), typeof(StreamReader), typeof(DateTime), typeof(bool) });
        if (method != null && method.DeclaringType == _config.Type)
        {
            _implementsStreamReader = true;
        }
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="HistoryProviderInitializeParameters"/> class from the specified parameters
/// </summary>
/// <param name="job">The job</param>
/// <param name="api">The API instance</param>
/// <param name="dataProvider">Provider used to get data when it is not present on disk</param>
/// <param name="dataCacheProvider">Provider used to cache history data files</param>
/// <param name="mapFileProvider">Provider used to get a map file resolver to handle equity mapping</param>
/// <param name="factorFileProvider">Provider used to get factor files to handle equity price scaling</param>
/// <param name="statusUpdateAction">Function used to send status updates</param>
public HistoryProviderInitializeParameters(
    AlgorithmNodePacket job,
    IApi api,
    IDataProvider dataProvider,
    IDataCacheProvider dataCacheProvider,
    IMapFileProvider mapFileProvider,
    IFactorFileProvider factorFileProvider,
    Action<int> statusUpdateAction)
{
    // plain capture of the supplied dependencies; no validation is performed here
    Api = api;
    Job = job;
    DataProvider = dataProvider;
    DataCacheProvider = dataCacheProvider;
    MapFileProvider = mapFileProvider;
    FactorFileProvider = factorFileProvider;
    StatusUpdateAction = statusUpdateAction;
}
/// <summary>
/// Creates a new instance of this <see cref="ISubscriptionDataSourceReader"/>
/// </summary>
/// <param name="dataCacheProvider">This provider caches files if needed</param>
/// <param name="config">The subscription's configuration</param>
/// <param name="date">The date this factory was produced to read data for</param>
/// <param name="isLiveMode">True if we're in live mode, false for backtesting</param>
/// <param name="dataProvider">The data provider to use</param>
/// <exception cref="ArgumentException">Thrown when the configured data type does not derive from <see cref="IndexedBaseData"/></exception>
public IndexSubscriptionDataSourceReader(IDataCacheProvider dataCacheProvider, SubscriptionDataConfig config, DateTime date, bool isLiveMode, IDataProvider dataProvider)
    : base(dataCacheProvider, isLiveMode)
{
    _config = config;
    _date = date;
    _dataProvider = dataProvider;

    // this reader only works with indexed data types; fail fast with a clear message otherwise
    _factory = config.Type.GetBaseDataInstance() as IndexedBaseData;
    if (_factory == null)
    {
        // note the trailing space before the concatenation: the original produced "usedwith"
        throw new ArgumentException($"{nameof(IndexSubscriptionDataSourceReader)} should be used " +
            $"with a data type which implements {nameof(IndexedBaseData)}");
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="SubscriptionDataReaderSubscriptionEnumeratorFactory"/> class
/// </summary>
/// <param name="resultHandler">The result handler for the algorithm</param>
/// <param name="mapFileProvider">The map file provider</param>
/// <param name="factorFileProvider">The factor file provider</param>
/// <param name="cacheProvider">Provider used to get data when it is not present on disk</param>
/// <param name="tradableDaysProvider">Function used to provide the tradable dates to be enumerator.
/// Specify null to default to <see cref="SubscriptionRequest.TradableDays"/></param>
/// <param name="enablePriceScaling">Applies price factor</param>
public SubscriptionDataReaderSubscriptionEnumeratorFactory(IResultHandler resultHandler,
    IMapFileProvider mapFileProvider,
    IFactorFileProvider factorFileProvider,
    IDataCacheProvider cacheProvider,
    Func<SubscriptionRequest, IEnumerable<DateTime>> tradableDaysProvider = null,
    bool enablePriceScaling = true)
{
    _resultHandler = resultHandler;
    _mapFileProvider = mapFileProvider;
    _factorFileProvider = factorFileProvider;
    _dataCacheProvider = cacheProvider;
    _enablePriceScaling = enablePriceScaling;

    // warning messages are keyed by symbol
    _numericalPrecisionLimitedWarnings = new ConcurrentDictionary<Symbol, string>();
    _startDateLimitedWarnings = new ConcurrentDictionary<Symbol, string>();

    // this factory always produces backtesting readers
    _isLiveMode = false;

    // when no provider is supplied, fall back to the request's own tradable days
    _tradableDaysProvider = tradableDaysProvider ?? (request => request.TradableDays);
}
/// <summary>
/// Initializes a new instance of the <see cref="RemoteFileSubscriptionStreamReader"/> class.
/// </summary>
/// <param name="dataCacheProvider">The <see cref="IDataCacheProvider"/> used to retrieve a stream of data</param>
/// <param name="source">The remote url to be downloaded via web client</param>
/// <param name="downloadDirectory">The local directory and destination of the download</param>
public RemoteFileSubscriptionStreamReader(IDataCacheProvider dataCacheProvider, string source, string downloadDirectory)
{
    // create a hash for a new filename
    var filename = Guid.NewGuid() + source.GetExtension();
    var destination = Path.Combine(downloadDirectory, filename);

    byte[] bytes;
    using (var client = new WebClient())
    {
        client.Proxy = WebRequest.GetSystemWebProxy();
        // download once into memory; avoids writing the file to disk and then
        // immediately re-reading the same bytes back for the cache provider
        bytes = client.DownloadData(source);
    }
    File.WriteAllBytes(destination, bytes);

    // Send the file to the dataCacheProvider so it is available when the streamReader asks for it
    dataCacheProvider.Store(destination, bytes);

    // now we can just use the local file reader
    _streamReader = new LocalFileSubscriptionStreamReader(dataCacheProvider, destination);
}
// Builds a QCAlgorithm wired with a stub data manager and a history provider
// initialized from this fixture's providers plus the supplied cache provider.
private QCAlgorithm GetAlgorithm(IDataCacheProvider cacheProvider, DateTime dateTime)
{
    var algo = new QCAlgorithm();
    algo.SubscriptionManager.SetDataManager(new DataManagerStub(algo));
    algo.HistoryProvider = new SubscriptionDataReaderHistoryProvider();
    // the supplied time is interpreted in the algorithm's time zone
    algo.SetDateTime(dateTime.ConvertToUtc(algo.TimeZone));

    var initParameters = new HistoryProviderInitializeParameters(null, null, _dataProvider, cacheProvider,
        _mapFileProvider, _factorFileProvider, null, false, new DataPermissionManager());
    algo.HistoryProvider.Initialize(initParameters);
    return algo;
}
/// <summary>
/// Subscription data reader takes a subscription request, loads the type, accepts the data source and enumerate on the results.
/// </summary>
/// <param name="config">Subscription configuration object</param>
/// <param name="dataRequest">The data request</param>
/// <param name="mapFileProvider">Used for resolving the correct map files</param>
/// <param name="factorFileProvider">Used for getting factor files</param>
/// <param name="dataCacheProvider">Used for caching files</param>
/// <param name="dataProvider">The data provider to use</param>
public SubscriptionDataReader(SubscriptionDataConfig config,
    BaseDataRequest dataRequest,
    IMapFileProvider mapFileProvider,
    IFactorFileProvider factorFileProvider,
    IDataCacheProvider dataCacheProvider,
    IDataProvider dataProvider)
{
    // capture the subscription configuration and providers
    _config = config;
    _dataProvider = dataProvider;
    _mapFileProvider = mapFileProvider;
    _factorFileProvider = factorFileProvider;
    _dataCacheProvider = dataCacheProvider;

    // the request window, expressed in the security's local time
    _periodStart = dataRequest.StartTimeLocal;
    _periodFinish = dataRequest.EndTimeLocal;

    // lazily enumerated list of dates we will request data for
    _tradeableDates = dataRequest.TradableDays.GetEnumerator();
}
/// <summary>
/// Initializes a new instance of the <see cref="HistoryProviderInitializeParameters"/> class from the specified parameters
/// </summary>
/// <param name="job">The job</param>
/// <param name="api">The API instance</param>
/// <param name="dataProvider">Provider used to get data when it is not present on disk</param>
/// <param name="dataCacheProvider">Provider used to cache history data files</param>
/// <param name="mapFileProvider">Provider used to get a map file resolver to handle equity mapping</param>
/// <param name="factorFileProvider">Provider used to get factor files to handle equity price scaling</param>
/// <param name="statusUpdateAction">Function used to send status updates</param>
/// <param name="parallelHistoryRequestsEnabled">True if parallel history requests are enabled</param>
/// <param name="dataPermissionManager">The data permission manager to use</param>
public HistoryProviderInitializeParameters(
    AlgorithmNodePacket job,
    IApi api,
    IDataProvider dataProvider,
    IDataCacheProvider dataCacheProvider,
    IMapFileProvider mapFileProvider,
    IFactorFileProvider factorFileProvider,
    Action<int> statusUpdateAction,
    bool parallelHistoryRequestsEnabled,
    IDataPermissionManager dataPermissionManager)
{
    // straight capture of the supplied dependencies; no validation is performed here
    Api = api;
    Job = job;
    DataProvider = dataProvider;
    DataCacheProvider = dataCacheProvider;
    MapFileProvider = mapFileProvider;
    FactorFileProvider = factorFileProvider;
    StatusUpdateAction = statusUpdateAction;
    DataPermissionManager = dataPermissionManager;
    ParallelHistoryRequestsEnabled = parallelHistoryRequestsEnabled;
}
/// <summary>
/// <see cref = "QuantBook" /> constructor.
/// Provides access to data for quantitative analysis
/// </summary>
public QuantBook()
{
    try
    {
        // take the python GIL before importing pandas; the reference is kept for later dataframe conversions
        using (Py.GIL())
        {
            _pandas = Py.Import("pandas");
        }

        // build the LEAN handlers from configuration using a fresh composer instance
        var composer = new Composer();
        var algorithmHandlers = LeanEngineAlgorithmHandlers.FromConfiguration(composer);
        _dataCacheProvider = new SingleEntryDataCacheProvider(algorithmHandlers.DataProvider);
        var mapFileProvider = algorithmHandlers.MapFileProvider;

        // resolve the configured history provider (defaults to SubscriptionDataReaderHistoryProvider)
        // and initialize it with the providers created above; the job and status-update arguments are null here
        _historyProvider = composer.GetExportedValueByTypeName<IHistoryProvider>(Config.Get("history-provider", "SubscriptionDataReaderHistoryProvider"));
        _historyProvider.Initialize(null, algorithmHandlers.DataProvider, _dataCacheProvider, mapFileProvider, algorithmHandlers.FactorFileProvider, null);
    }
    catch (Exception exception)
    {
        // NOTE(review): wraps any failure in a generic Exception whose message embeds the original's ToString
        throw new Exception("QuantBook.Main(): " + exception);
    }
}
/// <summary>
/// Create a new lean data writer to this base data directory.
/// </summary>
/// <param name="resolution">Resolution of the desired output data</param>
/// <param name="symbol">The symbol to write data for</param>
/// <param name="dataDirectory">Base data directory</param>
/// <param name="tickType">The tick type</param>
/// <param name="dataCacheProvider">The data cache provider to use</param>
/// <param name="writePolicy">The file write policy to use</param>
public LeanDataWriter(Resolution resolution, Symbol symbol, string dataDirectory, TickType tickType = TickType.Trade,
    IDataCacheProvider dataCacheProvider = null, WritePolicy? writePolicy = null)
    : this(dataDirectory, resolution, symbol.ID.SecurityType, tickType, dataCacheProvider, writePolicy)
{
    _symbol = symbol;

    // All fx data is quote data.
    if (_securityType == SecurityType.Forex || _securityType == SecurityType.Cfd)
    {
        _tickType = TickType.Quote;
    }

    // reject security types the writer does not know how to lay out on disk
    switch (_securityType)
    {
        case SecurityType.Equity:
        case SecurityType.Forex:
        case SecurityType.Cfd:
        case SecurityType.Crypto:
        case SecurityType.Future:
        case SecurityType.Option:
        case SecurityType.FutureOption:
        case SecurityType.Index:
        case SecurityType.IndexOption:
            break;
        default:
            throw new NotImplementedException("Sorry this security type is not yet supported by the LEAN data writer: " + _securityType);
    }
}
/// <summary>
/// Gathers per-site employee codes from the transaction and timesheet caches and feeds them
/// to the live efficiency engine, which updates the live efficiency cache for the given window.
/// </summary>
public static void ComputeAndUpdateCacheForAllSites(
    ICacheRepositoryFactory<ITransactionCacheRepository> transactionCacheRepositoryFactory,
    IDataCacheProvider<EfficiencyTimesheetCache> timesheetCacheProvider,
    IEmployeeEfficiencyCacheRepository liveEfficiencyCache,
    ILiveEfficiencyEngine liveEfficiencyEngine,
    DateTime start,
    DateTime end)
{
    // collect every (site, employee) pair known to the transaction cache
    var transactionCache = transactionCacheRepositoryFactory.GetCurrent();
    List<SiteEmployeeCode> siteEmployeeCodes = transactionCache.GetSiteCodes()
        .SelectMany(siteCode => transactionCache.GetTransactionSiteEmployeeCodes(siteCode)
            .Select(employeeCode => new SiteEmployeeCode { SiteCode = siteCode, EmployeeCode = employeeCode }))
        .ToList();

    // collect the distinct (site, TNA employee) pairs present in the timesheet cache
    var timeSheetCache = timesheetCacheProvider.GetCurrent();
    List<SiteEmployeeCode> tnaEmployeeCodes = timeSheetCache.GetSiteCodes()
        .SelectMany(siteCode => timeSheetCache.GetAll(siteCode)
            .GroupBy(record => record.TnaEmployeeCode)
            .Select(group => new SiteEmployeeCode { SiteCode = siteCode, EmployeeCode = group.Key }))
        .ToList();

    // hand both site->employees maps to the engine; each computed result is written back to the live cache
    liveEfficiencyEngine.ComputeForSiteCodesAndExecute(
        siteEmployeeCodes.GroupBy(x => x.SiteCode).ToDictionary(g => g.Key, g => g.ToArray()),
        tnaEmployeeCodes.GroupBy(x => x.SiteCode).ToDictionary(g => g.Key, g => g.ToArray()),
        start,
        end,
        calculatedEfficiency => EfficiencyCacheHelper.UpdateLiveEfficiencyCache(calculatedEfficiency, liveEfficiencyCache, start, end, true));
}
/// <summary>
/// Initializes a new instance of the <see cref="RemoteFileSubscriptionStreamReader"/> class.
/// </summary>
/// <param name="dataCacheProvider">The <see cref="IDataCacheProvider"/> used to retrieve a stream of data</param>
/// <param name="source">The remote url to be downloaded via web client</param>
/// <param name="downloadDirectory">The local directory and destination of the download</param>
/// <param name="headers">Defines header values to add to the request</param>
public RemoteFileSubscriptionStreamReader(IDataCacheProvider dataCacheProvider, string source, string downloadDirectory, IEnumerable<KeyValuePair<string, string>> headers)
{
    // don't use cache if data is ephemeral
    // will be false for live history requests and live subscriptions
    var useCache = !dataCacheProvider.IsDataEphemeral;
    // create a hash for a new filename: cached downloads get a stable MD5-of-url name so
    // repeated requests reuse the same file; uncached ones get a unique GUID name
    var filename = (useCache ? source.ToMD5() : Guid.NewGuid().ToString()) + source.GetExtension();
    var destination = Path.Combine(downloadDirectory, filename);
    // 'contents' stays null when the cached file already exists; in that case the download,
    // the write and the Store call below are all skipped and the existing file is used as-is
    string contents = null;
    if (useCache)
    {
        // serialize filesystem access so concurrent readers don't download/write the same file twice
        lock (_fileSystemLock)
        {
            if (!File.Exists(destination))
            {
                contents = _downloader.Download(source, headers, null, null);
                File.WriteAllText(destination, contents);
            }
        }
    }
    else
    {
        contents = _downloader.Download(source, headers, null, null);
        File.WriteAllText(destination, contents);
    }
    if (contents != null)
    {
        // Send the file to the dataCacheProvider so it is available when the streamReader asks for it
        dataCacheProvider.Store(destination, System.Text.Encoding.UTF8.GetBytes(contents));
    }
    // now we can just use the local file reader
    _streamReader = new LocalFileSubscriptionStreamReader(dataCacheProvider, destination);
}
/// <summary>
/// Creates a new instance
/// </summary>
/// <param name="universeSelection">The universe selection instance</param>
/// <param name="algorithm">Algorithm instance</param>
/// <param name="brokerage">New brokerage output instance</param>
/// <param name="algorithmNodePacket">Algorithm job task</param>
/// <param name="resultHandler">The configured result handler</param>
/// <param name="transactionHandler">The configured transaction handler</param>
/// <param name="realTimeHandler">The configured real time handler</param>
/// <param name="objectStore">The configured object store</param>
/// <param name="dataCacheProvider">The configured data cache provider</param>
/// <param name="mapFileProvider">The map file provider</param>
public SetupHandlerParameters(UniverseSelection universeSelection,
    IAlgorithm algorithm,
    IBrokerage brokerage,
    AlgorithmNodePacket algorithmNodePacket,
    IResultHandler resultHandler,
    ITransactionHandler transactionHandler,
    IRealTimeHandler realTimeHandler,
    IObjectStore objectStore,
    IDataCacheProvider dataCacheProvider,
    IMapFileProvider mapFileProvider)
{
    // parameter object: every dependency is captured as-is, no validation performed here
    Algorithm = algorithm;
    Brokerage = brokerage;
    ObjectStore = objectStore;
    ResultHandler = resultHandler;
    RealTimeHandler = realTimeHandler;
    MapFileProvider = mapFileProvider;
    UniverseSelection = universeSelection;
    DataCacheProvider = dataCacheProvider;
    TransactionHandler = transactionHandler;
    AlgorithmNodePacket = algorithmNodePacket;
}
/// <summary>
/// Subscription data reader takes a subscription request, loads the type, accepts the data source and enumerate on the results.
/// </summary>
/// <param name="config">Subscription configuration object</param>
/// <param name="periodStart">Start date for the data request/backtest</param>
/// <param name="periodFinish">Finish date for the data request/backtest</param>
/// <param name="mapFileResolver">Used for resolving the correct map files</param>
/// <param name="factorFileProvider">Used for getting factor files</param>
/// <param name="dataCacheProvider">Used for caching files</param>
/// <param name="tradeableDates">Defines the dates for which we'll request data, in order, in the security's data time zone</param>
/// <param name="isLiveMode">True if we're in live mode, false otherwise</param>
public SubscriptionDataReader(SubscriptionDataConfig config,
    DateTime periodStart,
    DateTime periodFinish,
    MapFileResolver mapFileResolver,
    IFactorFileProvider factorFileProvider,
    IEnumerable<DateTime> tradeableDates,
    bool isLiveMode,
    IDataCacheProvider dataCacheProvider)
{
    // capture the subscription configuration and mode
    _config = config;
    _isLiveMode = isLiveMode;

    // the request window
    _periodStart = periodStart;
    _periodFinish = periodFinish;

    // providers used for mapping, price scaling and file caching
    _mapFileResolver = mapFileResolver;
    _factorFileProvider = factorFileProvider;
    _dataCacheProvider = dataCacheProvider;

    // dates are consumed lazily through this enumerator
    _tradeableDates = tradeableDates.GetEnumerator();
}
/// <summary>
/// Initializes a new instance of the <see cref="TextSubscriptionDataSourceReader"/> class
/// </summary>
/// <param name="dataCacheProvider">This provider caches files if needed</param>
/// <param name="config">The subscription's configuration</param>
/// <param name="date">The date this factory was produced to read data for</param>
/// <param name="isLiveMode">True if we're in live mode, false for backtesting</param>
public TextSubscriptionDataSourceReader(IDataCacheProvider dataCacheProvider, SubscriptionDataConfig config, DateTime date, bool isLiveMode)
    : base(dataCacheProvider, isLiveMode)
{
    _date = date;
    _config = config;

    // cache parsed data points only for non-custom, Hour+ resolution data that is not
    // fundamental data and whose cache provider persists across requests
    _shouldCacheDataPoints = !_config.IsCustomData
        && _config.Resolution >= Resolution.Hour
        && _config.Type != typeof(FineFundamental)
        && _config.Type != typeof(CoarseFundamental)
        && !DataCacheProvider.IsDataEphemeral;

    // we know these type implement the streamReader interface lets avoid dynamic reflection call to figure it out
    var dataType = _config.Type;
    if (dataType == typeof(TradeBar) || dataType == typeof(QuoteBar) || dataType == typeof(Tick))
    {
        _implementsStreamReader = true;
    }
    else
    {
        // otherwise check whether the type itself declares the StreamReader-based 'Reader' overload
        var readerMethod = dataType.GetMethod("Reader",
            new[] { typeof(SubscriptionDataConfig), typeof(StreamReader), typeof(DateTime), typeof(bool) });
        _implementsStreamReader = readerMethod != null && readerMethod.DeclaringType == dataType;
    }
}
/// <summary>
/// <see cref = "QuantBook" /> constructor.
/// Provides access to data for quantitative analysis
/// </summary>
public QuantBook() : base()
{
    try
    {
        // take the python GIL before importing pandas; the reference is kept for dataframe conversions
        using (Py.GIL())
        {
            _pandas = Py.Import("pandas");
        }

        // Issue #4892 : Set start time relative to NY time
        // when the data is available from the previous day
        var newYorkTime = DateTime.UtcNow.ConvertFromUtc(TimeZones.NewYork);
        var hourThreshold = Config.GetInt("qb-data-hour", 9);

        // If it is after our hour threshold; then we can use today
        if (newYorkTime.Hour >= hourThreshold)
        {
            SetStartDate(newYorkTime);
        }
        else
        {
            SetStartDate(newYorkTime - TimeSpan.FromDays(1));
        }

        // Sets PandasConverter
        SetPandasConverter();

        // Reset our composer; needed for re-creation of QuantBook
        Composer.Instance.Reset();
        var composer = Composer.Instance;

        // Create our handlers with our composer instance
        var algorithmHandlers = LeanEngineAlgorithmHandlers.FromConfiguration(composer);
        var systemHandlers = LeanEngineSystemHandlers.FromConfiguration(composer);
        // init the API
        systemHandlers.Initialize();
        systemHandlers.LeanManager.Initialize(systemHandlers, algorithmHandlers, new BacktestNodePacket(), new AlgorithmManager(false));
        systemHandlers.LeanManager.SetAlgorithm(this);

        // configure the object store from config values before exposing it to the algorithm
        algorithmHandlers.ObjectStore.Initialize("QuantBook",
            Config.GetInt("job-user-id"),
            Config.GetInt("project-id"),
            Config.Get("api-access-token"),
            new Controls
            {
                // if <= 0 we disable periodic persistence and make it synchronous
                PersistenceIntervalSeconds = -1,
                StorageLimitMB = Config.GetInt("storage-limit-mb", 5),
                StorageFileCount = Config.GetInt("storage-file-count", 100),
                StoragePermissions = (FileAccess)Config.GetInt("storage-permissions", (int)FileAccess.ReadWrite)
            });
        SetObjectStore(algorithmHandlers.ObjectStore);

        _dataCacheProvider = new ZipDataCacheProvider(algorithmHandlers.DataProvider);
        _dataProvider = algorithmHandlers.DataProvider;

        // wire up the security service and a null data feed; QuantBook performs no live feeding
        var symbolPropertiesDataBase = SymbolPropertiesDatabase.FromDataFolder();
        var registeredTypes = new RegisteredSecurityDataTypesProvider();
        var securityService = new SecurityService(Portfolio.CashBook,
            MarketHoursDatabase,
            symbolPropertiesDataBase,
            this,
            registeredTypes,
            new SecurityCacheProvider(Portfolio));
        Securities.SetSecurityService(securityService);
        SubscriptionManager.SetDataManager(
            new DataManager(new NullDataFeed(),
                new UniverseSelection(this, securityService, algorithmHandlers.DataPermissionsManager, algorithmHandlers.DataProvider),
                this,
                TimeKeeper,
                MarketHoursDatabase,
                false,
                registeredTypes,
                algorithmHandlers.DataPermissionsManager));

        // resolve and initialize the configured history provider (parallel history requests enabled)
        var mapFileProvider = algorithmHandlers.MapFileProvider;
        HistoryProvider = composer.GetExportedValueByTypeName<IHistoryProvider>(Config.Get("history-provider", "SubscriptionDataReaderHistoryProvider"));
        HistoryProvider.Initialize(
            new HistoryProviderInitializeParameters(
                null,
                null,
                algorithmHandlers.DataProvider,
                _dataCacheProvider,
                mapFileProvider,
                algorithmHandlers.FactorFileProvider,
                null,
                true,
                algorithmHandlers.DataPermissionsManager
            )
        );

        // chain providers are backed by disk data and wrapped in caching decorators
        SetOptionChainProvider(new CachingOptionChainProvider(new BacktestingOptionChainProvider(_dataProvider)));
        SetFutureChainProvider(new CachingFutureChainProvider(new BacktestingFutureChainProvider(_dataProvider)));
    }
    catch (Exception exception)
    {
        // NOTE(review): wraps any failure in a generic Exception whose message embeds the original's ToString
        throw new Exception("QuantBook.Main(): " + exception);
    }
}
/// <summary>
/// Initializes this history provider to work for the specified job
/// </summary>
/// <param name="parameters">The initialization parameters</param>
public override void Initialize(HistoryProviderInitializeParameters parameters)
{
    // only the three providers below are retained; the remaining parameter members are unused here
    _dataCacheProvider = parameters.DataCacheProvider;
    _factorFileProvider = parameters.FactorFileProvider;
    _mapFileProvider = parameters.MapFileProvider;
}
/// <summary>
/// <see cref = "QuantBook" /> constructor.
/// Provides access to data for quantitative analysis
/// </summary>
public QuantBook() : base()
{
    try
    {
        // take the python GIL before importing pandas; the reference is kept for dataframe conversions
        using (Py.GIL())
        {
            _pandas = Py.Import("pandas");
        }

        // By default, set start date to end data which is yesterday
        SetStartDate(EndDate);

        // Sets PandasConverter
        SetPandasConverter();

        // Initialize History Provider
        var composer = new Composer();
        var algorithmHandlers = LeanEngineAlgorithmHandlers.FromConfiguration(composer);
        var systemHandlers = LeanEngineSystemHandlers.FromConfiguration(composer);
        systemHandlers.LeanManager.Initialize(systemHandlers, algorithmHandlers, new BacktestNodePacket(), new AlgorithmManager(false));
        systemHandlers.LeanManager.SetAlgorithm(this);
        _dataCacheProvider = new ZipDataCacheProvider(algorithmHandlers.DataProvider);

        // wire up the security service and a null data feed; QuantBook performs no live feeding
        var symbolPropertiesDataBase = SymbolPropertiesDatabase.FromDataFolder();
        var registeredTypes = new RegisteredSecurityDataTypesProvider();
        var securityService = new SecurityService(Portfolio.CashBook, MarketHoursDatabase, symbolPropertiesDataBase, this, registeredTypes);
        Securities.SetSecurityService(securityService);
        SubscriptionManager.SetDataManager(
            new DataManager(new NullDataFeed(),
                new UniverseSelection(this, securityService),
                this,
                TimeKeeper,
                MarketHoursDatabase,
                false,
                registeredTypes));

        // resolve and initialize the configured history provider (defaults to SubscriptionDataReaderHistoryProvider)
        var mapFileProvider = algorithmHandlers.MapFileProvider;
        HistoryProvider = composer.GetExportedValueByTypeName<IHistoryProvider>(Config.Get("history-provider", "SubscriptionDataReaderHistoryProvider"));
        HistoryProvider.Initialize(
            new HistoryProviderInitializeParameters(
                null,
                null,
                algorithmHandlers.DataProvider,
                _dataCacheProvider,
                mapFileProvider,
                algorithmHandlers.FactorFileProvider,
                null
            )
        );
        SetOptionChainProvider(new CachingOptionChainProvider(new BacktestingOptionChainProvider()));
        SetFutureChainProvider(new CachingFutureChainProvider(new BacktestingFutureChainProvider()));
    }
    catch (Exception exception)
    {
        // NOTE(review): wraps any failure in a generic Exception whose message embeds the original's ToString
        throw new Exception("QuantBook.Main(): " + exception);
    }
}
/// <summary>
/// Initializes this history provider to work for the specified job
/// </summary>
/// <param name="job">The job</param>
/// <param name="mapFileProvider">Provider used to get a map file resolver to handle equity mapping</param>
/// <param name="factorFileProvider">Provider used to get factor files to handle equity price scaling</param>
/// <param name="dataProvider">Provider used to get data when it is not present on disk</param>
/// <param name="statusUpdate">Function used to send status updates</param>
/// <param name="dataCacheProvider">Provider used to cache history data files</param>
public void Initialize(AlgorithmNodePacket job, IDataProvider dataProvider, IDataCacheProvider dataCacheProvider, IMapFileProvider mapFileProvider, IFactorFileProvider factorFileProvider, Action<int> statusUpdate)
{
    // NOTE(review): every parameter is ignored by this implementation — initialization
    // here consists solely of establishing the connection
    Connect();
}
// Test override: ignores the request details and always hands back the reader injected into this instance.
protected override ISubscriptionDataSourceReader GetSubscriptionDataSourceReader(SubscriptionDataSource source,
    IDataCacheProvider dataCacheProvider, SubscriptionDataConfig config, DateTime date)
    => _dataSourceReader;
/// <summary>
/// Gets the <see cref="ISubscriptionDataSourceReader"/> for the specified source
/// </summary>
protected virtual ISubscriptionDataSourceReader GetSubscriptionDataSourceReader(SubscriptionDataSource source,
    IDataCacheProvider dataCacheProvider,
    SubscriptionDataConfig config,
    DateTime date)
{
    // NOTE(review): the literal 'true' flag presumably selects live mode in ForSource — confirm against its signature
    return SubscriptionDataSourceReader.ForSource(source, dataCacheProvider, config, date, true);
}
/// <summary>
/// Captures the injected collaborators used by derived timesheet event handlers.
/// </summary>
protected TimesheetEventHandler(IInitEfficiencyRunner initEfficiencyRunner,
    ICacheRepositoryFactory<ITransactionCacheRepository> transactionCacheRepositoryFactory,
    ICacheRepositoryFactory<IEmployeeEfficiencyCacheRepository> efficiencyCacheRepositoryFactory,
    IDataCacheProvider<EfficiencyTimesheetCache> timesheetCacheProvider,
    ILiveEfficiencyEngine liveEfficiencyEngine)
{
    // straight dependency capture; no validation is performed here
    _liveEfficiencyEngine = liveEfficiencyEngine;
    _initEfficiencyRunner = initEfficiencyRunner;
    _timesheetCacheProvider = timesheetCacheProvider;
    _efficiencyCacheRepositoryFactory = efficiencyCacheRepositoryFactory;
    _transactionCacheRepositoryFactory = transactionCacheRepositoryFactory;
}
/// <summary>
/// Creates an instance of the PortfolioLooper class
/// </summary>
/// <param name="startingCash">Equity curve</param>
/// <param name="orders">Order events</param>
/// <param name="resolution">Optional parameter to override default resolution (Hourly)</param>
private PortfolioLooper(double startingCash, List<Order> orders, Resolution resolution = _resolution)
{
    // Initialize the providers that the HistoryProvider requires
    var factorFileProvider = Composer.Instance.GetExportedValueByTypeName<IFactorFileProvider>("LocalDiskFactorFileProvider");
    var mapFileProvider = Composer.Instance.GetExportedValueByTypeName<IMapFileProvider>("LocalDiskMapFileProvider");
    _cacheProvider = new ZipDataCacheProvider(new DefaultDataProvider(), false);
    var historyProvider = new SubscriptionDataReaderHistoryProvider();
    var dataPermissionManager = new DataPermissionManager();
    // no-op status callback; parallel history requests disabled
    historyProvider.Initialize(new HistoryProviderInitializeParameters(null, null, null, _cacheProvider, mapFileProvider, factorFileProvider, (_) => { }, false, dataPermissionManager));
    Algorithm = new PortfolioLooperAlgorithm((decimal)startingCash, orders);
    Algorithm.SetHistoryProvider(historyProvider);

    // Dummy LEAN datafeed classes and initializations that essentially do nothing
    var job = new BacktestNodePacket(1, 2, "3", null, 9m, $"");
    var feed = new MockDataFeed();

    // Create MHDB and Symbol properties DB instances for the DataManager
    var marketHoursDatabase = MarketHoursDatabase.FromDataFolder();
    var symbolPropertiesDataBase = SymbolPropertiesDatabase.FromDataFolder();
    _dataManager = new DataManager(feed,
        new UniverseSelection(
            Algorithm,
            new SecurityService(Algorithm.Portfolio.CashBook,
                marketHoursDatabase,
                symbolPropertiesDataBase,
                Algorithm,
                RegisteredSecurityDataTypesProvider.Null,
                new SecurityCacheProvider(Algorithm.Portfolio)),
            dataPermissionManager,
            new DefaultDataProvider()),
        Algorithm,
        Algorithm.TimeKeeper,
        marketHoursDatabase,
        false,
        RegisteredSecurityDataTypesProvider.Null,
        dataPermissionManager);
    _securityService = new SecurityService(Algorithm.Portfolio.CashBook,
        marketHoursDatabase,
        symbolPropertiesDataBase,
        Algorithm,
        RegisteredSecurityDataTypesProvider.Null,
        new SecurityCacheProvider(Algorithm.Portfolio));
    var transactions = new BacktestingTransactionHandler();
    _resultHandler = new BacktestingResultHandler();

    // Initialize security services and other properties so that we
    // don't get null reference exceptions during our re-calculation
    Algorithm.Securities.SetSecurityService(_securityService);
    Algorithm.SubscriptionManager.SetDataManager(_dataManager);

    // Initializes all the proper Securities from the orders provided by the user
    Algorithm.FromOrders(orders);

    // Initialize the algorithm
    Algorithm.Initialize();
    Algorithm.PostInitialize();

    // More initialization, this time with Algorithm and other misc. classes
    _resultHandler.Initialize(job, new Messaging.Messaging(), new Api.Api(), transactions);
    _resultHandler.SetAlgorithm(Algorithm, Algorithm.Portfolio.TotalPortfolioValue);
    Algorithm.Transactions.SetOrderProcessor(transactions);
    transactions.Initialize(Algorithm, new BacktestingBrokerage(Algorithm), _resultHandler);
    feed.Initialize(Algorithm, job, _resultHandler, null, null, null, _dataManager, null, null);

    // Begin setting up the currency conversion feed if needed
    var coreSecurities = Algorithm.Securities.Values.ToList();
    if (coreSecurities.Any(x => x.Symbol.SecurityType == SecurityType.Forex || x.Symbol.SecurityType == SecurityType.Crypto))
    {
        BaseSetupHandler.SetupCurrencyConversions(Algorithm, _dataManager.UniverseSelection);
        // securities added by SetupCurrencyConversions (i.e. not in the original snapshot) need conversion data
        var conversionSecurities = Algorithm.Securities.Values.Where(s => !coreSecurities.Contains(s)).ToList();
        // Skip the history request if we don't need to convert anything
        if (conversionSecurities.Any())
        {
            // Point-in-time Slices to convert FX and Crypto currencies to the portfolio currency
            _conversionSlices = GetHistory(Algorithm, conversionSecurities, resolution);
        }
    }
}
/// <summary>
/// Initializes this history provider to work for the specified job
/// </summary>
/// <param name="job">The job</param>
/// <param name="mapFileProvider">Provider used to get a map file resolver to handle equity mapping</param>
/// <param name="factorFileProvider">Provider used to get factor files to handle equity price scaling</param>
/// <param name="dataProvider">Provider used to get data when it is not present on disk</param>
/// <param name="statusUpdate">Function used to send status updates</param>
/// <param name="dataCacheProvider">Provider used to cache history data files</param>
public override void Initialize(AlgorithmNodePacket job, IDataProvider dataProvider, IDataCacheProvider dataCacheProvider, IMapFileProvider mapFileProvider, IFactorFileProvider factorFileProvider, Action<int> statusUpdate)
{
    // only the providers are retained; the job and status callback are not used by this implementation
    _dataProvider = dataProvider;
    _mapFileProvider = mapFileProvider;
    _dataCacheProvider = dataCacheProvider;
    _factorFileProvider = factorFileProvider;
}
/// <summary>
/// Runs a single backtest/live job from the job queue
/// </summary>
/// <param name="job">The algorithm job to be processed</param>
/// <param name="manager">The algorithm manager instance</param>
/// <param name="assemblyPath">The path to the algorithm's assembly</param>
/// <param name="workerThread">The worker thread instance</param>
public void Run(AlgorithmNodePacket job, AlgorithmManager manager, string assemblyPath, WorkerThread workerThread)
{
    // Kick off static initialization (e.g. market hours database load) concurrently
    // with the rest of the setup; the result is awaited below via .Result
    var marketHoursDatabaseTask = Task.Run(() => StaticInitializations());

    // algorithm stays null until the setup handler creates the instance
    var algorithm = default(IAlgorithm);
    var algorithmManager = manager;

    try
    {
        //Reset thread holders.
        var initializeComplete = false;

        //-> Initialize messaging system
        SystemHandlers.Notify.SetAuthentication(job);

        //-> Set the result handler type for this algorithm job, and launch the associated result thread.
        AlgorithmHandlers.Results.Initialize(job, SystemHandlers.Notify, SystemHandlers.Api, AlgorithmHandlers.Transactions);

        // declared outside the inner try so the teardown code below can still
        // reach them if initialization throws part-way through
        IBrokerage brokerage = null;
        DataManager dataManager = null;
        IDataCacheProvider historyDataCacheProvider = null;
        var synchronizer = _liveMode ? new LiveSynchronizer() : new Synchronizer();
        try
        {
            // we get the mhdb before creating the algorithm instance,
            // since the algorithm constructor will use it
            var marketHoursDatabase = marketHoursDatabaseTask.Result;

            AlgorithmHandlers.Setup.WorkerThread = workerThread;

            // Save algorithm to cache, load algorithm instance:
            algorithm = AlgorithmHandlers.Setup.CreateAlgorithmInstance(job, assemblyPath);

            // Set algorithm in ILeanManager
            SystemHandlers.LeanManager.SetAlgorithm(algorithm);

            // initialize the alphas handler with the algorithm instance
            AlgorithmHandlers.Alphas.Initialize(job, algorithm, SystemHandlers.Notify, SystemHandlers.Api, AlgorithmHandlers.Transactions);

            // initialize the object store
            AlgorithmHandlers.ObjectStore.Initialize(algorithm.Name, job.UserId, job.ProjectId, job.UserToken, job.Controls);

            // initialize the data permission manager
            AlgorithmHandlers.DataPermissionsManager.Initialize(job);

            // notify the user of any errors w/ object store persistence
            AlgorithmHandlers.ObjectStore.ErrorRaised += (sender, args) => algorithm.Debug($"ObjectStore Persistence Error: {args.Error.Message}");

            // Initialize the brokerage
            IBrokerageFactory factory;
            brokerage = AlgorithmHandlers.Setup.CreateBrokerage(job, algorithm, out factory);

            var symbolPropertiesDatabase = SymbolPropertiesDatabase.FromDataFolder();

            var registeredTypesProvider = new RegisteredSecurityDataTypesProvider();
            var securityService = new SecurityService(algorithm.Portfolio.CashBook,
                marketHoursDatabase,
                symbolPropertiesDatabase,
                algorithm,
                registeredTypesProvider,
                new SecurityCacheProvider(algorithm.Portfolio));

            algorithm.Securities.SetSecurityService(securityService);

            dataManager = new DataManager(AlgorithmHandlers.DataFeed,
                new UniverseSelection(
                    algorithm,
                    securityService,
                    AlgorithmHandlers.DataPermissionsManager),
                algorithm,
                algorithm.TimeKeeper,
                marketHoursDatabase,
                _liveMode,
                registeredTypesProvider,
                AlgorithmHandlers.DataPermissionsManager);

            AlgorithmHandlers.Results.SetDataManager(dataManager);
            algorithm.SubscriptionManager.SetDataManager(dataManager);

            synchronizer.Initialize(algorithm, dataManager);

            // Initialize the data feed before we initialize so he can intercept added securities/universes via events
            AlgorithmHandlers.DataFeed.Initialize(
                algorithm,
                job,
                AlgorithmHandlers.Results,
                AlgorithmHandlers.MapFileProvider,
                AlgorithmHandlers.FactorFileProvider,
                AlgorithmHandlers.DataProvider,
                dataManager,
                (IDataFeedTimeProvider)synchronizer,
                AlgorithmHandlers.DataPermissionsManager.DataChannelProvider);

            // set the order processor on the transaction manager (needs to be done before initializing BrokerageHistoryProvider)
            algorithm.Transactions.SetOrderProcessor(AlgorithmHandlers.Transactions);

            // set the history provider before setting up the algorithm
            var historyProvider = GetHistoryProvider(job.HistoryProvider);
            if (historyProvider is BrokerageHistoryProvider)
            {
                (historyProvider as BrokerageHistoryProvider).SetBrokerage(brokerage);
            }

            // cache is ephemeral in live mode so stale zip entries are not reused
            historyDataCacheProvider = new ZipDataCacheProvider(AlgorithmHandlers.DataProvider, isDataEphemeral: _liveMode);
            historyProvider.Initialize(
                new HistoryProviderInitializeParameters(
                    job,
                    SystemHandlers.Api,
                    AlgorithmHandlers.DataProvider,
                    historyDataCacheProvider,
                    AlgorithmHandlers.MapFileProvider,
                    AlgorithmHandlers.FactorFileProvider,
                    progress =>
                    {
                        // send progress updates to the result handler only during initialization
                        if (!algorithm.GetLocked() || algorithm.IsWarmingUp)
                        {
                            AlgorithmHandlers.Results.SendStatusUpdate(AlgorithmStatus.History,
                                Invariant($"Processing history {progress}%..."));
                        }
                    },
                    // disable parallel history requests for live trading
                    parallelHistoryRequestsEnabled: !_liveMode,
                    dataPermissionManager: AlgorithmHandlers.DataPermissionsManager
                )
            );

            // surface history provider problems through the result handler so the user sees them
            historyProvider.InvalidConfigurationDetected += (sender, args) => { AlgorithmHandlers.Results.ErrorMessage(args.Message); };
            historyProvider.NumericalPrecisionLimited += (sender, args) => { AlgorithmHandlers.Results.DebugMessage(args.Message); };
            historyProvider.DownloadFailed += (sender, args) => { AlgorithmHandlers.Results.ErrorMessage(args.Message, args.StackTrace); };
            historyProvider.ReaderErrorDetected += (sender, args) => { AlgorithmHandlers.Results.RuntimeError(args.Message, args.StackTrace); };

            algorithm.HistoryProvider = historyProvider;

            // initialize the default brokerage message handler
            algorithm.BrokerageMessageHandler = factory.CreateBrokerageMessageHandler(algorithm, job, SystemHandlers.Api);

            //Initialize the internal state of algorithm and job: executes the algorithm.Initialize() method.
            initializeComplete = AlgorithmHandlers.Setup.Setup(new SetupHandlerParameters(dataManager.UniverseSelection, algorithm, brokerage, job, AlgorithmHandlers.Results, AlgorithmHandlers.Transactions, AlgorithmHandlers.RealTime, AlgorithmHandlers.ObjectStore));

            // set this again now that we've actually added securities
            AlgorithmHandlers.Results.SetAlgorithm(algorithm, AlgorithmHandlers.Setup.StartingPortfolioValue);

            // alpha handler needs start/end dates to determine sample step sizes
            AlgorithmHandlers.Alphas.OnAfterAlgorithmInitialized(algorithm);

            //If there are any reasons it failed, pass these back to the IDE.
            if (!initializeComplete || algorithm.ErrorMessages.Count > 0 || AlgorithmHandlers.Setup.Errors.Count > 0)
            {
                initializeComplete = false;
                //Get all the error messages: internal in algorithm and external in setup handler.
                var errorMessage = string.Join(",", algorithm.ErrorMessages);
                errorMessage += string.Join(",", AlgorithmHandlers.Setup.Errors.Select(e =>
                {
                    var message = e.Message;
                    if (e.InnerException != null)
                    {
                        // interpret inner exceptions into user-friendly headers where possible
                        var err = _exceptionInterpreter.Value.Interpret(e.InnerException, _exceptionInterpreter.Value);
                        message += _exceptionInterpreter.Value.GetExceptionMessageHeader(err);
                    }
                    return(message);
                }));
                Log.Error("Engine.Run(): " + errorMessage);
                AlgorithmHandlers.Results.RuntimeError(errorMessage);
                SystemHandlers.Api.SetAlgorithmStatus(job.AlgorithmId, AlgorithmStatus.RuntimeError, errorMessage);
            }
        }
        catch (Exception err)
        {
            Log.Error(err);
            var runtimeMessage = "Algorithm.Initialize() Error: " + err.Message + " Stack Trace: " + err;
            AlgorithmHandlers.Results.RuntimeError(runtimeMessage, err.ToString());
            SystemHandlers.Api.SetAlgorithmStatus(job.AlgorithmId, AlgorithmStatus.RuntimeError, runtimeMessage);
        }

        // log the job endpoints
        Log.Trace("JOB HANDLERS: ");
        Log.Trace(" DataFeed: " + AlgorithmHandlers.DataFeed.GetType().FullName);
        Log.Trace(" Setup: " + AlgorithmHandlers.Setup.GetType().FullName);
        Log.Trace(" RealTime: " + AlgorithmHandlers.RealTime.GetType().FullName);
        Log.Trace(" Results: " + AlgorithmHandlers.Results.GetType().FullName);
        Log.Trace(" Transactions: " + AlgorithmHandlers.Transactions.GetType().FullName);
        Log.Trace(" Alpha: " + AlgorithmHandlers.Alphas.GetType().FullName);
        Log.Trace(" ObjectStore: " + AlgorithmHandlers.ObjectStore.GetType().FullName);
        if (algorithm?.HistoryProvider != null)
        {
            Log.Trace(" History Provider: " + algorithm.HistoryProvider.GetType().FullName);
        }
        if (job is LiveNodePacket)
        {
            Log.Trace(" Brokerage: " + brokerage?.GetType().FullName);
        }

        //-> Using the job + initialization: load the designated handlers:
        if (initializeComplete)
        {
            // notify the LEAN manager that the algorithm is initialized and starting
            SystemHandlers.LeanManager.OnAlgorithmStart();

            //-> Reset the backtest stopwatch; we're now running the algorithm.
            var startTime = DateTime.UtcNow;

            //Set algorithm as locked; set it to live mode if we're trading live, and set it to locked for no further updates.
            algorithm.SetAlgorithmId(job.AlgorithmId);
            algorithm.SetLocked();

            //Load the associated handlers for transaction and realtime events:
            AlgorithmHandlers.Transactions.Initialize(algorithm, brokerage, AlgorithmHandlers.Results);
            AlgorithmHandlers.RealTime.Setup(algorithm, job, AlgorithmHandlers.Results, SystemHandlers.Api, algorithmManager.TimeLimit);

            // wire up the brokerage message handler
            brokerage.Message += (sender, message) =>
            {
                algorithm.BrokerageMessageHandler.Handle(message);

                // fire brokerage message events
                algorithm.OnBrokerageMessage(message);
                switch (message.Type)
                {
                    case BrokerageMessageType.Disconnect:
                        algorithm.OnBrokerageDisconnect();
                        break;
                    case BrokerageMessageType.Reconnect:
                        algorithm.OnBrokerageReconnect();
                        break;
                }
            };

            //Send status to user the algorithm is now executing.
            AlgorithmHandlers.Results.SendStatusUpdate(AlgorithmStatus.Running);

            // Result manager scanning message queue: (started earlier)
            AlgorithmHandlers.Results.DebugMessage(
                $"Launching analysis for {job.AlgorithmId} with LEAN Engine v{Globals.Version}");

            try
            {
                //Create a new engine isolator class
                var isolator = new Isolator();

                // Execute the Algorithm Code:
                var complete = isolator.ExecuteWithTimeLimit(AlgorithmHandlers.Setup.MaximumRuntime, algorithmManager.TimeLimit.IsWithinLimit, () =>
                {
                    try
                    {
                        //Run Algorithm Job:
                        // -> Using this Data Feed,
                        // -> Send Orders to this TransactionHandler,
                        // -> Send Results to ResultHandler.
                        algorithmManager.Run(job, algorithm, synchronizer, AlgorithmHandlers.Transactions, AlgorithmHandlers.Results, AlgorithmHandlers.RealTime, SystemHandlers.LeanManager, AlgorithmHandlers.Alphas, isolator.CancellationToken);
                    }
                    catch (Exception err)
                    {
                        //Debugging at this level is difficult, stack trace needed.
                        Log.Error(err);
                        algorithm.RunTimeError = err;
                        algorithmManager.SetStatus(AlgorithmStatus.RuntimeError);
                        return;
                    }

                    Log.Trace("Engine.Run(): Exiting Algorithm Manager");
                }, job.Controls.RamAllocation, workerThread: workerThread);

                if (!complete)
                {
                    Log.Error("Engine.Main(): Failed to complete in time: " + AlgorithmHandlers.Setup.MaximumRuntime.ToStringInvariant("F"));
                    throw new Exception("Failed to complete algorithm within " + AlgorithmHandlers.Setup.MaximumRuntime.ToStringInvariant("F") + " seconds. Please make it run faster.");
                }

                // Algorithm runtime error:
                if (algorithm.RunTimeError != null)
                {
                    HandleAlgorithmError(job, algorithm.RunTimeError);
                }
            }
            catch (Exception err)
            {
                //Error running the user algorithm: purge datafeed, send error messages, set algorithm status to failed.
                algorithm.RunTimeError = err;
                algorithm.SetStatus(AlgorithmStatus.RuntimeError);
                HandleAlgorithmError(job, err);
            }

            // notify the LEAN manager that the algorithm has finished
            SystemHandlers.LeanManager.OnAlgorithmEnd();

            try
            {
                // optionally dump the executed trades to csv, controlled by config
                var csvTransactionsFileName = Config.Get("transaction-log");
                if (!string.IsNullOrEmpty(csvTransactionsFileName))
                {
                    SaveListOfTrades(AlgorithmHandlers.Transactions, csvTransactionsFileName);
                }

                if (!_liveMode)
                {
                    //Diagnostics Completed, Send Result Packet:
                    var totalSeconds = (DateTime.UtcNow - startTime).TotalSeconds;
                    var dataPoints = algorithmManager.DataPoints + algorithm.HistoryProvider.DataPointCount;
                    // kps = thousands of data points processed per second
                    var kps = dataPoints / (double)1000 / totalSeconds;
                    AlgorithmHandlers.Results.DebugMessage($"Algorithm Id:({job.AlgorithmId}) completed in {totalSeconds:F2} seconds at {kps:F0}k data points per second. Processing total of {dataPoints:N0} data points.");
                }
            }
            catch (Exception err)
            {
                Log.Error(err, "Error sending analysis results");
            }

            //Before we return, send terminate commands to close up the threads
            AlgorithmHandlers.Transactions.Exit();
            AlgorithmHandlers.RealTime.Exit();
            dataManager?.RemoveAllSubscriptions();
            workerThread?.Dispose();
        }

        // Close data feed, alphas. Could be running even if algorithm initialization failed
        AlgorithmHandlers.DataFeed.Exit();
        AlgorithmHandlers.Alphas.Exit();

        //Close result handler:
        AlgorithmHandlers.Results.Exit();

        //Wait for the threads to complete, polling every 10ms with a 30 second hard cap:
        var millisecondInterval = 10;
        var millisecondTotalWait = 0;
        while ((AlgorithmHandlers.Results.IsActive
            || (AlgorithmHandlers.Transactions != null && AlgorithmHandlers.Transactions.IsActive)
            || (AlgorithmHandlers.DataFeed != null && AlgorithmHandlers.DataFeed.IsActive)
            || (AlgorithmHandlers.RealTime != null && AlgorithmHandlers.RealTime.IsActive)
            || (AlgorithmHandlers.Alphas != null && AlgorithmHandlers.Alphas.IsActive))
            && millisecondTotalWait < 30 * 1000)
        {
            Thread.Sleep(millisecondInterval);
            // log a progress message every 100ms so long shutdowns are visible
            if (millisecondTotalWait % (millisecondInterval * 10) == 0)
            {
                Log.Trace("Waiting for threads to exit...");
            }
            millisecondTotalWait += millisecondInterval;
        }

        if (brokerage != null)
        {
            Log.Trace("Engine.Run(): Disconnecting from brokerage...");
            brokerage.Disconnect();
            brokerage.Dispose();
        }
        if (AlgorithmHandlers.Setup != null)
        {
            Log.Trace("Engine.Run(): Disposing of setup handler...");
            AlgorithmHandlers.Setup.Dispose();
        }

        historyDataCacheProvider.DisposeSafely();
        Log.Trace("Engine.Main(): Analysis Completed and Results Posted.");
    }
    catch (Exception err)
    {
        Log.Error(err, "Error running algorithm");
    }
    finally
    {
        //No matter what for live mode; make sure we've set algorithm status in the API for "not running" conditions:
        if (_liveMode && algorithmManager.State != AlgorithmStatus.Running && algorithmManager.State != AlgorithmStatus.RuntimeError)
        {
            SystemHandlers.Api.SetAlgorithmStatus(job.AlgorithmId, algorithmManager.State);
        }

        // idempotent handler shutdown, safe to call even after the normal exit path above
        AlgorithmHandlers.Results.Exit();
        AlgorithmHandlers.DataFeed.Exit();
        AlgorithmHandlers.Transactions.Exit();
        AlgorithmHandlers.RealTime.Exit();
    }
}
/// <summary>
/// <see cref = "QuantBook" /> constructor.
/// Provides access to data for quantitative analysis
/// </summary>
public QuantBook() : base()
{
    try
    {
        // Import pandas under the Python GIL so the converter below can build DataFrames
        using (Py.GIL())
        {
            _pandas = Py.Import("pandas");
        }

        // By default, set start date to end data which is yesterday
        SetStartDate(EndDate);

        // Sets PandasConverter
        SetPandasConverter();

        // Initialize History Provider
        var composer = new Composer();
        var algorithmHandlers = LeanEngineAlgorithmHandlers.FromConfiguration(composer);
        var systemHandlers = LeanEngineSystemHandlers.FromConfiguration(composer);
        // init the API
        systemHandlers.Initialize();
        // NOTE(review): AlgorithmManager(false) — presumably false = not live mode; confirm against AlgorithmManager ctor
        systemHandlers.LeanManager.Initialize(systemHandlers, algorithmHandlers, new BacktestNodePacket(), new AlgorithmManager(false));
        systemHandlers.LeanManager.SetAlgorithm(this);

        // Object store identity/limits come from configuration, with sensible notebook defaults
        algorithmHandlers.ObjectStore.Initialize("QuantBook",
            Config.GetInt("job-user-id"),
            Config.GetInt("project-id"),
            Config.Get("api-access-token"),
            new Controls
            {
                // if <= 0 we disable periodic persistence and make it synchronous
                PersistenceIntervalSeconds = -1,
                StorageLimitMB = Config.GetInt("storage-limit-mb", 5),
                StorageFileCount = Config.GetInt("storage-file-count", 100),
                StoragePermissions = (FileAccess)Config.GetInt("storage-permissions", (int)FileAccess.ReadWrite)
            });
        SetObjectStore(algorithmHandlers.ObjectStore);

        _dataCacheProvider = new ZipDataCacheProvider(algorithmHandlers.DataProvider);

        var symbolPropertiesDataBase = SymbolPropertiesDatabase.FromDataFolder();
        var registeredTypes = new RegisteredSecurityDataTypesProvider();
        var securityService = new SecurityService(Portfolio.CashBook,
            MarketHoursDatabase,
            symbolPropertiesDataBase,
            this,
            registeredTypes,
            new SecurityCacheProvider(Portfolio));
        Securities.SetSecurityService(securityService);
        // NullDataFeed: the notebook has no streaming data feed; data access goes through history requests
        SubscriptionManager.SetDataManager(
            new DataManager(new NullDataFeed(),
                new UniverseSelection(this, securityService),
                this,
                TimeKeeper,
                MarketHoursDatabase,
                false,
                registeredTypes));

        var mapFileProvider = algorithmHandlers.MapFileProvider;
        // History provider type is configurable; defaults to the subscription-data-reader-based provider
        HistoryProvider = composer.GetExportedValueByTypeName<IHistoryProvider>(Config.Get("history-provider", "SubscriptionDataReaderHistoryProvider"));
        HistoryProvider.Initialize(
            new HistoryProviderInitializeParameters(
                null,
                null,
                algorithmHandlers.DataProvider,
                _dataCacheProvider,
                mapFileProvider,
                algorithmHandlers.FactorFileProvider,
                null,
                true
            )
        );

        SetOptionChainProvider(new CachingOptionChainProvider(new BacktestingOptionChainProvider()));
        SetFutureChainProvider(new CachingFutureChainProvider(new BacktestingFutureChainProvider()));
    }
    catch (Exception exception)
    {
        // wrap any initialization failure so the notebook user sees a single clear error
        throw new Exception("QuantBook.Main(): " + exception);
    }
}
/// <summary>
/// Subscription data reader takes a subscription request, loads the type, accepts the data source and enumerate on the results.
/// </summary>
/// <param name="config">Subscription configuration object</param>
/// <param name="periodStart">Start date for the data request/backtest</param>
/// <param name="periodFinish">Finish date for the data request/backtest</param>
/// <param name="resultHandler">Result handler used to push error messages and perform sampling on skipped days</param>
/// <param name="mapFileResolver">Used for resolving the correct map files</param>
/// <param name="factorFileProvider">Used for getting factor files</param>
/// <param name="dataProvider">Used for getting files not present on disk</param>
/// <param name="dataCacheProvider">Used for caching files</param>
/// <param name="tradeableDates">Defines the dates for which we'll request data, in order, in the security's exchange time zone</param>
/// <param name="isLiveMode">True if we're in live mode, false otherwise</param>
/// <param name="includeAuxilliaryData">True if we want to emit aux data, false to only emit price data</param>
public SubscriptionDataReader(SubscriptionDataConfig config,
    DateTime periodStart,
    DateTime periodFinish,
    IResultHandler resultHandler,
    MapFileResolver mapFileResolver,
    IFactorFileProvider factorFileProvider,
    IDataProvider dataProvider,
    IEnumerable<DateTime> tradeableDates,
    bool isLiveMode,
    IDataCacheProvider dataCacheProvider,
    bool includeAuxilliaryData = true)
{
    //Save configuration of data-subscription:
    _config = config;
    _auxiliaryData = new Queue<BaseData>();

    //Save Start and End Dates:
    _periodStart = periodStart;
    _periodFinish = periodFinish;
    _dataProvider = dataProvider;
    _dataCacheProvider = dataCacheProvider;

    //Save access to securities
    _isLiveMode = isLiveMode;
    _includeAuxilliaryData = includeAuxilliaryData;

    //Create the dynamic type-activator for the configured data type:
    var objectActivator = ObjectActivator.GetActivator(config.Type);

    _resultHandler = resultHandler;
    _tradeableDates = tradeableDates.GetEnumerator();
    if (objectActivator == null)
    {
        // Without a parameterless constructor we cannot create instances of the
        // data type; report the error and mark the stream finished immediately.
        _resultHandler.ErrorMessage("Custom data type '" + config.Type.Name + "' missing parameterless constructor E.g. public " + config.Type.Name + "() { }");
        _endOfStream = true;
        return;
    }

    //Create an instance of the "Type": used below as the factory for reading data points
    var userObj = objectActivator.Invoke(new object[] { });

    _dataFactory = userObj as BaseData;

    //If its quandl set the access token in data factory:
    var quandl = _dataFactory as Quandl;
    if (quandl != null)
    {
        if (!Quandl.IsAuthCodeSet)
        {
            Quandl.SetAuthCode(Config.Get("quandl-auth-token"));
        }
    }

    // start with empty factor/map files; they are replaced below if real ones resolve
    _factorFile = new FactorFile(config.Symbol.Value, new List<FactorFileRow>());
    _mapFile = new MapFile(config.Symbol.Value, new List<MapFileRow>());

    // load up the map and factor files for equities
    if (!config.IsCustomData && config.SecurityType == SecurityType.Equity)
    {
        try
        {
            var mapFile = mapFileResolver.ResolveMapFile(config.Symbol.ID.Symbol, config.Symbol.ID.Date);

            // only take the resolved map file if it has data, otherwise we'll use the empty one we defined above
            if (mapFile.Any())
            {
                _mapFile = mapFile;
            }

            var factorFile = factorFileProvider.Get(_config.Symbol);
            _hasScaleFactors = factorFile != null;
            if (_hasScaleFactors)
            {
                _factorFile = factorFile;
            }
        }
        catch (Exception err)
        {
            // best-effort: a missing map/factor file should not abort the subscription
            Log.Error(err, "Fetching Price/Map Factors: " + config.Symbol.ID + ": ");
        }
    }

    // load up the map and factor files for underlying of equity option
    if (!config.IsCustomData && config.SecurityType == SecurityType.Option)
    {
        try
        {
            var mapFile = mapFileResolver.ResolveMapFile(config.Symbol.Underlying.ID.Symbol, config.Symbol.Underlying.ID.Date);

            // only take the resolved map file if it has data, otherwise we'll use the empty one we defined above
            if (mapFile.Any())
            {
                _mapFile = mapFile;
            }
        }
        catch (Exception err)
        {
            // best-effort, as above
            Log.Error(err, "Map Factors: " + config.Symbol.ID + ": ");
        }
    }

    // build the initial enumerator over the resolved data source
    _subscriptionFactoryEnumerator = ResolveDataEnumerator(true);
}
/// <summary>
/// Create a new lean data writer to this base data directory.
/// </summary>
/// <param name="dataDirectory">Base data directory</param>
/// <param name="resolution">Resolution of the desired output data</param>
/// <param name="securityType">The security type</param>
/// <param name="tickType">The tick type</param>
/// <param name="dataCacheProvider">The data cache provider to use</param>
/// <param name="writePolicy">The file write policy to use. If not provided, defaults to
/// <see cref="WritePolicy.Merge"/> for hour/daily resolutions and <see cref="WritePolicy.Overwrite"/> otherwise</param>
public LeanDataWriter(string dataDirectory, Resolution resolution, SecurityType securityType, TickType tickType, IDataCacheProvider dataCacheProvider = null, WritePolicy? writePolicy = null)
{
    _dataDirectory = dataDirectory;
    _resolution = resolution;
    _securityType = securityType;
    _tickType = tickType;
    if (writePolicy == null)
    {
        // Hour and daily data are stored in a single file per symbol, so new data must be
        // merged with existing content; lower resolutions can safely overwrite per-date files
        _writePolicy = resolution >= Resolution.Hour ? WritePolicy.Merge : WritePolicy.Overwrite;
    }
    else
    {
        _writePolicy = writePolicy.Value;
    }
    _dataCacheProvider = dataCacheProvider ?? new DiskDataCacheProvider();
}
/// <summary>
/// Creates a new instance
/// </summary>
/// <param name="dataCacheProvider">The data cache provider instance to use</param>
public LiveFutureChainProvider(IDataCacheProvider dataCacheProvider) : base(dataCacheProvider)
{
    // All behavior lives in the base class; this constructor only forwards the cache provider.
}