/// <summary>
/// Source URL's of Backtesting and Live Streams:
/// </summary>
/// <param name="config">Subscription configuration object</param>
/// <param name="date">Date of the requested data (unused: single fixed file)</param>
/// <param name="datafeed">Endpoint requesting the source: backtest or live</param>
/// <returns>String URL of the data source, empty string for unhandled endpoints</returns>
public override string GetSource(SubscriptionDataConfig config, DateTime date, DataFeedEndpoint datafeed)
{
    switch (datafeed)
    {
        //Live socket for bitcoin prices: refreshing ticker endpoint.
        case DataFeedEndpoint.Tradier:
        case DataFeedEndpoint.LiveTrading:
            return "https://www.bitstamp.net/api/ticker/";

        //Historical backtesting data:
        case DataFeedEndpoint.Backtesting:
            return "http://www.quandl.com/api/v1/datasets/BITCOIN/BITSTAMPUSD.csv?sort_order=asc";

        //Unhandled endpoints fall through to an empty source.
        default:
            return "";
    }
}
/// <summary>
/// Parse a line from the CSV's into our trade bars.
/// </summary>
/// <param name="config">QC SecurityType of the tradebar</param>
/// <param name="line">CSV from data files.</param>
/// <param name="baseDate">Base date of this tick</param>
/// <param name="datafeed">Datafeed this csv line is sourced from</param>
public TradeBar(SubscriptionDataConfig config, string line, DateTime baseDate, DataFeedEndpoint datafeed = DataFeedEndpoint.Backtesting)
{
    try
    {
        //Equity prices are stored scaled up by 10,000 to avoid decimals in the files:
        const decimal scaleFactor = 10000m;

        var columns = line.Split(',');
        base.Symbol = config.Symbol;

        switch (config.Security)
        {
            //Equity File Data Format: time is a millisecond offset from midnight of baseDate.
            case SecurityType.Equity:
                base.Time = baseDate.Date.AddMilliseconds(Convert.ToInt32(columns[0]));
                //Custom "ToDecimal" conversion used for speed; descale then apply split/dividend factor:
                Open = (columns[1].ToDecimal() / scaleFactor) * config.PriceScaleFactor;
                High = (columns[2].ToDecimal() / scaleFactor) * config.PriceScaleFactor;
                Low = (columns[3].ToDecimal() / scaleFactor) * config.PriceScaleFactor;
                Close = (columns[4].ToDecimal() / scaleFactor) * config.PriceScaleFactor;
                Volume = Convert.ToInt64(columns[5]);
                break;

            //FOREX has a different data file format: full timestamp, unscaled prices, no volume.
            case SecurityType.Forex:
                base.Time = DateTime.ParseExact(columns[0], "yyyyMMdd HH:mm:ss.ffff", CultureInfo.InvariantCulture);
                Open = columns[1].ToDecimal();
                High = columns[2].ToDecimal();
                Low = columns[3].ToDecimal();
                Close = columns[4].ToDecimal();
                break;
        }

        base.Value = Close;
    }
    catch (Exception err)
    {
        Log.Error("DataModels: TradeBar(): Error Initializing - " + config.Security + " - " + err.Message + " - " + line);
    }
}
/// <summary>
/// 2. RETURN THE STRING URL SOURCE LOCATION FOR YOUR DATA:
/// This is a powerful and dynamic select source file method. If you have a large dataset, 10+mb we recommend you break it into smaller files. E.g. One zip per year.
/// We can accept raw text or ZIP files. We read the file extension to determine if it is a zip file.
/// </summary>
/// <param name="config">Subscription data, symbol name, data type</param>
/// <param name="date">Current date we're requesting. This allows you to break up the data source into daily files.</param>
/// <param name="datafeed">Datafeed type: Backtesting or the Live data broker who will provide live data. You can specify a different source for live trading!</param>
/// <returns>string URL end point.</returns>
public override string GetSource(SubscriptionDataConfig config, DateTime date, DataFeedEndpoint datafeed)
{
    //Alternative live socket data source for live trading (soon)/
    if (datafeed == DataFeedEndpoint.LiveTrading)
    {
        return "....";
    }

    //Backtesting (and any other endpoint) uses one fixed small data file.
    //Example of a data source which varies by day (commented out):
    //return "http://my-ftp-server.com/futures-data-" + date.ToString("Ymd") + ".zip";
    //Large files will slow down your backtest.
    return "http://www.quandl.com/api/v1/datasets/BITCOIN/BITSTAMPUSD.csv?sort_order=asc";
}
/// <summary>
/// 3. READER METHOD: Read 1 line from data source and convert it into Object.
/// Each line of the CSV File is presented in here. The backend downloads your file, loads it into memory and then line by line
/// feeds it into your algorithm
/// </summary>
/// <param name="line">string line from the data source file submitted above</param>
/// <param name="config">Subscription data, symbol name, data type</param>
/// <param name="date">Current date we're requesting. This allows you to break up the data source into daily files.</param>
/// <param name="datafeed">Datafeed type - Backtesting or LiveTrading</param>
/// <returns>New Bitcoin object which extends BaseData.</returns>
public override BaseData Reader(SubscriptionDataConfig config, string line, DateTime date, DataFeedEndpoint datafeed)
{
    var bitcoin = new Bitcoin();
    try
    {
        //Example File Format:
        //Date, Open High Low Close Volume (BTC) Volume (Currency) Weighted Price
        //2011-09-13 5.8 6.0 5.65 5.97 58.37138238, 346.0973893944 5.929230648356
        var csv = line.Split(',');
        bitcoin.Time = DateTime.Parse(csv[0]);
        bitcoin.Open = Convert.ToDecimal(csv[1]);
        bitcoin.High = Convert.ToDecimal(csv[2]);
        bitcoin.Low = Convert.ToDecimal(csv[3]);
        bitcoin.Close = Convert.ToDecimal(csv[4]);
        bitcoin.VolumeBTC = Convert.ToDecimal(csv[5]);
        bitcoin.VolumeUSD = Convert.ToDecimal(csv[6]);
        bitcoin.WeightedPrice = Convert.ToDecimal(csv[7]);
        bitcoin.Symbol = "BTC";
        bitcoin.Value = bitcoin.Close;
    }
    catch
    {
        //Do nothing: skip the first title row (and any malformed line) and return the empty object.
    }
    return bitcoin;
}
/// <summary>
/// Backtesting & Live Bitcoin Decoder:
/// </summary>
/// <param name="config">Subscription configuration object</param>
/// <param name="line">CSV line (backtesting) or JSON ticker (live)</param>
/// <param name="date">Date of this reader request</param>
/// <param name="datafeed">Endpoint the line came from; selects the decoder</param>
/// <returns>Populated Bitcoin object, or an empty one when the line cannot be parsed</returns>
public override BaseData Reader(SubscriptionDataConfig config, string line, DateTime date, DataFeedEndpoint datafeed)
{
    var bitcoin = new Bitcoin();

    switch (datafeed)
    {
        //Example Line Format:
        //Date Open High Low Close Volume (BTC) Volume (Currency) Weighted Price
        //2011-09-13 5.8 6.0 5.65 5.97 58.37138238, 346.0973893944 5.929230648356
        case DataFeedEndpoint.Backtesting:
            try
            {
                var csv = line.Split(',');
                bitcoin.Time = DateTime.Parse(csv[0]);
                bitcoin.Open = Convert.ToDecimal(csv[1]);
                bitcoin.High = Convert.ToDecimal(csv[2]);
                bitcoin.Low = Convert.ToDecimal(csv[3]);
                bitcoin.Close = Convert.ToDecimal(csv[4]);
                bitcoin.VolumeBTC = Convert.ToDecimal(csv[5]);
                bitcoin.WeightedPrice = Convert.ToDecimal(csv[7]);
                bitcoin.Symbol = "BTC";
                bitcoin.Value = bitcoin.Close;
            }
            catch
            {
                //Do nothing: skip the first title row.
            }
            break;

        //Example Line Format:
        //{"high": "441.00", "last": "421.86", "timestamp": "1411606877", "bid": "421.96", "vwap": "428.58", "volume": "14120.40683975", "low": "418.83", "ask": "421.99"}
        case DataFeedEndpoint.Tradier:
        case DataFeedEndpoint.LiveTrading:
            try
            {
                var ticker = JsonConvert.DeserializeObject<LiveBitcoin>(line);
                bitcoin.Time = DateTime.Now;
                //Ticker has no open price: use the last trade for both open and close.
                bitcoin.Open = ticker.Last;
                bitcoin.High = ticker.High;
                bitcoin.Low = ticker.Low;
                bitcoin.Close = ticker.Last;
                bitcoin.VolumeBTC = ticker.Volume;
                bitcoin.WeightedPrice = ticker.VWAP;
                bitcoin.Symbol = "BTC";
                bitcoin.Value = bitcoin.Close;
            }
            catch
            {
                //Do nothing: possible error in json decoding.
            }
            break;
    }

    return bitcoin;
}
/********************************************************
 * CLASS METHODS
*********************************************************/
/// <summary>
/// TradeBar Reader: Fetch the data from the QC storage and feed it line by line into the engine.
/// </summary>
/// <param name="config">Symbols, Resolution, DataType</param>
/// <param name="line">Line from the data file requested</param>
/// <param name="date">Date of this reader request</param>
/// <param name="datafeed">Destination for the this datafeed - live or backtesting</param>
/// <returns>Parsed TradeBar for file-based feeds; an empty TradeBar for the live stream</returns>
public override BaseData Reader(SubscriptionDataConfig config, string line, DateTime date, DataFeedEndpoint datafeed)
{
    //Default: empty bar, returned unchanged for feeds not handled below.
    var tradeBar = new TradeBar();

    switch (datafeed)
    {
        //Amazon S3 backtesting data and localhost files share the same CSV format,
        //so both cases use the same parsing constructor (previously duplicated bodies):
        case DataFeedEndpoint.Backtesting:
        case DataFeedEndpoint.FileSystem:
            tradeBar = new TradeBar(config, line, date, datafeed);
            break;

        //QuantConnect Live Tick Stream: not parsed here.
        case DataFeedEndpoint.LiveTrading:
            break;
    }

    //Return initialized TradeBar:
    return tradeBar;
}
/********************************************************
 * CLASS PROPERTIES
*********************************************************/
/********************************************************
 * CLASS METHODS
*********************************************************/
/// <summary>
/// Reader Method :: using a set of arguments we specify the read-out type. Enumerate
/// until the end of the data stream or file. E.g. read a CSV file line by line and
/// convert each line into a data type instance.
/// </summary>
/// <param name="config">Subscription configuration: symbol, security and data type requested.</param>
/// <param name="line">One raw line from the source returned by GetSource().</param>
/// <param name="date">Date of this reader request; used when the source is split into daily files.</param>
/// <param name="datafeed">Endpoint the line came from: backtesting or live.</param>
/// <returns>BaseData type set by Subscription Method.</returns>
public abstract BaseData Reader(SubscriptionDataConfig config, string line, DateTime date, DataFeedEndpoint datafeed);
/// <summary>
/// Parse a tick data line from Zip files.
/// </summary>
/// <param name="config">Subscription configuration object</param>
/// <param name="line">CSV Line</param>
/// <param name="date">Base date for the tick (equity times are ms offsets from midnight)</param>
/// <param name="datafeed">Datafeed the line was sourced from</param>
public Tick(SubscriptionDataConfig config, string line, DateTime date, DataFeedEndpoint datafeed)
{
    try
    {
        string[] csv = line.Split(',');
        switch (config.Security)
        {
            //Equity trade tick: ms-offset time, price scaled by 10,000, then optional
            //exchange / sale-condition / suspicious columns.
            case SecurityType.Equity:
                base.Symbol = config.Symbol;
                base.Time = date.Date.AddMilliseconds(Convert.ToInt64(csv[0]));
                base.Value = (csv[1].ToDecimal() / 10000m) * config.PriceScaleFactor;
                base.DataType = MarketDataType.Tick;
                this.TickType = TickType.Trade;
                this.Quantity = Convert.ToInt32(csv[2]);
                if (csv.Length > 3)
                {
                    this.Exchange = csv[3];
                    this.SaleCondition = csv[4];
                    //Simplified from "(csv[5] == "1") ? true : false" - the comparison is already a bool.
                    this.Suspicious = csv[5] == "1";
                }
                break;

            //Forex quote tick: full timestamp, bid/ask columns; value is the mid-price.
            case SecurityType.Forex:
                base.Symbol = config.Symbol;
                TickType = TickType.Quote;
                Time = DateTime.ParseExact(csv[0], "yyyyMMdd HH:mm:ss.ffff", CultureInfo.InvariantCulture);
                BidPrice = csv[1].ToDecimal();
                AskPrice = csv[2].ToDecimal();
                Value = BidPrice + (AskPrice - BidPrice) / 2;
                break;
        }
    }
    catch (Exception err)
    {
        Log.Error("Error Generating Tick: " + err.Message);
    }
}
/// <summary>
/// Reader: convert one CSV line of daily futures data into a Futures object.
/// </summary>
/// <param name="config">Subscription configuration; supplies the symbol</param>
/// <param name="line">CSV line: date,open,high,low,close</param>
/// <param name="date">Date of this reader request</param>
/// <param name="datafeed">Endpoint the line came from (unused)</param>
/// <returns>Populated Futures object, or an empty one for unparsable lines</returns>
public override BaseData Reader(SubscriptionDataConfig config, string line, DateTime date, DataFeedEndpoint datafeed)
{
    var futures = new Futures();
    try
    {
        var csv = line.Split(',');
        futures.Time = DateTime.ParseExact(csv[0], "M/d/yyyy", CultureInfo.InvariantCulture);
        futures.Open = Convert.ToDecimal(csv[1]);
        futures.High = Convert.ToDecimal(csv[2]);
        futures.Low = Convert.ToDecimal(csv[3]);
        futures.Close = Convert.ToDecimal(csv[4]);
        futures.Symbol = config.Symbol;
        futures.Value = futures.Close;
    }
    catch
    {
        //Do nothing: skip the first title row.
    }
    return futures;
}
/// <summary>
/// Map each futures contract symbol to its hosted CSV source file.
/// </summary>
/// <param name="config">Subscription configuration; symbol selects the file</param>
/// <param name="date">Date of the request (unused: one fixed file per symbol)</param>
/// <param name="datafeed">Endpoint requesting the source (unused)</param>
/// <returns>Dropbox URL for CL1/CL2; empty string for any other symbol</returns>
public override string GetSource(SubscriptionDataConfig config, DateTime date, DataFeedEndpoint datafeed)
{
    switch (config.Symbol)
    {
        case "CL1":
            return "https://www.dropbox.com/s/cf1c9wehmc9q8ar/CL1.csv?dl=1";
        case "CL2":
            return "https://www.dropbox.com/s/uitywhz18qkiq1m/CL2.csv?dl=1";
        default:
            return "";
    }
}
/// <summary>
/// Reader: convert one CSV line into a USD/INR close-price data point.
/// </summary>
/// <param name="config">Subscription configuration (unused; symbol is fixed)</param>
/// <param name="line">CSV line: date,close</param>
/// <param name="date">Date of this reader request</param>
/// <param name="datafeed">Endpoint the line came from (unused)</param>
/// <returns>Populated DollarRupee object, or an empty one for unparsable lines</returns>
public override BaseData Reader(SubscriptionDataConfig config, string line, DateTime date, DataFeedEndpoint datafeed)
{
    var usdInr = new DollarRupee();
    try
    {
        var csv = line.Split(',');
        usdInr.Time = DateTime.Parse(csv[0]);
        usdInr.Close = Convert.ToDecimal(csv[1]);
        usdInr.Symbol = "USDINR";
        usdInr.Value = usdInr.Close;
    }
    catch
    {
        //Skip unparsable lines (e.g. a header row).
    }
    return usdInr;
}
/// <summary>
/// Source location of the USD/INR daily close CSV file.
/// </summary>
/// <param name="config">Subscription configuration (unused)</param>
/// <param name="date">Date of the request (unused: single fixed file)</param>
/// <param name="datafeed">Endpoint requesting the source (unused)</param>
/// <returns>Dropbox URL of the USDINR csv file</returns>
public override string GetSource(SubscriptionDataConfig config, DateTime date, DataFeedEndpoint datafeed)
{
    return "https://www.dropbox.com/s/m6ecmkg9aijwzy2/USDINR.csv?dl=1";
}
/// <summary>
/// Reader: convert one CSV line of NIFTY index data into a Nifty object.
/// </summary>
/// <param name="config">Subscription configuration (unused; symbol is fixed)</param>
/// <param name="line">CSV line: date,open,high,low,close,volume,turnover</param>
/// <param name="date">Date of this reader request</param>
/// <param name="datafeed">Endpoint the line came from (unused)</param>
/// <returns>Populated Nifty object, or an empty one for unparsable lines</returns>
public override BaseData Reader(SubscriptionDataConfig config, string line, DateTime date, DataFeedEndpoint datafeed)
{
    var nifty = new Nifty();
    try
    {
        //Example File Format:
        //Date, Open High Low Close Volume Turnover
        //2011-09-13 7792.9 7799.9 7722.65 7748.7 116534670 6107.78
        var csv = line.Split(',');
        nifty.Time = DateTime.Parse(csv[0]);
        nifty.Open = Convert.ToDecimal(csv[1]);
        nifty.High = Convert.ToDecimal(csv[2]);
        nifty.Low = Convert.ToDecimal(csv[3]);
        nifty.Close = Convert.ToDecimal(csv[4]);
        nifty.Symbol = "NIFTY";
        nifty.Value = nifty.Close;
    }
    catch
    {
        //Skip unparsable lines (e.g. a header row).
    }
    return nifty;
}
/// <summary>
/// Source location of the CNX NIFTY index CSV file.
/// </summary>
/// <param name="config">Subscription configuration (unused)</param>
/// <param name="date">Date of the request (unused: single fixed file)</param>
/// <param name="datafeed">Endpoint requesting the source (unused)</param>
/// <returns>Dropbox URL of the CNXNIFTY csv file</returns>
public override string GetSource(SubscriptionDataConfig config, DateTime date, DataFeedEndpoint datafeed)
{
    return "https://www.dropbox.com/s/rsmg44jr6wexn2h/CNXNIFTY.csv?dl=1";
}
/// <summary>
/// Source location of the VIX index data: Quandl CSV with headers excluded,
/// fixed date range, ascending order so the backtest reads oldest-first.
/// </summary>
/// <param name="config">Subscription configuration (unused)</param>
/// <param name="date">Date of the request (unused: single fixed file)</param>
/// <param name="datafeed">Endpoint requesting the source (unused)</param>
/// <returns>Quandl URL of the VIX csv file</returns>
public override string GetSource(SubscriptionDataConfig config, DateTime date, DataFeedEndpoint datafeed)
{
    return "https://www.quandl.com/api/v1/datasets/YAHOO/INDEX_VIX.csv?trim_start=2000-01-01&trim_end=2014-10-31&sort_order=asc&exclude_headers=true";
}
/// <summary>
/// Reader: convert one Quandl CSV line into a VIX index bar.
/// The try/catch was previously commented out, so any unparsable line (a stray
/// header, blank line, or format drift from Quandl) would throw out of the reader;
/// restored it to skip bad rows, consistent with the other custom readers in this file.
/// </summary>
/// <param name="config">Subscription configuration (unused; symbol is fixed)</param>
/// <param name="line">CSV line: date,open,high,low,close,volume,adjusted close</param>
/// <param name="date">Date of this reader request</param>
/// <param name="datafeed">Endpoint the line came from (unused)</param>
/// <returns>Populated VIX object, or an empty one for unparsable lines</returns>
public override BaseData Reader(SubscriptionDataConfig config, string line, DateTime date, DataFeedEndpoint datafeed)
{
    VIX fear = new VIX();
    try
    {
        //Columns: Date Open High Low Close Volume Adjusted-Close
        //NOTE(review): the old sample comment showed "10/27/2014" but the parse format
        //is "yyyy-MM-dd" (Quandl ISO dates, exclude_headers=true) - confirmed by the format string.
        string[] data = line.Split(',');
        fear.Time = DateTime.ParseExact(data[0], "yyyy-MM-dd", CultureInfo.InvariantCulture);
        fear.Open = Convert.ToDecimal(data[1]);
        fear.High = Convert.ToDecimal(data[2]);
        fear.Low = Convert.ToDecimal(data[3]);
        fear.Close = Convert.ToDecimal(data[4]);
        fear.Symbol = "VIX";
        fear.Value = fear.Close;
    }
    catch
    {
        //Skip malformed rows instead of crashing the data feed.
    }
    return fear;
}
/// <summary>
/// Return the URL string source of the file. This will be converted to a stream
/// and fed line-by-line into the matching Reader method.
/// </summary>
/// <param name="config">Configuration object: symbol, security and data type of the subscription</param>
/// <param name="date">Date of this source file; lets implementations split data into daily files</param>
/// <param name="datafeed">Type of datafeed we're requesting - backtest or live</param>
/// <returns>String URL of source file.</returns>
public abstract string GetSource(SubscriptionDataConfig config, DateTime date, DataFeedEndpoint datafeed);
/********************************************************
 * CLASS METHODS
*********************************************************/
/// <summary>
/// Tick Implementation of Reader Method: read a line and convert it to a tick.
/// </summary>
/// <param name="config">Configuration object for algorithm</param>
/// <param name="line">Line of the datafeed</param>
/// <param name="date">Date of this reader request</param>
/// <param name="datafeed">Source of the datafeed</param>
/// <returns>New initialized tick; empty tick for live endpoints which are not parsed here</returns>
public override BaseData Reader(SubscriptionDataConfig config, string line, DateTime date, DataFeedEndpoint datafeed)
{
    var tick = new Tick();

    //Local file system storage and the backtesting QC data store use the same files,
    //so both are parsed with the csv tick constructor:
    if (datafeed == DataFeedEndpoint.FileSystem || datafeed == DataFeedEndpoint.Backtesting)
    {
        tick = new Tick(config, line, date, datafeed);
    }

    //LiveTrading / Tradier feeds fall through and return the empty tick.
    return tick;
}
/********************************************************
 * CLASS METHODS
*********************************************************/
/// <summary>
/// TradeBars collection does not implement line parsing: use the TradeBar reader instead.
/// Always throws.
/// </summary>
/// <param name="config">Symbols, Resolution, DataType</param>
/// <param name="line">Line from the data file requested</param>
/// <param name="date">Date of the reader request, only used when the source file changes daily.</param>
/// <param name="datafeed">Where are we getting this datafeed from - backtesting or live.</param>
/// <exception cref="NotImplementedException">Always: this type has no reader.</exception>
public override BaseData Reader(SubscriptionDataConfig config, string line, DateTime date, DataFeedEndpoint datafeed)
{
    //Use NotImplementedException rather than a bare Exception so the intent is explicit
    //and consistent with the sibling GetSource stub; message preserved for existing logs.
    throw new NotImplementedException("TradeBars class not implemented. Use TradeBar reader instead.");
}
/// <summary>
/// Get Source File URL for this TradeBar subscription request.
/// Not implemented for this type: always throws.
/// </summary>
/// <param name="config">Configuration for the subscription</param>
/// <param name="date">Date of the source file requested.</param>
/// <param name="datafeed">Source of the datafeed / type of strings we'll be receiving</param>
/// <returns>String URL Source File</returns>
/// <exception cref="NotImplementedException">Always thrown - no source is defined here.</exception>
public override string GetSource(SubscriptionDataConfig config, DateTime date, DataFeedEndpoint datafeed)
{
    throw new NotImplementedException();
}
/// <summary>
/// Get Source for Custom Data File
/// >> What source file location would you prefer for each type of usage:
/// Template method: every branch currently returns an empty source until filled in.
/// </summary>
/// <param name="config">Configuration object</param>
/// <param name="date">Date of this source request if source spread across multiple files</param>
/// <param name="datafeed">Source of the datafeed</param>
/// <returns>String source location of the file</returns>
public override string GetSource(SubscriptionDataConfig config, DateTime date, DataFeedEndpoint datafeed)
{
    var source = "";

    if (datafeed == DataFeedEndpoint.Backtesting)
    {
        //Source location for backtesting. Commonly a dropbox or FTP link, e.g.:
        //source = @"https://www.dropbox.com/";
    }
    else if (datafeed == DataFeedEndpoint.FileSystem)
    {
        //Source location for local testing: Not yet released :) Coming soon.
    }
    else if (datafeed == DataFeedEndpoint.LiveTrading)
    {
        //Source location for live trading: do you have an endpoint for streaming data?
    }

    return source;
}
/// <summary>
/// Source location of the 2013 cash data CSV file.
/// </summary>
/// <param name="config">Subscription configuration (unused)</param>
/// <param name="date">Date of the request (unused: single fixed file)</param>
/// <param name="datafeed">Endpoint requesting the source (unused)</param>
/// <returns>Dropbox URL of the 2013-cash csv file</returns>
public override string GetSource(SubscriptionDataConfig config, DateTime date, DataFeedEndpoint datafeed)
{
    return "https://www.dropbox.com/s/oiliumoyqqj1ovl/2013-cash.csv?dl=1";
}
/// <summary>
/// Add Market Data Required - generic data typing support as long as Type implements IBaseData.
/// </summary>
/// <param name="dataType">Set the type of the data we're subscribing to.</param>
/// <param name="security">Market Data Asset</param>
/// <param name="symbol">Symbol of the asset we'd like</param>
/// <param name="resolution">Resolution of Asset Required</param>
/// <param name="fillDataForward">when there is no data pass the last tradebar forward</param>
/// <param name="extendedMarketHours">Request premarket data as well when true</param>
public void Add(Type dataType, SecurityType security, string symbol, Resolution resolution = Resolution.Minute, bool fillDataForward = true, bool extendedMarketHours = false)
{
    //Normalise the symbol to upper case before storing:
    symbol = symbol.ToUpper();

    //Build the subscription request from the supplied settings:
    var subscription = new SubscriptionDataConfig(dataType, security, symbol, resolution, fillDataForward, extendedMarketHours);

    //NOTE(review): no duplicate-symbol check is performed here although the original
    //comment implied one; repeated Add() calls create duplicate subscriptions - confirm intended.
    Subscriptions.Add(subscription);
}
/// <summary>
/// Reader: convert one CSV line into a dated cash value.
/// </summary>
/// <param name="config">Subscription configuration (unused)</param>
/// <param name="line">CSV line: date,value</param>
/// <param name="date">Date of this reader request</param>
/// <param name="datafeed">Endpoint the line came from (unused)</param>
/// <returns>Populated CashType object, or an empty one for unparsable lines</returns>
public override BaseData Reader(SubscriptionDataConfig config, string line, DateTime date, DataFeedEndpoint datafeed)
{
    var cash = new CashType();
    try
    {
        var csv = line.Split(',');
        cash.Time = DateTime.ParseExact(csv[0], "yyyy-MM-dd", CultureInfo.InvariantCulture);
        cash.Value = Convert.ToDecimal(csv[1]);
    }
    catch
    {
        //Do nothing: skip the first title row.
    }
    return cash;
}