/// <summary>
/// Downloads the newest data for a single dataset from the Quandl REST API and
/// inserts it into Postgres. On any failure the dataset is queued in
/// <c>failedToFetch</c> so it can be retried later.
/// </summary>
/// <param name="dataset">Dataset to fetch; its LastFetch date (if any) determines
/// the start_date of the request.</param>
/// <param name="to">Total number of datasets in the current batch, used only for
/// the progress-percentage display.</param>
private static async Task DownloadDatasetDataAsync(QuandlDataset dataset, int to)
{
    using (PatientWebClient client = new PatientWebClient())
    {
        try
        {
            // Start one day after the last fetched date so the newest row already
            // stored locally is not requested again.
            string startDate = dataset.LastFetch.GetValueOrDefault(DateTime.MinValue)
                .AddDays(1).ToString("yyyy-MM-dd");
            string url = "https://www.quandl.com/api/v3/datasets/" + dataset.DatabaseCode +
                "/" + dataset.DatasetCode + "/data.json?api_key=" + Utils.Constants.API_KEY +
                "&start_date=" + startDate;

            string data = await client.DownloadStringTaskAsync(new Uri(url));

            // Quandl's JSON uses snake_case; the underscore contract maps it onto
            // the PascalCase properties of DataResponse.
            DataResponse response = JsonConvert.DeserializeObject<DataResponse>(
                data,
                new JsonSerializerSettings { ContractResolver = Utils.Converters.MakeUnderscoreContract() });

            QuandlDatasetData datasetData = response.DatasetData;
            datasetData.SetBaseDataset(dataset);

            datasetsFetched++;
            Utils.ConsoleInformer.PrintProgress("1C", "Fetching dataset [" + dataset.DatasetCode + "]: ",
                Utils.Helpers.GetPercent(datasetsFetched, to).ToString() + "%");

            // Insert as a single-dataset group so the existing group-insert path is reused.
            QuandlDatasetDataGroup datasetGroup = new QuandlDatasetDataGroup()
            {
                DatabaseCode = datasetData.DatabaseCode,
                Datasets = new List<QuandlDatasetData>() { datasetData }
            };
            PostgresHelpers.QuandlDatasetActions.InsertQuandlDatasetsDataGroup(datasetGroup);
        }
        catch (Exception e)
        {
            // Queue the dataset for a later retry, creating its group on first failure.
            // Single Find call replaces the original Exists + Find double lookup.
            QuandlDatasetGroup failedGroup = failedToFetch.Find(d => d.DatabaseCode == dataset.DatabaseCode);
            if (failedGroup == null)
            {
                failedGroup = new QuandlDatasetGroup()
                {
                    DatabaseCode = dataset.DatabaseCode,
                    Datasets = new List<QuandlDataset>()
                };
                failedToFetch.Add(failedGroup);
            }
            failedGroup.Datasets.Add(dataset);

            // NOTE(review): detecting HTTP 429 (rate limit) by sniffing the exception
            // message is fragile; prefer checking the response status code on the
            // underlying WebException if PatientWebClient exposes it.
            if (e.Message.Contains("(429)"))
            {
                // Print the "blocked" warning only once per run.
                if (!blocked)
                {
                    Utils.ConsoleInformer.Inform("Looks like quandl just blocked you");
                }
                blocked = true;
            }

            // Log and continue; the dataset will be retried from failedToFetch.
            Utils.Helpers.Log("Failed to fetch data: from dataset: [" +
                dataset.DatabaseCode + "/" + dataset.DatasetCode + "] Will try to recover", "Ex: " + e.Message);
        }
    }
}
/// <summary>
/// Fetches data for every selected dataset group. If any datasets failed
/// (typically because of Quandl's rate limit), waits 11 minutes and recursively
/// retries the failed ones until everything has been fetched.
/// </summary>
private static async Task StartFetching()
{
    Console.WriteLine("\nSelected datasets models - quantity:");
    datasetsGroups.ForEach(d => Console.WriteLine(" -[DB Model] " + d.DatabaseCode + " - " + d.Datasets.Count));
    Console.WriteLine();

    // Determine, per dataset, the newest date already imported so only rows newer
    // than that are requested from the API.
    Console.WriteLine("\nDetecting newest data available:");
    foreach (QuandlDatasetGroup datasetGroup in datasetsGroups)
    {
        // Item1 = newest date of imported data, Item2 = dataset code
        List<Tuple<DateTime, string>> datasetNewestDateList =
            PostgresHelpers.QuandlDatasetActions.GetNewestImportedData(datasetGroup);

        foreach (var tuple in datasetNewestDateList)
        {
            // Only datasets that are actually imported receive a LastFetch date.
            QuandlDataset dataset = datasetGroup.Datasets.Find(d => d.DatasetCode == tuple.Item2);
            if (dataset != null)
            {
                dataset.LastFetch = tuple.Item1;
            }
        }
    }

    int count = 0;
    foreach (QuandlDatasetGroup datasetGroup in datasetsGroups)
    {
        count++;

        // Identify the current group in the console output.
        Utils.ConsoleInformer.InformSimple("Group model: [" + datasetGroup.DatabaseCode + "]. Group:" + count + "/" + datasetsGroups.Count);

        // Make sure the per-group model table exists before inserting data into it.
        Console.WriteLine("Creating unique table model for datasets:");
        SchemaActions.CreateQuandlDatasetDataTable(datasetGroup);
        Console.WriteLine();

        // Request all datasets belonging to this group.
        await DownloadDatasetsDataAsync(datasetGroup, datasetGroup.Datasets.Count);
    }

    if (failedToFetch.Any())
    {
        // Retry only the groups that failed.
        datasetsGroups.Clear();
        datasetsGroups.AddRange(failedToFetch);

        Console.WriteLine("\n######################################################################");
        Console.WriteLine("\nFetching failed datasets data");
        Console.WriteLine("Waiting 11 minutes (quandl limitation) before fetching remaning ones");

        // Countdown display. Task.Delay replaces the original Thread.Sleep so the
        // thread is not blocked inside an async method.
        for (int totalSeconds = 11 * 60; totalSeconds >= 0; totalSeconds--)
        {
            int seconds = totalSeconds % 60;
            int minutes = totalSeconds / 60;
            // D2 zero-pads the seconds so the clock reads "10:05" rather than "10:5".
            string time = minutes + ":" + seconds.ToString("D2");
            Console.CursorLeft = 0;
            Console.Write("{0} ", time); // Trailing space overwrites previous contents
            await Task.Delay(1000);
        }

        failedToFetch.Clear();
        await StartFetching();
    }
}
/**
 * Dataset data methods
 */

/// <summary>
/// Queries Postgres for every dataset marked for import whose parent database is
/// also marked for import, grouping the results by database code.
/// </summary>
/// <returns>One QuandlDatasetGroup per database code, each holding its imported datasets.</returns>
public static List<QuandlDatasetGroup> GetImportedDatasets()
{
    // The join ensures a dataset is only returned when its database is imported
    // too (a plain "WHERE import = true" on quandl.datasets alone would miss that).
    string query = @"SELECT " + QuandlDataset.GetColumnsForQuerySuffixed("ds") + @"
        FROM quandl.databases
        INNER JOIN quandl.datasets ds ON (quandl.databases.databasecode = ds.databasecode)
        WHERE quandl.databases.import = true AND ds.import = true";

    List<QuandlDatasetGroup> datasetsGroups = new List<QuandlDatasetGroup>();

    using (var conn = new NpgsqlConnection(Constants.CONNECTION_STRING))
    {
        using (var cmd = new NpgsqlCommand(query))
        {
            // Open connection
            // ===============================================================
            conn.Open();
            cmd.Connection = conn;

            try
            {
                // Dispose the reader deterministically (the original leaked it).
                using (NpgsqlDataReader dr = cmd.ExecuteReader())
                {
                    // Each row becomes a dataset, bucketed into its database-code group.
                    while (dr.Read())
                    {
                        QuandlDataset dataset = QuandlDataset.MakeQuandlDataset(dr);

                        // Find (or lazily create) the group for this database code;
                        // single Find replaces the original Exists + Find double lookup.
                        QuandlDatasetGroup group = datasetsGroups.Find(d => d.DatabaseCode == dataset.DatabaseCode);
                        if (group == null)
                        {
                            group = new QuandlDatasetGroup()
                            {
                                DatabaseCode = dataset.DatabaseCode,
                                Datasets = new List<QuandlDataset>()
                            };
                            datasetsGroups.Add(group);
                        }
                        group.Datasets.Add(dataset);
                    }
                }
            }
            catch (Exception ex)
            {
                // ExitWithError terminates the process; Close() is kept to mirror
                // the original flow even though the using block would dispose anyway.
                conn.Close();
                Helpers.ExitWithError(ex.Message);
            }

            ConsoleInformer.PrintProgress("0C", "Querying imported datasets: ", "100%");

            // Close connection
            // ===============================================================
            conn.Close();
        }
    }

    return datasetsGroups;
}