/// <summary>
/// Used for getting SampleNames for comboBox. Queries up to 500 distinct
/// sample names from the configured table, then drops the internal
/// "Testing" entry and any name containing "_not_chosen".
/// </summary>
/// <typeparam name="TData">Unused; kept for interface compatibility with existing callers.</typeparam>
/// <returns>Filtered list of sample names; empty if the query failed.</returns>
public List<string> GetRecipeList<TData>()
{
    var sampleNames = new List<string>();
    try
    {
        // DISTINCT + LIMIT keeps the combo-box population bounded.
        string sql = $"SELECT DISTINCT SampleName FROM {_table} LIMIT 500";
        var result = _client.ExecuteQuery(sql, parameters: null);
        foreach (var row in result)
        {
            sampleNames.Add(row["SampleName"].ToString());
        }
    }
    catch (TimeoutException exc)
    {
        // Was swallowed silently; log so slow queries remain diagnosable
        // while still returning an empty list (best-effort behavior kept).
        U.Logger.WriteLine(exc.ToString());
    }
    catch (Exception exc)
    {
        U.Logger.WriteLine(exc.ToString());
    }

    // Single filtering pass replaces the Contains/Remove check plus the
    // manual index loop. DISTINCT guarantees "Testing" appears at most once,
    // so excluding it here is equivalent to the old single Remove.
    return sampleNames
        .Where(name => name != "Testing" && !name.Contains("_not_chosen"))
        .ToList();
}
/// <summary>
/// Top 1000 token contracts by transfer count, optionally restricted to the
/// last N days as selected by <paramref name="filter"/>.
/// </summary>
public IEnumerable<TokenUsage> Get(TokenStatFilter filter)
{
    string query = @"SELECT tt.token_address, t.name, COUNT(*) as transfer_count FROM `bigquery-public-data.ethereum_blockchain.token_transfers` as tt LEFT JOIN `bigquery-public-data.ethereum_blockchain.tokens` as t ON tt.token_address = t.address {0} GROUP BY tt.token_address, t.name ORDER BY 3 DESC LIMIT 1000";

    // Default means no date restriction; any other filter value is the
    // number of days to look back.
    string where = filter == TokenStatFilter.Default
        ? ""
        : $"WHERE DATE(tt.block_timestamp) > DATE_ADD(current_date(), INTERVAL -{(int)filter} DAY)";
    query = string.Format(query, where);

    return from row in _client.ExecuteQuery(query, parameters: null)
           select new TokenUsage
           {
               Address = (string)row["token_address"],
               Name = (string)row["name"],
               TransferCount = (long)row["transfer_count"],
           };
}
/// <summary>
/// Runs the query and prints timing information: time until ExecuteQuery
/// returns, time until the first row is enumerated, and total elapsed time.
/// </summary>
private static void RunQuery(BigQueryClient client, string query)
{
    Console.WriteLine($"Testing query: {query}");

    var watch = Stopwatch.StartNew();
    var results = client.ExecuteQuery(query, parameters: null);
    TimeSpan queryCompleted = watch.Elapsed;

    int rowCount = 0;
    TimeSpan? firstRowAt = null;
    foreach (var _ in results)
    {
        if (firstRowAt == null)
        {
            // Capture the moment the first row becomes available.
            firstRowAt = watch.Elapsed;
        }
        rowCount++;
    }
    TimeSpan finished = watch.Elapsed;

    Console.WriteLine($"Total rows: {rowCount}");
    Console.WriteLine($"ExecuteQuery time: {queryCompleted.TotalSeconds}s");
    if (firstRowAt != null)
    {
        Console.WriteLine($"First row retrieved: {firstRowAt.Value.TotalSeconds}s");
    }
    Console.WriteLine($"Finished: {finished.TotalSeconds}s");
    Console.WriteLine();
}
/// <summary>
/// Verifies that a 2-second results timeout on a known-slow legacy-SQL query
/// surfaces as a TimeoutException from ExecuteQuery.
/// </summary>
public void ExecuteQuery_Timeout()
{
    BigQueryClient client = BigQueryClient.Create(_fixture.ProjectId);
    // SQL that I happen to know takes over 10 seconds to query.
    string sql = "SELECT id FROM [bigquery-public-data:github_repos.contents] where content contains 'NodaTime' AND content contains '2.0.2' LIMIT 1000";

    // Cache disabled so the query actually runs long enough to hit the timeout.
    var queryOptions = new QueryOptions
    {
        UseLegacySql = true,
        UseQueryCache = false
    };
    var resultsOptions = new GetQueryResultsOptions
    {
        Timeout = TimeSpan.FromSeconds(2)
    };

    Assert.Throws<TimeoutException>(
        () => client.ExecuteQuery(sql, null, queryOptions, resultsOptions));
}
/// <summary>
/// For each week-index x (0 .. max weeks alive), returns how many contracts
/// stayed in use for at least x weeks (168-hour buckets between deployment
/// and last transaction).
/// </summary>
/// <returns>Descending survival counts; empty if the query returned no rows.</returns>
public IEnumerable<int> Get()
{
    string query = @"SELECT DIV(TIMESTAMP_DIFF(MAX(t.block_timestamp), c.block_timestamp, HOUR), 168) AS d FROM `bigquery-public-data.ethereum_blockchain.contracts` AS c LEFT JOIN `bigquery-public-data.ethereum_blockchain.transactions` AS t ON c.address = t.to_address WHERE c.block_timestamp < t.block_timestamp GROUP BY c.address, c.block_timestamp ORDER BY 1 DESC";
    var result = _client.ExecuteQuery(query, parameters: null).ToList();

    // Fix: the original indexed result[0][0] unconditionally and threw on an
    // empty result set.
    if (result.Count == 0)
    {
        return Enumerable.Empty<int>();
    }

    // Materialize the week values once instead of re-reading the row indexer
    // on every pass of the inner Count.
    var weeks = result.Select(r => (long)r[0]).ToList();

    // ORDER BY 1 DESC puts the maximum first.
    int maxWeeks = (int)weeks[0];
    return Enumerable.Range(0, maxWeeks)
        .Select(x => weeks.Count(w => w >= x));
}
/// <summary>
/// Contract deployments per day from the public Ethereum dataset.
/// </summary>
public IEnumerable<DeploymentStat> Get()
{
    string query = @"SELECT TIMESTAMP_TRUNC(block_timestamp, DAY) AS d, COUNT(*) AS c FROM `bigquery-public-data.ethereum_blockchain.contracts` GROUP BY 1 ORDER BY 1";

    return from row in _client.ExecuteQuery(query, parameters: null)
           select new DeploymentStat
           {
               Date = (DateTime)row["d"],
               Count = (long)row["c"],
           };
}
/// <summary>
/// Used for getting SampleNames for comboBox. Fetches up to 500 distinct
/// sample names and strips the internal "Testing" entry.
/// </summary>
/// <typeparam name="TData">Unused; present for interface compatibility.</typeparam>
/// <returns>Sample names; empty if the query failed or timed out.</returns>
public List<string> GetRecipeList<TData>()
{
    var sampleNames = new List<string>();
    try
    {
        string sql = $"SELECT DISTINCT SampleName FROM {_table} LIMIT 500";
        foreach (var row in _client.ExecuteQuery(sql, parameters: null))
        {
            sampleNames.Add(row["SampleName"].ToString());
        }
    }
    catch (TimeoutException)
    {
        // Best-effort: a timed-out query simply yields whatever was collected.
    }
    catch (Exception exc)
    {
        Console.WriteLine(exc);
    }

    // Remove is a no-op when the entry is absent, so no Contains check needed.
    sampleNames.Remove("Testing");
    return sampleNames;
}
/// <summary>
/// Per-day aggregates of block nonces (hex strings parsed to ulong) for the
/// first 10 days in the public Ethereum blocks table.
/// </summary>
public IEnumerable<BlockNonceStat> Get()
{
    string query = @"SELECT TIMESTAMP_TRUNC(timestamp, DAY) as d, ARRAY_AGG(nonce) as n FROM `bigquery-public-data.ethereum_blockchain.blocks` GROUP BY d ORDER BY d LIMIT 10";

    return from row in _client.ExecuteQuery(query, parameters: null)
           select new BlockNonceStat
           {
               Date = (DateTime)row["d"],
               // Nonces arrive as hex strings; base-16 conversion to ulong.
               Nonces = ((IList<string>)row["n"]).Select(n => Convert.ToUInt64(n, 16))
           };
}
/// <summary>
/// Waits for a query to return a non-empty result set. Some inserts may take a
/// few seconds before the results are visible via queries - and much longer to
/// show up in ListRows. (It looks like these are inserts with repeated fields
/// and/or record fields.)
/// </summary>
private IEnumerable<BigQueryRow> WaitForRows(BigQueryClient client, BigQueryCommand command)
{
    const int maxAttempts = 5;
    for (int attempt = 0; attempt < maxAttempts; attempt++)
    {
        var rows = client.ExecuteQuery(command)
            .PollUntilCompleted()
            .GetRows()
            .ToList();
        if (rows.Count > 0)
        {
            return rows;
        }
        // Give the backend a moment before retrying.
        Thread.Sleep(1000);
    }
    throw new TimeoutException("Expected rows were not available after 5 seconds");
}
/// <summary>
/// Runs "SELECT ? AS value" with the parameter bound positionally and asserts
/// the value comes back from BigQuery equal to what was sent.
/// </summary>
private void AssertParameterRoundTrip(BigQueryClient client, BigQueryParameter parameter)
{
    var options = new QueryOptions { ParameterMode = BigQueryParameterMode.Positional };
    var rows = client.ExecuteQuery("SELECT ? AS value", new[] { parameter }, options).ToList();

    Assert.Equal(1, rows.Count);
    Assert.Equal(parameter.Value, rows[0]["value"]);

    // DateTime values additionally go through the dedicated comparison helper.
    if (parameter.Value is DateTime dateTimeValue)
    {
        AssertDateTimeEqual(dateTimeValue, (DateTime)rows[0]["value"]);
    }
}
/// <summary>
/// Builds a BigQueryCommand for "SELECT ? AS value" with the parameter bound
/// positionally and asserts the value round-trips through BigQuery unchanged.
/// </summary>
private void AssertParameterRoundTrip(BigQueryClient client, BigQueryParameter parameter)
{
    // Plain literal: the original used $"..." interpolation with nothing to
    // interpolate.
    var command = new BigQueryCommand("SELECT ? AS value")
    {
        Parameters = { parameter },
        ParameterMode = BigQueryParameterMode.Positional
    };
    var results = client.ExecuteQuery(command).ToList();
    Assert.Equal(1, results.Count);
    Assert.Equal(parameter.Value, results[0]["value"]);
    // DateTime values additionally go through the dedicated comparison helper.
    if (parameter.Value is DateTime)
    {
        AssertDateTimeEqual((DateTime)parameter.Value, (DateTime)results[0]["value"]);
    }
}
// Sends the query built by FormQuery() to BigQuery and groups the returned rows
// into StateData objects, starting a new StateData whenever
// currentState.IsValidInsertion(state) reports the row no longer belongs to the
// current group.
//
// NOTE(review): GoogleCredential.FromJson("Not Today") is not valid JSON and
// will throw before any query runs — looks like leftover debug code; the real
// credential loading is in the commented-out FromFile calls. Verify intent.
// NOTE(review): the final `currentState` accumulated by the loop is only added
// to stateData when a later row starts a new group, so the last group appears
// to be dropped on return — confirm against IsValidInsertion's semantics.
// NOTE(review): on any exception the method returns an ArrayList containing the
// exception string, so callers cannot distinguish data from errors by type.
// NOTE(review): getDataSize is incremented only when a group boundary is hit,
// not per row — presumably it counts groups; confirm against its consumers.
//Method to send query and get response and then assemble the response. public ArrayList GetData() { ArrayList tempArray = new ArrayList(); try { //tempArray.Add(System.IO.Directory.GetCurrentDirectory()); //var credentials = GoogleCredential.FromFile("./Assets/Scripts/Additional/authKey.json"); //var credentials = GoogleCredential.FromFile("/Additional/authkey.json"); var credentials = GoogleCredential.FromJson("Not Today"); BigQueryClient ourClient = BigQueryClient.Create(projectCode, credentials); String query = this.FormQuery(); Debug.Log(query); BigQueryResults results = ourClient.ExecuteQuery(query, parameters: null); ArrayList stateData = new ArrayList(); StateData currentState = new StateData(); foreach (BigQueryRow row in results) { String date = Convert.ToString(row["date"]); String country = Convert.ToString(row["country_code"]); String state = Convert.ToString(row["subregion1_name"]); String coordinates = Convert.ToString(row["location_geometry"]); int confirmedCases = Convert.ToInt32(row["cumulative_confirmed"]); int deceased = Convert.ToInt32(row["cumulative_deceased"]); if (currentState.IsValidInsertion(state)) { currentState.AddRow(country, state, coordinates, date, confirmedCases, deceased); } else { stateData.Add(currentState); currentState = new StateData(); currentState.AddRow(country, state, coordinates, date, confirmedCases, deceased); getDataSize++; } } Debug.Log("Made it to data"); return(stateData); }catch (Exception e) { tempArray.Add(e.ToString()); return(tempArray); } }
/// <summary>
/// Monthly transfer counts per ERC-721 token contract from the public
/// Ethereum dataset.
/// </summary>
public IEnumerable<Erc721Usage> Get()
{
    string query = @"SELECT TIMESTAMP_TRUNC(tt.block_timestamp, MONTH) AS d, tt.token_address AS a, COUNT(*) as tf_c FROM `bigquery-public-data.ethereum_blockchain.token_transfers` as tt LEFT JOIN `bigquery-public-data.ethereum_blockchain.contracts` as c ON tt.token_address = c.address WHERE c.is_erc721 = true GROUP BY 1, 2 ORDER BY 1";

    var rows = _client.ExecuteQuery(query, parameters: null);
    return rows.Select(row => new Erc721Usage
    {
        Date = (DateTime)row["d"],
        Address = (string)row["a"],
        Count = (long)row["tf_c"],
    });
}
//BigQuery implementation
/// <summary>
/// Resolves the table reference and runs the given query string after
/// substituting the table for the literal "TABLE" placeholder.
/// </summary>
/// <returns>The query results, or null if anything failed.</returns>
public BigQueryResults BigQueryResults(string aDatasetId, string aTableName, string aQueryString)
{
    try
    {
        BigQueryTable table = bigQuery.GetTable(projectId, aDatasetId, aTableName);
        // Splice the resolved table reference into the caller's query template.
        string sql = aQueryString.Replace("TABLE", $"{table}");
        return bigQuery.ExecuteQuery(sql);
    }
    catch (Exception aExeption)
    {
        // Best-effort: log and signal failure with null rather than propagating.
        Console.WriteLine(aExeption.ToString());
        return null;
    }
}
/// <summary>
/// Top 1000 miners by mined block count, optionally restricted to the last
/// N days selected by <paramref name="filter"/>.
/// </summary>
public IEnumerable<MinerStat> Get(MinerStatFilter filter)
{
    string query = @"SELECT miner, COUNT(*) as count FROM `bigquery-public-data.ethereum_blockchain.blocks` {0} GROUP BY miner ORDER BY 2 DESC LIMIT 1000";

    // Default means no date restriction; otherwise look back (int)filter days.
    string where = filter == MinerStatFilter.Default
        ? ""
        : $"WHERE DATE(timestamp) > DATE_ADD(current_date(), INTERVAL -{(int)filter} DAY)";
    query = string.Format(query, where);

    return from row in _client.ExecuteQuery(query, parameters: null)
           select new MinerStat
           {
               Address = (string)row["miner"],
               Blocks = (long)row["count"],
           };
}
/// <summary>
/// Per-interval block statistics (count, scaled difficulty, size, gas, tx
/// count) — hourly buckets for the Month filter, daily buckets otherwise.
/// </summary>
public IEnumerable<BlockStat> Get(BlockStatFilter filter)
{
    // Month-level views use hourly buckets; everything else is per day.
    string accuracy = filter == BlockStatFilter.Month ? "HOUR" : "DAY";

    // Default means no date restriction; otherwise look back (int)filter days.
    string where = filter == BlockStatFilter.Default
        ? ""
        : $"WHERE DATE(timestamp) > DATE_ADD(current_date(), INTERVAL -{(int)filter} DAY)";

    string query = string.Format(@"SELECT TIMESTAMP_TRUNC(timestamp, {0}) as date, COUNT(*) as count, CAST(AVG(difficulty) / 1000000000000 AS FLOAT64) as avg_difficulty, SUM(size) as size, AVG(gas_limit) as avg_gas_limit, SUM(gas_used) as gas_used, SUM(transaction_count) as tx_count FROM `bigquery-public-data.ethereum_blockchain.blocks` {1} GROUP BY date ORDER BY date", accuracy, where);

    return from row in _client.ExecuteQuery(query, parameters: null)
           select new BlockStat
           {
               Date = (DateTime)row["date"],
               Count = (long)row["count"],
               AvgDifficulty = (double)row["avg_difficulty"],
               Size = (long)row["size"],
               AvgGasLimit = (double)row["avg_gas_limit"],
               GasUsed = (long)row["gas_used"],
               TxCount = (long)row["tx_count"],
           };
}
/// <summary>
/// Forwards the command and its parameters to the underlying BigQuery client.
/// </summary>
public IEnumerable<BigQueryRow> Query(string command, IEnumerable<BigQueryParameter> parameters)
    => _bigQuery.ExecuteQuery(command, parameters);
// Exports monthly reddit posts/comments for one subreddit from the fh-bigquery
// public dataset to per-month JSON-lines files under D:\dev\data\{dataset}\{subreddit}\.
// For each "YY_MM" on the stack it queries `fh-bigquery.reddit_{dataset}.20{YY_MM}`,
// polls until the job completes, serializes each row (one shape for comments,
// another for posts) with Json.NET, and writes all rows to disk.
//
// NOTE(review): the query interpolates `subreddit` directly into the SQL rather
// than using a query parameter — fine for this hard-coded value, unsafe if it
// ever comes from user input.
// NOTE(review): output paths, project id "aaaa-153204", and the date stack are
// hard-coded; the initial `dbDate = "17_06"` assignment is immediately
// overwritten by the first Pop().
// NOTE(review): `dataset` is compared with == "comments" per row inside the
// loop even though it never changes — presumably intentional for easy toggling.
public static void Main(string[] args) { //format should be YYMM-comments.json" Stack <string> dbDates = new Stack <string>(new List <string>() { "16_07", "16_08", "16_09", "16_10", "16_11", "16_12", "17_01", "17_02", "17_03", "17_04", "17_05", "17_06", }); string dbDate = "17_06"; string subreddit = "Seattle"; string dataset = "posts"; //string dataset = "comments"; BigQueryClient client = BigQueryClient.Create("aaaa-153204"); ProjectReference pr = client.GetProjectReference("fh-bigquery"); DatasetReference dr = new DatasetReference() { DatasetId = $"reddit_{dataset}", ProjectId = pr.ProjectId }; JsonSerializerSettings serializerSettings = new JsonSerializerSettings() { NullValueHandling = NullValueHandling.Include }; while (dbDates.Count > 0) { dbDate = dbDates.Pop(); string fileName = $"{dbDate.Replace("_", "")}-{dataset}.json"; string query = $@"SELECT * FROM `fh-bigquery.reddit_{dataset}.20{dbDate}` WHERE subreddit = '{subreddit}'"; BigQueryResults result = client.ExecuteQuery //(@"SELECT count(1) FROM `fh-bigquery.reddit_comments.2016_11` WHERE subreddit = 'SeattleWA'", (query, new ExecuteQueryOptions() { DefaultDataset = new DatasetReference() { DatasetId = $"reddit_{dataset}", ProjectId = "fh-bigquery" } }); //, new ExecuteQueryOptions() { UseQueryCache = true, DefaultDataset = new DatasetReference() { ProjectId = "fh-bigquery", DatasetId = "reddit_comments" } }); LinkedList <string> rows = new LinkedList <string>(); while (!result.Completed) { Console.WriteLine("Polling for completed query"); result = result.PollUntilCompleted(); } foreach (BigQueryRow row in result.GetRows(new Google.Api.Gax.PollSettings( Google.Api.Gax.Expiration.None, new TimeSpan(0, 0, 15)))) { if (dataset == "comments") { object rowObj = new { body = row["body"], score_hidden = (bool?)row["score_hidden"], archived = (bool?)row["archived"], name = row["name"], author = row["author"], author_flair_text = row["author_flair_text"], downs = (long?)row["downs"], created_utc = 
(long?)row["created_utc"], subreddit_id = row["subreddit_id"], link_id = row["link_id"], parent_id = row["parent_id"], score = (long?)row["score"], retrieved_on = (long?)row["retrieved_on"], controversiality = (long?)row["controversiality"], gilded = (long?)row["gilded"], id = row["id"], subreddit = row["subreddit"], ups = (long?)row["ups"], distinguished = row["distinguished"], author_flair_css_class = row["author_flair_css_class"] }; rows.AddLast( JsonConvert.SerializeObject(rowObj, Formatting.None, serializerSettings )); } else { object rowPostObj = new { created_utc = (long?)row["created_utc"], subreddit = row["subreddit"], author = row["author"], domain = row["domain"], url = row["url"], num_comments = (long?)row["num_comments"], score = (long?)row["score"], ups = (long?)row["ups"], downs = (long?)row["downs"], title = row["title"], selftext = row["selftext"], saved = (bool?)row["saved"], id = row["id"], from_kind = row["from_kind"], gilded = (long?)row["gilded"], from = row["from"], stickied = (bool?)row["stickied"], retrieved_on = (long?)row["retrieved_on"], over_18 = (bool?)row["over_18"], thumbnail = row["thumbnail"], subreddit_id = row["subreddit_id"], hide_score = (bool?)row["hide_score"], link_flair_css_class = row["link_flair_css_class"], author_flair_css_class = row["author_flair_css_class"], archived = (bool?)row["archived"], is_self = (bool?)row["is_self"], from_id = row["from_id"], permalink = row["permalink"], name = row["name"], author_flair_text = row["author_flair_text"], quarantine = (bool?)row["quarantine"], link_flair_text = row["link_flair_text"], distinguished = row["distinguished"] }; rows.AddLast( JsonConvert.SerializeObject(rowPostObj, Formatting.None, serializerSettings )); } // string.Join(",", row.RawRow.F.Select(x => x.V != null ? x.V.ToString(): "null")) //); } System.IO.File.WriteAllLines(@"D:\dev\data\" + dataset + "\\" + subreddit + "\\" + fileName, rows); } Console.WriteLine("Complete"); Console.ReadLine(); }