public void ThrowOnAnyError()
{
    string projectId = _fixture.ProjectId;
    // Snippet: ThrowOnAnyError
    BigQueryClient client = BigQueryClient.Create(projectId);
    // Deliberately invalid SQL so the completed job carries errors.
    // Fix: this was an interpolated string ($"...") with nothing to interpolate.
    string sql = "This is a broken query";
    BigQueryJob query = client.CreateQueryJob(sql, parameters: null).PollUntilCompleted();
    try
    {
        // Usually this method is called in a chain. It returns the same job
        // if there are no errors.
        query = query.ThrowOnAnyError();
    }
    catch (GoogleApiException exception)
    {
        foreach (var error in exception.Error.Errors)
        {
            Console.WriteLine($"Location: {error.Location}; Reason: {error.Reason}; Message: {error.Message}");
        }
    }
    // End sample
    Assert.Throws<GoogleApiException>(() => query.ThrowOnAnyError());
}
public BigQueryJob CreateJob(string projectId = "your-project-id")
{
    // Starts (but does not wait for) a query job with an explicit location,
    // job ID prefix, labels and a bytes-billed cap; returns the running job.
    // Fix: the table reference was missing its closing backtick, which made
    // the query invalid.
    string query = @" SELECT country_name from `bigquery-public-data.utility_us.country_code_iso`";
    // Initialize client that will be used to send requests.
    BigQueryClient client = BigQueryClient.Create(projectId);
    QueryOptions queryOptions = new QueryOptions
    {
        JobLocation = "us",
        JobIdPrefix = "code_sample_",
        Labels = new Dictionary<string, string> { ["example-label"] = "example-value" },
        MaximumBytesBilled = 1000000
    };
    BigQueryJob queryJob = client.CreateQueryJob(
        sql: query,
        parameters: null,
        options: queryOptions);
    Console.WriteLine($"Started job: {queryJob.Reference.JobId}");
    return queryJob;
}
public void ListJobs_FilterByLabels()
{
    // Verifies that jobs can be located by label via ListJobs with full projection.
    string bucketName = _fixture.StorageBucketName;
    string objectName = _fixture.GenerateStorageObjectName();
    string projectId = _fixture.ProjectId;
    string datasetId = _fixture.GameDatasetId;
    string tableId = _fixture.HistoryTableId;
    // Snippet: Labels
    IDictionary<string, string> labels = new Dictionary<string, string>() { { "label-key", "label-value" } };
    BigQueryClient client = BigQueryClient.Create(projectId);
    BigQueryTable table = client.GetTable(projectId, datasetId, tableId);
    string destinationUri = $"gs://{bucketName}/{objectName}";
    // Just a couple examples of jobs marked with labels:
    // (These jobs will most certainly be created somewhere else.)
    // Running a query on a given table.
    BigQueryJob oneLabeledJob = client.CreateQueryJob(
        $"SELECT * FROM {table}", null, new QueryOptions { Labels = labels });
    // Extracting data from a table to GCS.
    BigQueryJob anotherLabeledJob = client.CreateExtractJob(
        projectId, datasetId, tableId, destinationUri,
        new CreateExtractJobOptions { Labels = labels });
    // Find jobs marked with a certain label.
    KeyValuePair<string, string> labelToBeFound = labels.First();
    // Specify full projection to make sure that
    // label information, if it exists, is returned for listed jobs.
    ListJobsOptions options = new ListJobsOptions { Projection = ProjectionEnum.Full };
    List<BigQueryJob> jobs = client
        .ListJobs(options)
        // Labels may be null on unlabeled jobs, hence the null-conditional + ?? false.
        .Where(job => job.Resource.Configuration.Labels?.Contains(labelToBeFound) ?? false)
        .Take(2)
        .ToList();
    foreach (BigQueryJob job in jobs)
    {
        Console.WriteLine(job.Reference.JobId);
    }
    // End snippet
    // This test added two jobs with such labels, other tests might have
    // added more.
    Assert.True(jobs.Count >= 2);
}
public void BrokenQuery()
{
    BigQueryClient client = BigQueryClient.Create(_fixture.ProjectId);
    // Deliberately invalid SQL; fetching results should surface the server error.
    // Fix: this was an interpolated string ($"...") with nothing to interpolate.
    string sql = "This is a broken query";
    var job = client.CreateQueryJob(sql, parameters: null);
    Assert.Throws<GoogleApiException>(() => job.GetQueryResults());
}
public List<string> GetPopularTags()
{
    // Returns the event names of the 5 most frequent 'TAG' events across
    // today's intraday table plus the previous two daily export tables.
    const string format = "yyyyMMdd";
    const string tablePrefix = "analytics_198930456.events_";
    const string todayTablePrefix = "analytics_198930456.events_intraday_";
    // NOTE(review): DateTime.Today uses the machine's local time zone —
    // confirm that matches the Analytics export table naming scheme.
    var todayDate = DateTime.Today;
    string todayTable = todayTablePrefix + todayDate.ToString(format);
    string d2Table = tablePrefix + todayDate.AddDays(-1).ToString(format);
    string d3Table = tablePrefix + todayDate.AddDays(-2).ToString(format);
    // Make sure all three source tables exist before querying them.
    CreateTableIfNotExist(todayTable);
    CreateTableIfNotExist(d2Table);
    CreateTableIfNotExist(d3Table);
    // UNION the three daily tables, keep only rows that carry a 'TAG'
    // event parameter, then count occurrences per event name.
    // NOTE(review): the concatenation yields "...'TAG')GROUP BY" with no
    // space — presumably ')' acts as a token delimiter; verify the query parses.
    string query = @" SELECT event_name, COUNT(event_name) AS tag_count FROM ( " +
        "SELECT event_name FROM `" + todayTable + "`, UNNEST(event_params) AS p1 WHERE p1.key = 'TAG' " +
        "UNION ALL SELECT event_name FROM `" + d2Table + "`, UNNEST(event_params) AS p2 WHERE p2.key = 'TAG' " +
        "UNION ALL SELECT event_name FROM `" + d3Table + "`, UNNEST(event_params) AS p3 WHERE p3.key = 'TAG')" +
        "GROUP BY event_name " +
        "ORDER BY tag_count DESC " +
        "LIMIT 5";
    // Uses the class-level 'client' field (declared outside this chunk).
    BigQueryJob job = client.CreateQueryJob(
        sql: query,
        parameters: null,
        options: new QueryOptions { UseQueryCache = false });
    // Block until the query finishes before reading results.
    job.PollUntilCompleted();
    List<string> tags = new List<string>();
    foreach (BigQueryRow row in client.GetQueryResults(job.Reference))
    {
        var tag = $"{row["event_name"]}";
        tags.Add(tag);
    }
    return (tags);
}
public void PopulateTable(
    string query, string datasetId, string newTableId, BigQueryClient client)
{
    // Run the query with its output directed into the destination table.
    var destinationTable = client.GetTableReference(datasetId, newTableId);
    QueryOptions options = new QueryOptions { DestinationTable = destinationTable };
    BigQueryJob populateJob = client.CreateQueryJob(query, options);
    // Fetching the results blocks until the job has completed.
    populateJob.GetQueryResults();
}
public BigQueryJob CreateQueryJob(BigQueryClient client, string query)
{
    // Start the query with caching disabled so it always really executes.
    var options = new QueryOptions { UseQueryCache = false };
    return client.CreateQueryJob(sql: query, parameters: null, options: options);
}
// [START copy_table]
public void CopyTable(
    string datasetId, string tableIdToBeCopied, string newTableId, BigQueryClient client)
{
    // Copy by selecting everything from the source into the destination table.
    BigQueryTable sourceTable = client.GetTable(datasetId, tableIdToBeCopied);
    string copyQuery = $"SELECT * FROM {sourceTable}";
    var destinationTable = client.GetTableReference(datasetId, newTableId);
    QueryOptions options = new QueryOptions { DestinationTable = destinationTable };
    BigQueryJob copyJob = client.CreateQueryJob(copyQuery, options);
    // Fetching the results blocks until the job has completed.
    copyJob.GetQueryResults();
}
public void GetQueryResults_Timeout()
{
    // SQL that I happen to know takes over 10 seconds to query.
    string sql = "SELECT id FROM [bigquery-public-data:github_repos.contents] where content contains 'NodaTime' AND content contains '2.0.2' LIMIT 1000";
    BigQueryClient client = BigQueryClient.Create(_fixture.ProjectId);
    // Legacy SQL with the cache disabled so the query genuinely runs.
    QueryOptions queryOptions = new QueryOptions
    {
        UseLegacySql = true,
        UseQueryCache = false
    };
    BigQueryJob job = client.CreateQueryJob(sql, null, queryOptions);
    // A two-second result timeout should expire long before the query finishes.
    GetQueryResultsOptions resultsOptions = new GetQueryResultsOptions { Timeout = TimeSpan.FromSeconds(2) };
    Assert.Throws<TimeoutException>(() => job.GetQueryResults(resultsOptions));
}
// [END sync_query_legacy_sql]
// [START async_query]
public BigQueryResults AsyncQuery(string projectId, string datasetId, string tableId,
    string query, BigQueryClient client)
{
    // Fetch the table; the returned object is not used further below.
    var table = client.GetTable(projectId, datasetId, tableId);
    // Start the query (cache disabled) and block until it completes.
    QueryOptions options = new QueryOptions { UseQueryCache = false };
    BigQueryJob queryJob = client.CreateQueryJob(query, options);
    queryJob.PollUntilCompleted();
    // Then we can fetch the results, either via the job or by accessing
    // the destination table.
    BigQueryResults results = client.GetQueryResults(queryJob.Reference.JobId);
    return results;
}
// [END sync_query]
// [START sync_query_legacy_sql]
public BigQueryResults LegacySqlSyncQuery(string projectId, string datasetId, string tableId,
    string query, double timeoutMs, BigQueryClient client)
{
    // Fetch the table; the returned object is not used further below.
    var table = client.GetTable(projectId, datasetId, tableId);
    QueryOptions legacyOptions = new QueryOptions { UseLegacySql = true };
    BigQueryJob queryJob = client.CreateQueryJob(query, legacyOptions);
    // Get the query result, waiting for the timespan specified in milliseconds.
    var resultsOptions = new GetQueryResultsOptions { Timeout = TimeSpan.FromMilliseconds(timeoutMs) };
    return client.GetQueryResults(queryJob.Reference.JobId, resultsOptions);
}
public static List<CHITIETTRAM> Update(string matram, string mo, string da)
{
    // Looks up NOAA GSOD 2019 readings for the given station (matram),
    // month (mo) and day (da) and maps each result row to a CHITIETTRAM.
    List<CHITIETTRAM> results = new List<CHITIETTRAM>();
    // Add file json.
    // NOTE(review): the credential file path is hard-coded and machine-specific;
    // consider loading it from configuration or an environment variable.
    using (BigQueryClient client = BigQueryClient.Create("phantantai", GoogleCredential.FromFile(@"C:\Users\ykdn1\OneDrive\Máy tính\oracle18.5\oracleteam3\oraclenhom3\oraclenhom3\App_Data\phantantai-c3450caeb9b5.json")))
    {
        // Security fix: the original interpolated matram/mo/da directly into the
        // SQL text (injection risk). Use named query parameters instead.
        string query = @" SELECT stn as MATRAM,da as DA,mo as MO,year as YEAR ,temp as NHIETDO,slp as APSUAT,wdsp as TOCDOGIO,max as TMAX,min as TMIN,prcp as LUONGMUA FROM `bigquery-public-data.noaa_gsod.gsod2019` where stn = @matram and mo = @mo and da = @da";
        BigQueryParameter[] parameters =
        {
            new BigQueryParameter("matram", BigQueryDbType.String, matram),
            new BigQueryParameter("mo", BigQueryDbType.String, mo),
            new BigQueryParameter("da", BigQueryDbType.String, da)
        };
        BigQueryJob job = client.CreateQueryJob(
            sql: query,
            parameters: parameters,
            options: new QueryOptions { UseQueryCache = false });
        // Wait for the job to complete.
        job.PollUntilCompleted();
        // The source data appears to use large sentinel values (>= 900) for
        // missing readings; those are stored as 0, matching the original logic.
        Func<object, short> clean = value =>
        {
            float parsed = Convert.ToSingle(value, System.Globalization.CultureInfo.InvariantCulture);
            return parsed < 900 ? (short)parsed : (short)0;
        };
        foreach (BigQueryRow row in client.GetQueryResults(job.Reference))
        {
            // Bug fix: the original created a single CHITIETTRAM before the loop
            // and reused it, so every list entry referenced the same (last) row.
            CHITIETTRAM chitiet = new CHITIETTRAM();
            // Culture fix: parse with the invariant culture instead of
            // stringify-then-Parse under the current culture.
            chitiet.MATRAM = Convert.ToInt32(row["MATRAM"], System.Globalization.CultureInfo.InvariantCulture);
            chitiet.MO = Convert.ToByte(row["MO"], System.Globalization.CultureInfo.InvariantCulture);
            chitiet.DA = Convert.ToInt16(row["DA"], System.Globalization.CultureInfo.InvariantCulture);
            chitiet.YEAR = Convert.ToInt16(row["YEAR"], System.Globalization.CultureInfo.InvariantCulture);
            // NHIETDO keeps the original behavior: plain byte cast, no sentinel handling.
            chitiet.NHIETDO = (byte)Convert.ToSingle(row["NHIETDO"], System.Globalization.CultureInfo.InvariantCulture);
            chitiet.APSUAT = clean(row["APSUAT"]);
            chitiet.TOCDOGIO = clean(row["TOCDOGIO"]);
            chitiet.TMAX = clean(row["TMAX"]);
            chitiet.TMIN = clean(row["TMIN"]);
            chitiet.LUONGMUA = clean(row["LUONGMUA"]);
            results.Add(chitiet);
        }
        return results;
    }
}
internal void CreateModel(BigQueryClient client, string datasetId, string modelId)
{
    // BQML DDL: trains a minimal linear regression model over two inline rows.
    string createModelSql = $@" CREATE MODEL {datasetId}.{modelId} OPTIONS (model_type='linear_reg', input_label_cols=['label'], max_iteration = 1, learn_rate=0.4, learn_rate_strategy='constant') AS SELECT 'a' AS f1, 2.0 AS label UNION ALL SELECT 'b' AS f1, 3.8 AS label";
    BigQueryJob modelJob = client.CreateQueryJob(createModelSql, null);
    // Block until training finishes, then surface any job errors.
    BigQueryJob completedJob = modelJob.PollUntilCompleted();
    completedJob.ThrowOnAnyError();
}
public void Query(string projectId = "your-project-id")
{
    BigQueryClient client = BigQueryClient.Create(projectId);
    // Names of people born in Texas, from the public 1910-2013 sample data.
    string sql = @" SELECT name FROM `bigquery-public-data.usa_names.usa_1910_2013` WHERE state = 'TX' LIMIT 100";
    QueryOptions options = new QueryOptions { UseQueryCache = false };
    BigQueryJob queryJob = client.CreateQueryJob(
        sql: sql,
        parameters: null,
        options: options);
    // Wait for the job to complete, surfacing any job errors.
    queryJob = queryJob.PollUntilCompleted().ThrowOnAnyError();
    // Display the results
    foreach (BigQueryRow row in client.GetQueryResults(queryJob.Reference))
    {
        Console.WriteLine($"{row["name"]}");
    }
}
// [START copy_table]
public void CopyTable(
    string datasetId, string tableIdToBeCopied, string newTableId, BigQueryClient client)
{
    // Copy by selecting everything from the source into the destination table.
    BigQueryTable sourceTable = client.GetTable(datasetId, tableIdToBeCopied);
    string copyQuery = $"SELECT * FROM {sourceTable}";
    var destinationTable = client.GetTableReference(datasetId, newTableId);
    CreateQueryJobOptions options = new CreateQueryJobOptions { DestinationTable = destinationTable };
    BigQueryJob copyJob = client.CreateQueryJob(copyQuery, options);
    // Wait for the job to complete.
    copyJob.PollQueryUntilCompleted();
}
public void PopulateTable(
    string query, string datasetId, string newTableId, BigQueryClient client)
{
    // Run the query with its output directed into the destination table.
    var destinationTable = client.GetTableReference(datasetId, newTableId);
    CreateQueryJobOptions options = new CreateQueryJobOptions { DestinationTable = destinationTable };
    BigQueryJob populateJob = client.CreateQueryJob(query, options);
    // Wait for the job to complete.
    populateJob.PollQueryUntilCompleted();
}
// [END sync_query_legacy_sql]
// [START async_query]
public BigQueryResults AsyncQuery(string projectId, string datasetId, string tableId,
    string query, BigQueryClient client)
{
    // Fetch the table; the returned object is not used further below.
    var table = client.GetTable(projectId, datasetId, tableId);
    // Start the query (cache disabled) and block until it completes.
    CreateQueryJobOptions options = new CreateQueryJobOptions { UseQueryCache = false };
    BigQueryJob queryJob = client.CreateQueryJob(query, options);
    queryJob.PollUntilCompleted();
    // Then we can fetch the results, either via the job or by accessing
    // the destination table.
    return client.GetQueryResults(queryJob.Reference.JobId);
}
// [END sync_query]
// [START sync_query_legacy_sql]
public BigQueryResults LegacySqlSyncQuery(string projectId, string datasetId, string tableId,
    string query, double timeoutMs, BigQueryClient client)
{
    // Fetch the table; the returned object is not used further below.
    var table = client.GetTable(projectId, datasetId, tableId);
    CreateQueryJobOptions legacyOptions = new CreateQueryJobOptions { UseLegacySql = true };
    BigQueryJob queryJob = client.CreateQueryJob(query, legacyOptions);
    // Get the query result, waiting for the timespan specified in milliseconds.
    GetQueryResultsOptions resultsOptions = new GetQueryResultsOptions
    {
        Timeout = TimeSpan.FromMilliseconds(timeoutMs)
    };
    return client.GetQueryResults(queryJob.Reference.JobId, resultsOptions);
}