public void ExecuteQuery()
{
    var projectId = _fixture.ProjectId;
    var datasetId = _fixture.GameDatasetId;
    var historyTableId = _fixture.HistoryTableId;
    // Snippet: ExecuteQuery
    BigqueryClient client = BigqueryClient.Create(projectId);
    BigqueryTable table = client.GetTable(datasetId, historyTableId);
    BigqueryResult result = client.ExecuteQuery(
        $@"SELECT player, MAX(score) AS score
           FROM {table}
           GROUP BY player
           ORDER BY score DESC");
    foreach (var row in result.Rows)
    {
        Console.WriteLine($"{row["player"]}: {row["score"]}");
    }
    // End snippet
    var players = result.Rows.Select(r => (string)r["player"]).ToList();
    Assert.Contains("Ben", players);
    Assert.Contains("Nadia", players);
    Assert.Contains("Tim", players);
}
public void TestImportFromCloudStorage()
{
    string datasetId = "datasetForTestImportFromCloudStorage";
    string newTableId = "tableForTestImportFromCloudStorage";
    string jsonGcsSampleFile = "sample.json";
    string gcsFolder = "test";
    string gcsUploadTestWord = "exampleJsonFromGCS";
    CreateDataset(datasetId, _client);
    CreateTable(datasetId, newTableId, _client);
    // Import data.
    ImportDataFromCloudStorage(_projectId, datasetId, newTableId, _client,
        jsonGcsSampleFile, gcsFolder);
    // Run query to get table data.
    var newTable = _client.GetTable(datasetId, newTableId);
    string query = $"SELECT title, unique_words FROM {newTable}";
    BigqueryResult results = AsyncQuery(_projectId, datasetId, newTableId, query, _client);
    // Get first row and confirm it contains the expected value.
    var row = results.Rows.First();
    Assert.Equal(gcsUploadTestWord, row["title"]);
    DeleteTable(datasetId, newTableId, _client);
    DeleteDataset(datasetId, _client);
}
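// TestImportFromCloudStorage relies on several helpers defined elsewhere in the
// sample. Below is a minimal sketch of the two table-setup helpers and the GCS
// import, assuming a two-column schema matching the queries in the tests and a
// staging bucket named after the project; the schema, bucket layout, and helper
// signatures are assumptions, not the sample's actual definitions.
public void CreateDataset(string datasetId, BigqueryClient client)
{
    client.CreateDataset(datasetId);
}

public void CreateTable(string datasetId, string tableId, BigqueryClient client)
{
    // Assumed schema: the tests query the "title" and "unique_words" columns.
    var schema = new TableSchemaBuilder
    {
        { "title", BigqueryDbType.String },
        { "unique_words", BigqueryDbType.Integer }
    }.Build();
    client.CreateTable(datasetId, tableId, schema);
}

public void ImportDataFromCloudStorage(string projectId, string datasetId,
    string newTableId, BigqueryClient client, string fileName, string folder)
{
    // Assumed staging location: gs://<project>/<folder>/<fileName>. The bucket
    // name is hypothetical. The load job uses the same raw-Job pattern as
    // CopyTable below.
    string sourceUri = $"gs://{projectId}/{folder}/{fileName}";
    Job job = client.Service.Jobs.Insert(new Job
    {
        Configuration = new JobConfiguration
        {
            Load = new JobConfigurationLoad
            {
                SourceUris = new[] { sourceUri },
                DestinationTable = client.GetTableReference(datasetId, newTableId),
                // The sample file is newline-delimited JSON.
                SourceFormat = "NEWLINE_DELIMITED_JSON"
            }
        }
    }, projectId).Execute();
    // Wait until the load has finished before the test queries the table.
    client.PollJob(job.JobReference);
}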
public void TestAsyncQuery()
{
    string projectId = "bigquery-public-data";
    string datasetId = "samples";
    string tableId = "shakespeare";
    var table = _client.GetTable(projectId, datasetId, tableId);
    string query = $"SELECT TOP(corpus, 42) as title, COUNT(*) as unique_words FROM {table}";
    BigqueryResult results = AsyncQuery(projectId, datasetId, tableId, query, _client);
    Assert.True(results.Rows.Count() > 0);
}
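// The AsyncQuery helper used by the tests above is defined elsewhere in the
// sample. A minimal sketch, assuming it mirrors SyncQuery below but waits for
// job completion via job.Poll() rather than a result timeout (the signature is
// inferred from the call sites):
public BigqueryResult AsyncQuery(string projectId, string datasetId, string tableId,
    string query, BigqueryClient client)
{
    BigqueryJob job = client.CreateQueryJob(query,
        new CreateQueryJobOptions { UseQueryCache = false });
    // Wait for the query job to complete, then fetch its results.
    job.Poll();
    return client.GetQueryResults(job.Reference);
}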
public void CopyTable()
{
    // TODO: Make this simpler in the wrapper
    var projectId = _fixture.ProjectId;
    var datasetId = _fixture.GameDatasetId;
    var historyTableId = _fixture.HistoryTableId;
    var destinationTableId = Guid.NewGuid().ToString().Replace('-', '_');
    if (!WaitForStreamingBufferToEmpty(historyTableId))
    {
        Console.WriteLine("Streaming buffer not empty after 30 seconds; not performing copy");
        return;
    }
    // Sample: CopyTable
    BigqueryClient client = BigqueryClient.Create(projectId);
    Job job = client.Service.Jobs.Insert(new Job
    {
        Configuration = new JobConfiguration
        {
            Copy = new JobConfigurationTableCopy
            {
                DestinationTable = client.GetTableReference(datasetId, destinationTableId),
                SourceTable = client.GetTableReference(datasetId, historyTableId)
            }
        }
    }, projectId).Execute();
    // Wait until the copy has finished.
    client.PollJob(job.JobReference);
    // Now list its rows.
    BigqueryResult result = client.ListRows(datasetId, destinationTableId);
    foreach (BigqueryResult.Row row in result.Rows)
    {
        DateTime timestamp = (DateTime)row["game_started"];
        long level = (long)row["level"];
        long score = (long)row["score"];
        string player = (string)row["player"];
        Console.WriteLine($"{player}: {level}/{score} ({timestamp:yyyy-MM-dd HH:mm:ss})");
    }
    // End sample
    var originalRows = client.ListRows(datasetId, historyTableId).Rows.Count();
    var copiedRows = result.Rows.Count();
    Assert.Equal(originalRows, copiedRows);
}
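// CopyTable guards against copying while rows are still in the streaming buffer,
// since buffered rows would not be included in the copy. A minimal sketch of the
// WaitForStreamingBufferToEmpty helper, assuming it polls the table's underlying
// REST resource for up to 30 seconds; the 5-second interval and use of
// table.Resource.StreamingBuffer are assumptions:
private bool WaitForStreamingBufferToEmpty(string tableId)
{
    BigqueryClient client = BigqueryClient.Create(_fixture.ProjectId);
    for (int attempt = 0; attempt < 6; attempt++)
    {
        var table = client.GetTable(_fixture.GameDatasetId, tableId);
        // A null streaming buffer on the underlying resource means all
        // streamed rows have been committed to the table.
        if (table.Resource.StreamingBuffer == null)
        {
            return true;
        }
        Thread.Sleep(TimeSpan.FromSeconds(5));
    }
    return false;
}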
// [END import_file_from_gcs]

// [START sync_query]
public BigqueryResult SyncQuery(string projectId, string datasetId, string tableId,
    string query, double timeoutMs, BigqueryClient client)
{
    var table = client.GetTable(projectId, datasetId, tableId);
    BigqueryJob job = client.CreateQueryJob(query,
        new CreateQueryJobOptions { UseQueryCache = false });
    // Get the query result, waiting for the timespan specified in milliseconds.
    BigqueryResult result = client.GetQueryResults(job.Reference,
        new GetQueryResultsOptions { Timeout = TimeSpan.FromMilliseconds(timeoutMs) });
    return result;
}
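// A usage example for SyncQuery, reusing the public shakespeare table from
// TestAsyncQuery above. The method name and the 10-second timeout are
// illustrative choices, not part of the sample:
public void SyncQueryExample(BigqueryClient client)
{
    var table = client.GetTable("bigquery-public-data", "samples", "shakespeare");
    string query = $"SELECT TOP(corpus, 10) as title, COUNT(*) as unique_words FROM {table}";
    BigqueryResult results = SyncQuery("bigquery-public-data", "samples", "shakespeare",
        query, 10000, client);
    foreach (var row in results.Rows)
    {
        Console.WriteLine($"{row["title"]}: {row["unique_words"]}");
    }
}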
public void QueryOverview()
{
    string projectId = _fixture.ProjectId;
    // Sample: QueryOverview
    var client = BigqueryClient.Create(projectId);
    var table = client.GetTable("bigquery-public-data", "samples", "shakespeare");
    string sql = $"SELECT TOP(corpus, 10) as title, COUNT(*) as unique_words FROM {table}";
    BigqueryResult results = client.ExecuteQuery(sql);
    foreach (BigqueryResult.Row row in results.Rows)
    {
        Console.WriteLine($"{row["title"]}: {row["unique_words"]}");
    }
    // End sample
}
// [END stream_row]

// [START export_to_cloud_storage]
public void ExportJsonToGcs(
    string datasetId, string tableId, string bucketName, string fileName,
    BigqueryClient client)
{
    StorageClient gcsClient = StorageClient.Create();
    string contentType = "application/json";
    // List the table's rows and append them to a StringBuilder,
    // one JSON object per line (newline-delimited JSON).
    BigqueryResult result = client.ListRows(datasetId, tableId);
    StringBuilder sb = new StringBuilder();
    foreach (var row in result.Rows)
    {
        sb.Append($"{{\"title\" : \"{row["title"]}\", \"unique_words\":\"{row["unique_words"]}\"}}{Environment.NewLine}");
    }
    // Save the stream to Google Cloud Storage.
    using (var stream = new MemoryStream(Encoding.UTF8.GetBytes(sb.ToString())))
    {
        gcsClient.UploadObject(bucketName, fileName, contentType, stream);
    }
}
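// ExportJsonToGcs above pulls every row through the client and re-uploads the
// result, which is fine for small test tables. For larger tables, a server-side
// extract job avoids moving data through the client. A minimal sketch using the
// same raw-Job pattern as CopyTable; this helper is not part of the sample, and
// the bucket name passed in would be a placeholder:
public void ExportTableToGcs(string projectId, string datasetId, string tableId,
    string bucketName, string fileName, BigqueryClient client)
{
    Job job = client.Service.Jobs.Insert(new Job
    {
        Configuration = new JobConfiguration
        {
            Extract = new JobConfigurationExtract
            {
                SourceTable = client.GetTableReference(datasetId, tableId),
                DestinationUris = new[] { $"gs://{bucketName}/{fileName}" },
                // Export as newline-delimited JSON, matching the import format.
                DestinationFormat = "NEWLINE_DELIMITED_JSON"
            }
        }
    }, projectId).Execute();
    // Wait until the extract has finished.
    client.PollJob(job.JobReference);
}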
public void TestImportDataFromStream()
{
    string datasetId = "datasetForTestImportDataFromStream";
    string newTableId = "tableForTestImportDataFromStream";
    string gcsUploadTestWord = "exampleJsonFromStream";
    CreateDataset(datasetId, _client);
    CreateTable(datasetId, newTableId, _client);
    // Import data.
    UploadJson(datasetId, newTableId, _client);
    // Query table to get first row and confirm it contains the expected value.
    var newTable = _client.GetTable(datasetId, newTableId);
    string query = $"SELECT title, unique_words FROM {newTable}";
    BigqueryResult results = AsyncQuery(_projectId, datasetId, newTableId, query, _client);
    var row = results.Rows.First();
    Assert.Equal(gcsUploadTestWord, row["title"]);
    DeleteTable(datasetId, newTableId, _client);
    DeleteDataset(datasetId, _client);
}
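// TestImportDataFromStream relies on an UploadJson helper defined elsewhere in
// the sample. A minimal sketch, assuming the client exposes an UploadJson method
// taking a schema (null here, since the table already exists) and a stream of
// newline-delimited JSON; the exact signature is an assumption:
public void UploadJson(string datasetId, string tableId, BigqueryClient client)
{
    // One JSON object per line; this is not a JSON array. The single row's
    // title matches the value the test asserts on.
    string json = "{ \"title\": \"exampleJsonFromStream\", \"unique_words\": 1 }";
    using (var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)))
    {
        BigqueryJob job = client.UploadJson(datasetId, tableId, null, stream);
        // Wait for the load job to complete before the test queries the table.
        job.Poll();
    }
}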
public void CreateQueryJob()
{
    var projectId = _fixture.ProjectId;
    var datasetId = _fixture.GameDatasetId;
    var historyTableId = _fixture.HistoryTableId;
    var queryTableId = Guid.NewGuid().ToString().Replace('-', '_');
    // Snippet: CreateQueryJob(*,*)
    BigqueryClient client = BigqueryClient.Create(projectId);
    BigqueryTable table = client.GetTable(datasetId, historyTableId);
    TableReference destination = client.GetTableReference(datasetId, queryTableId);
    // If the destination table is not specified, the results will be stored in
    // a temporary table.
    BigqueryJob job = client.CreateQueryJob(
        $@"SELECT player, MAX(score) AS score
           FROM {table}
           GROUP BY player
           ORDER BY score DESC",
        new CreateQueryJobOptions { DestinationTable = destination });
    // Wait for the job to complete.
    job.Poll();
    // Then we can fetch the results, either via the job or by accessing
    // the destination table.
    BigqueryResult result = client.GetQueryResults(job.Reference);
    foreach (var row in result.Rows)
    {
        Console.WriteLine($"{row["player"]}: {row["score"]}");
    }
    // End snippet
    var players = result.Rows.Select(r => (string)r["player"]).ToList();
    Assert.Contains("Ben", players);
    Assert.Contains("Nadia", players);
    Assert.Contains("Tim", players);
}
public void ListRows()
{
    string projectId = _fixture.ProjectId;
    string datasetId = _fixture.GameDatasetId;
    string tableId = _fixture.HistoryTableId;
    // Snippet: ListRows(*,*,*)
    BigqueryClient client = BigqueryClient.Create(projectId);
    BigqueryResult result = client.ListRows(datasetId, tableId);
    foreach (BigqueryResult.Row row in result.Rows)
    {
        DateTime timestamp = (DateTime)row["game_started"];
        long level = (long)row["level"];
        long score = (long)row["score"];
        string player = (string)row["player"];
        Console.WriteLine($"{player}: {level}/{score} ({timestamp:yyyy-MM-dd HH:mm:ss})");
    }
    // End snippet
    // We set up 7 results in the fixture. Other tests may add more.
    Assert.True(result.Rows.Count() >= 7);
}
private static void Main(string[] args)
{
    string projectId = null;
    if (args.Length == 0)
    {
        Console.WriteLine(usage);
    }
    else
    {
        projectId = args[0];
        // [START setup]
        // By default, the Google.Bigquery.V2 library client will authenticate
        // using the service account file (created in the Google Developers
        // Console) specified by the GOOGLE_APPLICATION_CREDENTIALS
        // environment variable. If you are running on a Google Compute Engine
        // VM, authentication is completely automatic.
        var client = BigqueryClient.Create(projectId);
        // [END setup]
        // [START query]
        var table = client.GetTable("bigquery-public-data", "samples", "shakespeare");
        string query = $"SELECT TOP(corpus, 10) as title, COUNT(*) as unique_words FROM {table}";
        BigqueryResult result = client.ExecuteQuery(query);
        // [END query]
        // [START print_results]
        Console.Write("\nQuery Results:\n------------\n");
        foreach (var row in result.Rows)
        {
            Console.WriteLine($"{row["title"]}: {row["unique_words"]}");
        }
        // [END print_results]
    }
    Console.WriteLine("\nPress any key...");
    Console.ReadKey();
}