public void CopyTable()
{
    // TODO: Make this simpler in the wrapper
    var projectId = _fixture.ProjectId;
    var datasetId = _fixture.GameDatasetId;
    var historyTableId = _fixture.HistoryTableId;
    var destinationTableId = Guid.NewGuid().ToString().Replace('-', '_');
    if (!WaitForStreamingBufferToEmpty(historyTableId))
    {
        Console.WriteLine("Streaming buffer not empty after 30 seconds; not performing export");
        return;
    }

    // Sample: CopyTable
    BigqueryClient client = BigqueryClient.Create(projectId);
    Job job = client.Service.Jobs.Insert(new Job
    {
        Configuration = new JobConfiguration
        {
            Copy = new JobConfigurationTableCopy
            {
                DestinationTable = client.GetTableReference(datasetId, destinationTableId),
                SourceTable = client.GetTableReference(datasetId, historyTableId)
            }
        }
    }, projectId).Execute();

    // Wait until the copy has finished.
    client.PollJob(job.JobReference);

    // Now list its rows
    BigqueryResult result = client.ListRows(datasetId, destinationTableId);
    foreach (BigqueryResult.Row row in result.Rows)
    {
        DateTime timestamp = (DateTime)row["game_started"];
        long level = (long)row["level"];
        long score = (long)row["score"];
        string player = (string)row["player"];
        Console.WriteLine($"{player}: {level}/{score} ({timestamp:yyyy-MM-dd HH:mm:ss})");
    }
    // End sample

    var originalRows = client.ListRows(datasetId, historyTableId).Rows.Count();
    var copiedRows = result.Rows.Count();
    Assert.Equal(originalRows, copiedRows);
}
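// WaitForStreamingBufferToEmpty is a test helper whose implementation is not shown above.
// A minimal sketch of what it might look like, assuming the table resource exposes its
// streaming buffer via Resource.StreamingBuffer and that it is reported as null once the
// buffer has been flushed (these names are assumptions, not the verified API surface).
// Requires System.Threading for Thread.Sleep.
private bool WaitForStreamingBufferToEmpty(string tableId)
{
    BigqueryClient client = BigqueryClient.Create(_fixture.ProjectId);
    BigqueryTable table = client.GetTable(_fixture.GameDatasetId, tableId);
    // Poll for up to 30 seconds, re-fetching the table each second,
    // until the streaming buffer has drained.
    for (int i = 0; i < 30 && table.Resource.StreamingBuffer != null; i++)
    {
        Thread.Sleep(1000);
        table = client.GetTable(_fixture.GameDatasetId, tableId);
    }
    return table.Resource.StreamingBuffer == null;
}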
// [END list_projects]
// [START list_rows]
public int ListRows(
    string projectId, string datasetId, string tableId, int numberOfRows,
    BigqueryClient client)
{
    int recordCount = 0;
    var result = client.ListRows(projectId, datasetId, tableId, null,
        new ListRowsOptions { PageSize = numberOfRows });
    foreach (var row in result.Take(numberOfRows))
    {
        Console.WriteLine($"{row["word"]}: {row["corpus"]}");
        recordCount++;
    }
    return recordCount;
}
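// A hypothetical call site for the sample above. The column names ("word", "corpus")
// match BigQuery's public Shakespeare sample table, so that table is assumed here;
// substitute your own project, dataset and table identifiers as needed.
BigqueryClient client = BigqueryClient.Create("your-billing-project-id");
int printed = ListRows(
    projectId: "bigquery-public-data", datasetId: "samples", tableId: "shakespeare",
    numberOfRows: 10, client: client);
Console.WriteLine($"Printed {printed} rows.");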
// [END list_rows]
// [START browse_table]
public int TableDataList(
    string datasetId, string tableId, int pageSize, BigqueryClient client)
{
    int recordCount = 0;
    var result = client.ListRows(datasetId, tableId, null,
        new ListRowsOptions { PageSize = pageSize });
    // If there are more rows than were returned in the first page of results,
    // iterating over the rows will lazily evaluate the results each time,
    // making further requests as necessary.
    foreach (var row in result)
    {
        Console.WriteLine($"{row["title"]}: {row["unique_words"]}");
        recordCount++;
    }
    return recordCount;
}
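// A hypothetical call site for the browse sample above. The "title" / "unique_words"
// columns are assumed to exist in the target table (for example, a destination table
// written by an earlier query sample); the identifiers below are placeholders.
BigqueryClient client = BigqueryClient.Create("your-project-id");
// A small page size forces several list requests; the foreach loop in the sample
// pages through them transparently.
int total = TableDataList("your_dataset_id", "your_table_id", pageSize: 10, client: client);
Console.WriteLine($"Browsed {total} rows.");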
// [END stream_row]
// [START export_to_cloud_storage]
public void ExportJsonToGcs(
    string datasetId, string tableId, string bucketName, string fileName,
    BigqueryClient client)
{
    StorageClient gcsClient = StorageClient.Create();
    string contentType = "application/json";
    // Get Table and append results into StringBuilder.
    var result = client.ListRows(datasetId, tableId);
    StringBuilder sb = new StringBuilder();
    foreach (var row in result)
    {
        sb.Append($"{{\"title\" : \"{row["title"]}\", \"unique_words\":\"{row["unique_words"]}\"}}{Environment.NewLine}");
    }
    // Save stream to Google Cloud Storage.
    using (var stream = new MemoryStream(Encoding.UTF8.GetBytes(sb.ToString())))
    {
        var obj = gcsClient.UploadObject(bucketName, fileName, contentType, stream);
    }
}
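// A hypothetical call site for the export above. The bucket is assumed to already exist
// and the caller to have write access; all identifiers below are placeholders.
BigqueryClient bqClient = BigqueryClient.Create("your-project-id");
// Each row is written as one JSON object per line (newline-delimited JSON).
ExportJsonToGcs("your_dataset_id", "your_table_id", "your-bucket-name",
    "table-export.json", bqClient);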
// [END export_to_cloud_storage]
public void ExportCsvToGcs(
    string datasetId, string tableId, string bucketName, string fileName,
    BigqueryClient client)
{
    StorageClient gcsClient = StorageClient.Create();
    string contentType = "text/csv";
    // Get Table and append results into StringBuilder.
    var result = client.ListRows(datasetId, tableId);
    StringBuilder sb = new StringBuilder();
    // Create header row.
    sb.Append($"title, unique_words,{Environment.NewLine}");
    foreach (var row in result)
    {
        sb.Append($"{row["title"]}, {row["unique_words"]},{Environment.NewLine}");
    }
    // Save stream to Google Cloud Storage.
    using (var stream = new MemoryStream(Encoding.UTF8.GetBytes(sb.ToString())))
    {
        var obj = gcsClient.UploadObject(bucketName, fileName, contentType, stream);
    }
}
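// Note: the string interpolation above does not quote or escape field values, so a title
// containing a comma, quote or newline would corrupt the CSV. A hypothetical helper
// (not part of the original sample) that applies standard RFC 4180 quoting:
private static string CsvEscape(string value)
{
    if (value == null)
    {
        return "";
    }
    // Quote the field and double any embedded quotes when special characters are present.
    return value.IndexOfAny(new[] { ',', '"', '\n', '\r' }) >= 0
        ? $"\"{value.Replace("\"", "\"\"")}\""
        : value;
}
// Possible usage inside the loop above:
// sb.Append($"{CsvEscape((string)row["title"])}, {row["unique_words"]},{Environment.NewLine}");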
public void ListRows()
{
    string projectId = _fixture.ProjectId;
    string datasetId = _fixture.GameDatasetId;
    string tableId = _fixture.HistoryTableId;

    // Snippet: ListRows(*,*,*)
    BigqueryClient client = BigqueryClient.Create(projectId);
    BigqueryResult result = client.ListRows(datasetId, tableId);
    foreach (BigqueryResult.Row row in result.Rows)
    {
        DateTime timestamp = (DateTime)row["game_started"];
        long level = (long)row["level"];
        long score = (long)row["score"];
        string player = (string)row["player"];
        Console.WriteLine($"{player}: {level}/{score} ({timestamp:yyyy-MM-dd HH:mm:ss})");
    }
    // End snippet

    // We set up 7 results in the fixture. Other tests may add more.
    Assert.True(result.Rows.Count() >= 7);
}