// Copies the history table to a brand-new table via a BigQuery copy job,
// prints the copied rows, and asserts the destination row count matches
// the source. Skips silently if the streaming buffer has not drained.
public void CopyTable()
{
    // TODO: Make this simpler in the wrapper
    var projectId = _fixture.ProjectId;
    var datasetId = _fixture.GameDatasetId;
    var historyTableId = _fixture.HistoryTableId;
    // Table IDs may not contain '-', so sanitize the GUID.
    var destinationTableId = Guid.NewGuid().ToString().Replace('-', '_');
    if (!WaitForStreamingBufferToEmpty(historyTableId))
    {
        Console.WriteLine("Streaming buffer not empty after 30 seconds; not performing export");
        return;
    }

    // Sample: CopyTable
    BigqueryClient client = BigqueryClient.Create(projectId);
    Job job = client.Service.Jobs.Insert(new Job
    {
        Configuration = new JobConfiguration
        {
            Copy = new JobConfigurationTableCopy
            {
                DestinationTable = client.GetTableReference(datasetId, destinationTableId),
                SourceTable = client.GetTableReference(datasetId, historyTableId)
            }
        }
    }, projectId).Execute();
    // Wait until the copy has finished.
    client.PollJob(job.JobReference);
    // Now list its rows. Materialize the sequence once: Rows is evaluated
    // lazily, so enumerating it twice (the foreach below and the Count for
    // the assertion) would re-fetch the results from the service.
    BigqueryResult result = client.ListRows(datasetId, destinationTableId);
    var copiedRowList = result.Rows.ToList();
    foreach (BigqueryResult.Row row in copiedRowList)
    {
        DateTime timestamp = (DateTime)row["game_started"];
        long level = (long)row["level"];
        long score = (long)row["score"];
        string player = (string)row["player"];
        Console.WriteLine($"{player}: {level}/{score} ({timestamp:yyyy-MM-dd HH:mm:ss})");
    }
    // End sample

    var originalRows = client.ListRows(datasetId, historyTableId).Rows.Count();
    var copiedRows = copiedRowList.Count;
    Assert.Equal(originalRows, copiedRows);
}
// Exports the history table to a CSV object in a temporary GCS bucket,
// prints the CSV content, and cleans up the bucket. Skips silently if the
// streaming buffer has not drained.
public void ExportCsv()
{
    // TODO: Make this simpler in the wrapper
    var projectId = _fixture.ProjectId;
    var datasetId = _fixture.GameDatasetId;
    var historyTableId = _fixture.HistoryTableId;
    string bucket = "bigquerysnippets-" + Guid.NewGuid().ToString().ToLowerInvariant();
    string objectName = "table.csv";
    if (!WaitForStreamingBufferToEmpty(historyTableId))
    {
        Console.WriteLine("Streaming buffer not empty after 30 seconds; not performing export");
        return;
    }

    // Sample: ExportCsv
    BigqueryClient client = BigqueryClient.Create(projectId);
    // Create a storage bucket; in normal use it's likely that one would exist already.
    StorageClient storageClient = StorageClient.Create();
    storageClient.CreateBucket(projectId, bucket);
    try
    {
        string destinationUri = $"gs://{bucket}/{objectName}";
        Job job = client.Service.Jobs.Insert(new Job
        {
            Configuration = new JobConfiguration
            {
                Extract = new JobConfigurationExtract
                {
                    DestinationFormat = "CSV",
                    DestinationUris = new[] { destinationUri },
                    SourceTable = client.GetTableReference(datasetId, historyTableId)
                }
            }
        }, projectId).Execute();
        // Wait until the export has finished.
        var result = client.PollJob(job.JobReference);
        // If there are any errors, display them *then* fail.
        if (result.Status.ErrorResult != null)
        {
            foreach (var error in result.Status.Errors)
            {
                Console.WriteLine(error.Message);
            }
        }
        Assert.Null(result.Status.ErrorResult);

        using (MemoryStream stream = new MemoryStream())
        {
            storageClient.DownloadObject(bucket, objectName, stream);
            Console.WriteLine(Encoding.UTF8.GetString(stream.ToArray()));
        }
        // End sample
    }
    finally
    {
        // Clean up even when an assertion or API call above throws; otherwise
        // every failed run leaks a uniquely-named bucket.
        try
        {
            storageClient.DeleteObject(bucket, objectName);
        }
        catch
        {
            // Best effort: the object won't exist if the export job failed.
        }
        storageClient.DeleteBucket(bucket);
    }
}
/// <summary>
/// Runs <paramref name="query"/> with the given table as the query job's
/// destination, blocking until the job has completed.
/// </summary>
public void PopulateTable(
    string query, string datasetId, string newTableId, BigqueryClient client)
{
    var options = new CreateQueryJobOptions
    {
        DestinationTable = client.GetTableReference(datasetId, newTableId)
    };
    BigqueryJob queryJob = client.CreateQueryJob(query, options);
    // Block until the destination table has been populated.
    queryJob.PollQueryUntilCompleted();
}
// [START copy_table]
/// <summary>
/// Copies a table by selecting every row of the source into a new
/// destination table, waiting for the query job to finish.
/// </summary>
public void CopyTable(
    string datasetId, string tableIdToBeCopied, string newTableId, BigqueryClient client)
{
    BigqueryTable sourceTable = client.GetTable(datasetId, tableIdToBeCopied);
    // Interpolating the table relies on its string form being a valid
    // SQL table identifier.
    var copyQuery = $"SELECT * FROM {sourceTable}";
    var options = new CreateQueryJobOptions
    {
        DestinationTable = client.GetTableReference(datasetId, newTableId)
    };
    BigqueryJob copyJob = client.CreateQueryJob(copyQuery, options);
    // Wait for the job to complete.
    copyJob.PollQueryUntilCompleted();
}
// Demonstrates CreateQueryJob with an explicit destination table: runs a
// top-score-per-player query, prints the results, and asserts the expected
// players appear.
public void CreateQueryJob()
{
    var projectId = _fixture.ProjectId;
    var datasetId = _fixture.GameDatasetId;
    var historyTableId = _fixture.HistoryTableId;
    // Table IDs may not contain '-', so sanitize the GUID.
    var queryTableId = Guid.NewGuid().ToString().Replace('-', '_');

    // Snippet: CreateQueryJob(*,*)
    BigqueryClient client = BigqueryClient.Create(projectId);
    BigqueryTable table = client.GetTable(datasetId, historyTableId);
    TableReference destination = client.GetTableReference(datasetId, queryTableId);
    // If the destination table is not specified, the results will be stored in
    // a temporary table.
    BigqueryJob job = client.CreateQueryJob(
        $@"SELECT player, MAX(score) AS score
           FROM {table}
           GROUP BY player
           ORDER BY score DESC",
        new CreateQueryJobOptions { DestinationTable = destination });

    // Wait for the job to complete.
    job.Poll();

    // Then we can fetch the results, either via the job or by accessing
    // the destination table. Materialize the rows once: Rows is evaluated
    // lazily, so enumerating it twice (the foreach below and the player
    // projection for the assertions) would re-fetch the results.
    BigqueryResult result = client.GetQueryResults(job.Reference);
    var rows = result.Rows.ToList();
    foreach (var row in rows)
    {
        Console.WriteLine($"{row["player"]}: {row["score"]}");
    }
    // End snippet

    var players = rows.Select(r => (string)r["player"]).ToList();
    Assert.Contains("Ben", players);
    Assert.Contains("Nadia", players);
    Assert.Contains("Tim", players);
}