public void ExportCsv()
{
    // Exports the fixture's history table to a CSV object in a freshly created
    // GCS bucket, prints the CSV content, then removes the object and bucket.
    // TODO: Make this simpler in the wrapper
    var projectId = _fixture.ProjectId;
    var datasetId = _fixture.GameDatasetId;
    var historyTableId = _fixture.HistoryTableId;
    string bucket = "bigquerysnippets-" + Guid.NewGuid().ToString().ToLowerInvariant();
    string objectName = "table.csv";
    if (!WaitForStreamingBufferToEmpty(historyTableId))
    {
        Console.WriteLine("Streaming buffer not empty after 30 seconds; not performing export");
        return;
    }

    // Sample: ExportCsv
    BigqueryClient client = BigqueryClient.Create(projectId);
    // Create a storage bucket; in normal use it's likely that one would exist already.
    StorageClient storageClient = StorageClient.Create();
    storageClient.CreateBucket(projectId, bucket);
    // From this point on the bucket exists, so make sure we always clean it up,
    // even if the export fails or an assertion throws - otherwise every failed
    // run leaks a bucket and object.
    try
    {
        string destinationUri = $"gs://{bucket}/{objectName}";
        Job job = client.Service.Jobs.Insert(new Job
        {
            Configuration = new JobConfiguration
            {
                Extract = new JobConfigurationExtract
                {
                    DestinationFormat = "CSV",
                    DestinationUris = new[] { destinationUri },
                    SourceTable = client.GetTableReference(datasetId, historyTableId)
                }
            }
        }, projectId).Execute();

        // Wait until the export has finished.
        var result = client.PollJob(job.JobReference);
        // If there are any errors, display them *then* fail.
        if (result.Status.ErrorResult != null)
        {
            foreach (var error in result.Status.Errors)
            {
                Console.WriteLine(error.Message);
            }
        }
        Assert.Null(result.Status.ErrorResult);

        // Dispose the stream deterministically rather than leaving it to the GC.
        using (MemoryStream stream = new MemoryStream())
        {
            storageClient.DownloadObject(bucket, objectName, stream);
            Console.WriteLine(Encoding.UTF8.GetString(stream.ToArray()));
        }
        // End sample
    }
    finally
    {
        storageClient.DeleteObject(bucket, objectName);
        storageClient.DeleteBucket(bucket);
    }
}
public void CopyTable()
{
    // Copies the fixture's history table to a new table in the same dataset,
    // lists the copied rows, and asserts the row counts match.
    // TODO: Make this simpler in the wrapper
    var projectId = _fixture.ProjectId;
    var datasetId = _fixture.GameDatasetId;
    var historyTableId = _fixture.HistoryTableId;
    var destinationTableId = Guid.NewGuid().ToString().Replace('-', '_');
    if (!WaitForStreamingBufferToEmpty(historyTableId))
    {
        Console.WriteLine("Streaming buffer not empty after 30 seconds; not performing export");
        return;
    }

    // Sample: CopyTable
    BigqueryClient client = BigqueryClient.Create(projectId);
    Job job = client.Service.Jobs.Insert(new Job
    {
        Configuration = new JobConfiguration
        {
            Copy = new JobConfigurationTableCopy
            {
                DestinationTable = client.GetTableReference(datasetId, destinationTableId),
                SourceTable = client.GetTableReference(datasetId, historyTableId)
            }
        }
    }, projectId).Execute();

    // Wait until the copy has finished.
    var completedJob = client.PollJob(job.JobReference);
    // Check the job outcome (same pattern as ExportCsv): display any errors
    // *then* fail, rather than silently continuing and producing a confusing
    // row-count mismatch below.
    if (completedJob.Status.ErrorResult != null)
    {
        foreach (var error in completedJob.Status.Errors)
        {
            Console.WriteLine(error.Message);
        }
    }
    Assert.Null(completedJob.Status.ErrorResult);

    // Now list its rows
    BigqueryResult result = client.ListRows(datasetId, destinationTableId);
    foreach (BigqueryResult.Row row in result.Rows)
    {
        DateTime timestamp = (DateTime)row["game_started"];
        long level = (long)row["level"];
        long score = (long)row["score"];
        string player = (string)row["player"];
        Console.WriteLine($"{player}: {level}/{score} ({timestamp:yyyy-MM-dd HH:mm:ss})");
    }
    // End sample

    var originalRows = client.ListRows(datasetId, historyTableId).Rows.Count();
    var copiedRows = result.Rows.Count();
    Assert.Equal(originalRows, copiedRows);
}