public void ExecuteQuery()
{
    // Identifiers for the shared test dataset come from the fixture.
    string projectId = _fixture.ProjectId;
    string datasetId = _fixture.GameDatasetId;
    string historyTableId = _fixture.HistoryTableId;
    // Snippet: ExecuteQuery
    BigqueryClient client = BigqueryClient.Create(projectId);
    BigqueryTable table = client.GetTable(datasetId, historyTableId);
    BigqueryResult result = client.ExecuteQuery(
        $@"SELECT player, MAX(score) AS score FROM {table} GROUP BY player ORDER BY score DESC");
    foreach (var row in result.Rows)
    {
        Console.WriteLine($"{row["player"]}: {row["score"]}");
    }
    // End snippet

    // The fixture seeds scores for (at least) these three players.
    List<string> players = result.Rows
        .Select(r => (string) r["player"])
        .ToList();
    Assert.Contains("Ben", players);
    Assert.Contains("Nadia", players);
    Assert.Contains("Tim", players);
}
public void Insert()
{
    string projectId = _fixture.ProjectId;
    string datasetId = _fixture.GameDatasetId;
    string tableId = _fixture.HistoryTableId;
    BigqueryTable table = BigqueryClient.Create(projectId).GetTable(datasetId, tableId);
    // Count rows up front so the assertion below is independent of other tests' data.
    int countBefore = table.ListRows().Rows.Count();

    // Snippet: Insert(string,string,*)
    BigqueryClient client = BigqueryClient.Create(projectId);
    // The insert ID is optional, but can avoid duplicate data
    // when retrying inserts.
    InsertRow row1 = new InsertRow("row1")
    {
        { "player", "Jane" },
        { "level", 3 },
        { "score", 3600 },
        { "game_started", DateTime.UtcNow }
    };
    InsertRow row2 = new InsertRow("row2")
    {
        { "player", "Jeff" },
        { "level", 2 },
        { "score", 2000 },
        { "game_started", DateTime.UtcNow }
    };
    client.Insert(datasetId, tableId, row1, row2);
    // End snippet

    // Exactly the two rows above should have been added.
    int countAfter = table.ListRows().Rows.Count();
    Assert.Equal(countBefore + 2, countAfter);
}
// [END create_dataset]
// [START create_table]
public void CreateTable(string datasetId, string tableId, BigqueryClient client)
{
    BigqueryDataset dataset = client.GetDataset(datasetId);
    // Create schema for new table.
    TableSchema schema = new TableSchemaBuilder
    {
        { "title", BigqueryDbType.String },
        { "unique_words", BigqueryDbType.Integer }
    }.Build();
    // Create the table if it doesn't exist.
    BigqueryTable table = dataset.GetOrCreateTable(tableId, schema);
}
public void UploadCsv()
{
    string projectId = _fixture.ProjectId;
    string datasetId = _fixture.GameDatasetId;
    string tableId = _fixture.HistoryTableId;
    BigqueryTable table = BigqueryClient.Create(projectId).GetTable(datasetId, tableId);
    int countBefore = table.ListRows().Rows.Count();

    // Snippet: UploadCsv(*,*,*,*,*)
    BigqueryClient client = BigqueryClient.Create(projectId);
    string[] csvRows =
    {
        "player,score,level,game_started",
        "Tim,5000,3,2014-08-19T12:41:35.220Z",
        "Holly,6000,4,2014-08-03T08:45:35.123Z",
        "Jane,2402,1,2015-01-20T10:13:35.059Z"
    };

    // Normally we'd be uploading from a file or similar. Any readable stream can be used.
    var stream = new MemoryStream(Encoding.UTF8.GetBytes(string.Join("\n", csvRows)));

    // This example uploads data to an existing table. If the upload will create a new table
    // or if the schema in the CSV isn't identical to the schema in the table (for example if the
    // columns are in a different order), create a schema to pass into the call.
    TableSchema schema = null;
    BigqueryJob job = client.UploadCsv(datasetId, tableId, schema, stream,
        // Our sample data has a header row, so we need to skip it.
        new UploadCsvOptions { SkipLeadingRows = 1 });
    // Use the job to find out when the data has finished being inserted into the table,
    // report errors etc.
    // End snippet

    var result = job.Poll();
    // If there are any errors, display them *then* fail.
    if (result.Status.ErrorResult != null)
    {
        foreach (var error in result.Status.Errors)
        {
            Console.WriteLine(error.Message);
        }
    }
    Assert.Null(result.Status.ErrorResult);

    // Three data rows follow the header, so three rows should have been added.
    int countAfter = table.ListRows().Rows.Count();
    Assert.Equal(countBefore + 3, countAfter);
}
public void UploadJson()
{
    string projectId = _fixture.ProjectId;
    string datasetId = _fixture.GameDatasetId;
    string tableId = _fixture.HistoryTableId;
    BigqueryTable table = BigqueryClient.Create(projectId).GetTable(datasetId, tableId);
    int countBefore = table.ListRows().Rows.Count();

    // Snippet: UploadJson(*,*,*,*,*)
    BigqueryClient client = BigqueryClient.Create(projectId);
    // Note that there's a single line per JSON object. This is not a JSON array.
    IEnumerable<string> jsonRows = new string[]
    {
        "{ 'player': 'John', 'score': 50, 'level': 1, 'game_started': '2014-08-19 12:41:35.220' }",
        "{ 'player': 'Zoe', 'score': 605, 'level': 1, 'game_started': '2016-01-01 08:30:35.000' }",
    }.Select(row => row.Replace('\'', '"')); // Simple way of representing C# in JSON to avoid escaping " everywhere.

    // Normally we'd be uploading from a file or similar. Any readable stream can be used.
    var stream = new MemoryStream(Encoding.UTF8.GetBytes(string.Join("\n", jsonRows)));

    // This example uploads data to an existing table. If the upload will create a new table
    // or if the schema in the JSON isn't identical to the schema in the table,
    // create a schema to pass into the call.
    TableSchema schema = null;
    BigqueryJob job = client.UploadJson(datasetId, tableId, schema, stream);
    // Use the job to find out when the data has finished being inserted into the table,
    // report errors etc.
    // End snippet

    var result = job.Poll();
    // If there are any errors, display them *then* fail.
    if (result.Status.ErrorResult != null)
    {
        foreach (var error in result.Status.Errors)
        {
            Console.WriteLine(error.Message);
        }
    }
    Assert.Null(result.Status.ErrorResult);

    // The two JSON objects above should now be in the table.
    int countAfter = table.ListRows().Rows.Count();
    Assert.Equal(countBefore + 2, countAfter);
}
public void CreateQueryJob()
{
    string projectId = _fixture.ProjectId;
    string datasetId = _fixture.GameDatasetId;
    string historyTableId = _fixture.HistoryTableId;
    // Use a unique destination table per test run; '-' is not valid in a table ID.
    string queryTableId = Guid.NewGuid().ToString().Replace('-', '_');

    // Snippet: CreateQueryJob(*,*)
    BigqueryClient client = BigqueryClient.Create(projectId);
    BigqueryTable table = client.GetTable(datasetId, historyTableId);
    TableReference destination = client.GetTableReference(datasetId, queryTableId);
    // If the destination table is not specified, the results will be stored in
    // a temporary table.
    BigqueryJob job = client.CreateQueryJob(
        $@"SELECT player, MAX(score) AS score FROM {table} GROUP BY player ORDER BY score DESC",
        new CreateQueryJobOptions { DestinationTable = destination });

    // Wait for the job to complete.
    job.Poll();

    // Then we can fetch the results, either via the job or by accessing
    // the destination table.
    BigqueryResult result = client.GetQueryResults(job.Reference);
    foreach (var row in result.Rows)
    {
        Console.WriteLine($"{row["player"]}: {row["score"]}");
    }
    // End snippet

    // The fixture seeds scores for (at least) these three players.
    List<string> players = result.Rows
        .Select(r => (string) r["player"])
        .ToList();
    Assert.Contains("Ben", players);
    Assert.Contains("Nadia", players);
    Assert.Contains("Tim", players);
}
// Creates a new empty table in Google BigQuery.
// Requires the column type map to have been populated first via
// UploadTableToStorage() or MapColumnTypes(); throws
// InvalidOperationException otherwise. The dataset and table are
// created only if they don't already exist.
public void CreateBigQueryTable(string projectName, string dataSetName, string tableName)
{
    if (_DataTypeMap == null)
    {
        // Throw the specific standard exception for "object not in a valid
        // state for this call" rather than the base Exception type (CA2201),
        // so callers can catch it precisely. Catching Exception still works.
        throw new InvalidOperationException("DataTypeMap can not be null. Call UploadTableToStorage() or MapColumnTypes() method first");
    }

    BigqueryClient client = BigqueryClient.Create(projectName, _GoogleAPICredential);

    // Build the schema with Google's schema object and our DataTypeMap,
    // indexing 0..Count-1 to preserve the original column order.
    // NOTE(review): this assumes the map's keys are exactly the contiguous
    // integers 0..Count-1; a gap would throw KeyNotFoundException — confirm
    // against how MapColumnTypes() populates the map.
    TableSchemaBuilder schemaBuilder = new TableSchemaBuilder();
    for (int i = 0; i < _DataTypeMap.Keys.Count; i++)
    {
        schemaBuilder.Add(_DataTypeMap[i].ColumnName, _DataTypeMap[i].BQColumnType);
    }

    // Create the dataset if it doesn't exist, then the (empty) table.
    BigqueryDataset dataset = client.GetOrCreateDataset(dataSetName);
    BigqueryTable table = dataset.GetOrCreateTable(tableName, schemaBuilder.Build());
}
public void CreateTable()
{
    string projectId = _fixture.ProjectId;
    string datasetId = _fixture.GameDatasetId;
    // Unique table ID per run; '-' is not valid in a BigQuery table ID.
    string tableId = Guid.NewGuid().ToString().Replace("-", "_");

    // Snippet: CreateTable(string,string,*,*)
    BigqueryClient client = BigqueryClient.Create(projectId);
    TableSchema schema = new TableSchemaBuilder
    {
        { "from_player", BigqueryDbType.String },
        { "to_player", BigqueryDbType.String },
        { "message", BigqueryDbType.String }
    }.Build();
    BigqueryTable table = client.CreateTable(datasetId, tableId, schema);
    // Now populate the table with data...
    // End snippet

    // The new table should appear in the dataset's table listing.
    var tables = client.ListTables(datasetId);
    List<string> ids = tables.Select(t => t.Reference.TableId).ToList();
    Assert.Contains(tableId, ids);
}
public void ListRows()
{
    string projectId = _fixture.ProjectId;
    string datasetId = _fixture.GameDatasetId;
    string tableId = _fixture.HistoryTableId;

    // Snippet: ListRows
    BigqueryClient client = BigqueryClient.Create(projectId);
    BigqueryTable table = client.GetTable(datasetId, tableId);
    BigqueryResult result = table.ListRows();
    foreach (BigqueryResult.Row row in result.Rows)
    {
        // Fields come back as object; cast to the CLR types BigQuery maps to.
        string player = (string) row["player"];
        long level = (long) row["level"];
        long score = (long) row["score"];
        DateTime timestamp = (DateTime) row["game_started"];
        Console.WriteLine($"{player}: {level}/{score} ({timestamp:yyyy-MM-dd HH:mm:ss})");
    }
    // End snippet

    // We set up 7 results in the fixture. Other tests may add more.
    Assert.True(result.Rows.Count() >= 7);
}
public async Task ListRowsAsync()
{
    string projectId = _fixture.ProjectId;
    string datasetId = _fixture.GameDatasetId;
    string tableId = _fixture.HistoryTableId;

    // Snippet: ListRowsAsync
    BigqueryClient client = BigqueryClient.Create(projectId);
    BigqueryTable table = client.GetTable(datasetId, tableId);
    IPagedAsyncEnumerable<TableDataList, BigqueryRow> result = table.ListRowsAsync();
    await result.ForEachAsync(row =>
    {
        // Fields come back as object; cast to the CLR types BigQuery maps to.
        string player = (string) row["player"];
        long level = (long) row["level"];
        long score = (long) row["score"];
        DateTime timestamp = (DateTime) row["game_started"];
        Console.WriteLine($"{player}: {level}/{score} ({timestamp:yyyy-MM-dd HH:mm:ss})");
    });
    // End snippet

    // We set up 7 results in the fixture. Other tests may add more.
    Assert.True(await result.Count() >= 7);
}