// Import a CSV file from a Google Cloud Storage bucket into an existing Google BigQuery table.
// Note: projectName is currently unused; the client and credential already identify the project.
private void ImportFileFromStorageToBigQuery(BigqueryClient client, string projectName,
    string bucketName, string fileName, string dataSetName, string tableName)
{
    StorageClient gcsClient = StorageClient.Create(_GoogleAPICredential);

    using (var stream = new MemoryStream())
    {
        gcsClient.DownloadObject(bucketName, fileName, stream);

        // DownloadObject leaves the stream positioned at its end, so rewind it before
        // handing it to BigQuery; otherwise UploadCsv reads zero bytes and loads no rows.
        stream.Position = 0;

        // This uploads data to an existing table. If the upload will create a new table,
        // or if the schema in the CSV isn't identical to the schema in the table,
        // create a schema to pass into the call instead of passing in a null value.
        // Let any exception propagate (or log and rethrow); the original empty catch
        // blocks swallowed failures and then dereferenced a null job.
        BigqueryJob job = client.UploadCsv(dataSetName, tableName, null, stream);

        // Use the job to find out when the data has finished being inserted into the
        // table, report errors etc.

        // Wait for the job to complete.
        job.Poll();
    }
}
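For context, a call site for this helper might look like the sketch below. The client is created the same way as in the test that follows; every name here is a placeholder, not a value from the original code.

// Hypothetical usage; all identifiers below are placeholders.
BigqueryClient client = BigqueryClient.Create("my-project-id");
ImportFileFromStorageToBigQuery(client, "my-project-id", "my-bucket",
    "scores.csv", "game_dataset", "history_table");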
public void UploadCsv()
{
    string projectId = _fixture.ProjectId;
    string datasetId = _fixture.GameDatasetId;
    string tableId = _fixture.HistoryTableId;
    BigqueryTable table = BigqueryClient.Create(projectId).GetTable(datasetId, tableId);

    int rowsBefore = table.ListRows().Rows.Count();

    // Snippet: UploadCsv(*,*,*,*,*)
    BigqueryClient client = BigqueryClient.Create(projectId);
    string[] csvRows =
    {
        "player,score,level,game_started",
        "Tim,5000,3,2014-08-19T12:41:35.220Z",
        "Holly,6000,4,2014-08-03T08:45:35.123Z",
        "Jane,2402,1,2015-01-20T10:13:35.059Z"
    };

    // Normally we'd be uploading from a file or similar. Any readable stream can be used.
    var stream = new MemoryStream(Encoding.UTF8.GetBytes(string.Join("\n", csvRows)));

    // This example uploads data to an existing table. If the upload will create a new table,
    // or if the schema in the CSV isn't identical to the schema in the table (for example if
    // the columns are in a different order), create a schema to pass into the call.
    TableSchema schema = null;
    BigqueryJob job = client.UploadCsv(datasetId, tableId, schema, stream,
        // Our sample data has a header row, so we need to skip it.
        new UploadCsvOptions { SkipLeadingRows = 1 });

    // Use the job to find out when the data has finished being inserted into the table,
    // report errors etc.
    // End snippet

    var result = job.Poll();

    // If there are any errors, display them *then* fail.
    if (result.Status.ErrorResult != null)
    {
        foreach (var error in result.Status.Errors)
        {
            Console.WriteLine(error.Message);
        }
    }
    Assert.Null(result.Status.ErrorResult);

    int rowsAfter = table.ListRows().Rows.Count();
    Assert.Equal(rowsBefore + 3, rowsAfter);
}
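Buffering the object through a MemoryStream works, but it downloads the whole file to the client only to upload it again. If you can use the GA package (Google.Cloud.BigQuery.V2, where the client type is spelled BigQueryClient), a load job can point BigQuery at the gs:// URI directly so the data never passes through your process. The following is a rough sketch under that assumption, again with placeholder names.

// Sketch assuming the GA Google.Cloud.BigQuery.V2 package; all names are placeholders.
using Google.Cloud.BigQuery.V2;

BigQueryClient client = BigQueryClient.Create("my-project-id");
// BigQuery reads the object from Cloud Storage itself; no download/re-upload here.
BigQueryJob job = client.CreateLoadJob(
    "gs://my-bucket/scores.csv",
    client.GetTableReference("game_dataset", "history_table"),
    schema: null, // null: the destination table's existing schema is used
    options: new CreateLoadJobOptions { SkipLeadingRows = 1 });
// Wait for the load to finish and surface any load errors as an exception.
job.PollUntilCompleted().ThrowOnAnyError();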