// Entry point: executes every query listed in a JSON string-array file
// against the given project. Returns 0 on success, 1 on bad arguments.
private static int Main(string[] args)
{
    if (args.Length != 2)
    {
        Console.WriteLine("Arguments: <project-id> <path to JSON file containing queries>");
        return 1;
    }
    var bigQueryClient = BigQueryClient.Create(args[0]);
    // Force authentication up front so later failures are query-related.
    bigQueryClient.ListProjects().ToList();
    var queryJson = File.ReadAllText(args[1]);
    var queries = JsonConvert.DeserializeObject<string[]>(queryJson);
    foreach (var sql in queries)
    {
        RunQuery(bigQueryClient, sql);
    }
    return 0;
}
// Patches a model's description (without etag matching) and verifies the
// change both on the returned resource and via a fresh fetch.
public void PatchModel()
{
    var client = BigQueryClient.Create(_fixture.ProjectId);
    string datasetId = _fixture.DatasetId;
    var modelId = _fixture.ModelId;
    var model = client.GetModel(datasetId, modelId);
    var oldDescription = model.Resource.Description;

    var patched = model.Patch(new Model { Description = $"{oldDescription}_patched" }, matchETag: false);
    // The patch result should already reflect the new description.
    Assert.Equal($"{oldDescription}_patched", patched.Resource.Description);

    // A direct fetch should agree with the patch result, including the etag.
    var refetched = client.GetModel(datasetId, modelId);
    Assert.Equal($"{oldDescription}_patched", refetched.Resource.Description);
    Assert.Equal(refetched.Resource.ETag, patched.Resource.ETag);
}
// Verifies that Update fails with 412 PreconditionFailed when the dataset
// was modified on the server after our handle was obtained.
public async Task UpdateDatasetAsync_Conflict()
{
    var client = BigQueryClient.Create(_fixture.ProjectId);
    var datasetId = _fixture.CreateDatasetId();
    var staleHandle = await client.CreateDatasetAsync(datasetId,
        new CreateDatasetOptions { Description = "Description1", FriendlyName = "FriendlyName1" });

    // A second writer updates the dataset, changing the server-side etag.
    var freshHandle = await client.GetDatasetAsync(datasetId);
    freshHandle.Resource.FriendlyName = "Sneak attack!";
    freshHandle.Update();

    // Updating through the stale handle must now be rejected.
    staleHandle.Resource.Description = "Description2";
    var exception = await Assert.ThrowsAsync<GoogleApiException>(() => staleHandle.UpdateAsync());
    Assert.Equal(HttpStatusCode.PreconditionFailed, exception.HttpStatusCode);
}
// Verifies that PatchAsync with matchETag: true fails with 412 once the
// table has been modified behind our back.
public async Task PatchTableAsync_ConflictMatchEtag()
{
    var client = BigQueryClient.Create(_fixture.ProjectId);
    string datasetId = _fixture.DatasetId;
    var tableId = _fixture.CreateTableId();
    var staleHandle = await client.CreateTableAsync(datasetId, tableId,
        new Table { Description = "Description1", FriendlyName = "FriendlyName1" });

    // A second writer updates the table, changing the server-side etag.
    var freshHandle = await client.GetTableAsync(datasetId, tableId);
    freshHandle.Resource.FriendlyName = "Sneak attack!";
    freshHandle.Update();

    // The stale handle's etag no longer matches, so the patch must fail.
    var exception = await Assert.ThrowsAsync<GoogleApiException>(
        () => staleHandle.PatchAsync(new Table { Description = "Description2" }, matchETag: true));
    Assert.Equal(HttpStatusCode.PreconditionFailed, exception.HttpStatusCode);
}
// Inserting a row with an unknown field should succeed when
// AllowUnknownFields is set.
public void InsertRow_BadData_IgnoreBadData()
{
    var client = BigQueryClient.Create(_fixture.ProjectId);
    var dataset = client.GetDataset(_fixture.DatasetId);
    // Use a throwaway table so tables shared with other tests are unaffected.
    var schema = new TableSchemaBuilder { { "name", BigQueryDbType.String } }.Build();
    var table = dataset.CreateTable(_fixture.CreateTableId(), schema);
    // The row targets a field that does not exist in the schema.
    var badRow = new BigQueryInsertRow { { "noSuchField", 10 } };
    var insertOptions = new InsertOptions { AllowUnknownFields = true };
    _fixture.InsertAndWait(table, () => table.InsertRow(badRow, insertOptions), 1);
}
// Loads a public Parquet sample file (one row per US state) and checks
// all 50 rows arrive.
public void CreateLoadJob_Parquet()
{
    TestEnvironment.SkipIfVpcSc();
    const string sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet";
    var client = BigQueryClient.Create(_fixture.ProjectId);
    var tableRef = client.GetTableReference(_fixture.DatasetId, _fixture.CreateTableId());
    var loadOptions = new CreateLoadJobOptions { SourceFormat = FileFormat.Parquet };
    var loadJob = client.CreateLoadJob(sourceUri, tableRef, schema: null, options: loadOptions);
    // Surface any job-level errors rather than silently continuing.
    loadJob.PollUntilCompleted().ThrowOnAnyError();
    var loadedRows = client.GetTable(tableRef).ListRows().ToList();
    Assert.Equal(50, loadedRows.Count);
}
// Exports the public "shakespeare" sample table to a GCS bucket as
// newline-delimited JSON.
//
// projectId:  project to bill the extract job to.
// bucketName: destination GCS bucket (must already exist and be writable).
public void ExtractTableJson(
    string projectId = "your-project-id",
    string bucketName = "your-bucket-name")
{
    BigQueryClient client = BigQueryClient.Create(projectId);
    string destinationUri = $"gs://{bucketName}/shakespeare.json";
    var jobOptions = new CreateExtractJobOptions()
    {
        DestinationFormat = FileFormat.NewlineDelimitedJson
    };
    BigQueryJob job = client.CreateExtractJob(
        projectId: "bigquery-public-data",
        datasetId: "samples",
        tableId: "shakespeare",
        destinationUri: destinationUri,
        options: jobOptions
    );
    // Waits for the job to complete. PollUntilCompleted alone does not throw
    // for a failed job, so without ThrowOnAnyError a failed extract would
    // still print the success message below.
    job.PollUntilCompleted().ThrowOnAnyError();
    Console.Write($"Exported table to {destinationUri}.");
}
// Streams two rows into the shared high-score table, waits for them to be
// visible, and checks both players can be read back.
public void InsertRows()
{
    var client = BigQueryClient.Create(_fixture.ProjectId);
    var table = client.GetDataset(_fixture.DatasetId).GetTable(_fixture.HighScoreTableId);
    var newRows = new[]
    {
        BuildRow("Jenny", 125, new DateTime(2012, 5, 22, 1, 20, 30, DateTimeKind.Utc)),
        BuildRow("Lisa", 90, new DateTime(2011, 10, 12, 0, 0, 0, DateTimeKind.Utc))
    };
    // InsertAndWait blocks until the two new rows are actually queryable.
    var insertResult = _fixture.InsertAndWait(table, () => table.InsertRows(newRows), 2);
    AssertAllRowsInserted(insertResult);
    var allRows = table.ListRows().ToList();
    Assert.Contains(allRows, r => (string)r["player"] == "Jenny");
    Assert.Contains(allRows, r => (string)r["player"] == "Lisa");
}
// Patches only a table's description and verifies the friendly name is
// untouched, both on the patch result and on a direct fetch.
public async Task PatchTableAsync()
{
    var client = BigQueryClient.Create(_fixture.ProjectId);
    string datasetId = _fixture.DatasetId;
    var tableId = _fixture.CreateTableId();
    var created = await client.CreateTableAsync(datasetId, tableId,
        new Table { Description = "Description1", FriendlyName = "FriendlyName1" });

    var patched = await created.PatchAsync(new Table { Description = "Description2" }, matchETag: false);
    // Only the description changes; the friendly name must survive the patch.
    Assert.Equal("Description2", patched.Resource.Description);
    Assert.Equal("FriendlyName1", patched.Resource.FriendlyName);

    // A direct fetch should agree with the patch result, including the etag.
    var refetched = await client.GetTableAsync(datasetId, tableId);
    Assert.Equal("Description2", refetched.Resource.Description);
    Assert.Equal("FriendlyName1", refetched.Resource.FriendlyName);
    Assert.Equal(refetched.Resource.ETag, patched.Resource.ETag);
}
// Deletes from the given dataset every TABLE-typed table that is listed in
// lstTblsAll but not in lstTblsNoBorrar, logging each deletion via Trace.
// Errors are reported to the user via a message box (best-effort cleanup).
public static void BigQueryClean(string sProjectID, string sDatasetId, GoogleCredential credential)
{
    var bqClient = BigQueryClient.Create(sProjectID, credential);
    try
    {
        // Collect the ids of real tables, ignoring views and other types.
        List<string> lstTblsFinal = new List<string>();
        foreach (var table in bqClient.ListTables(sDatasetId).ToList<BigQueryTable>())
        {
            if (table.Resource.Type == "TABLE")
            {
                lstTblsFinal.Add(table.Reference.TableId);
            }
        }
        // Keep only the tables we manage...
        lstTblsFinal.RemoveAll(item => !lstTblsAll.Contains(item));
        // ...minus the explicit keep-list.
        foreach (var tblNoBorrar in lstTblsNoBorrar)
        {
            lstTblsFinal.Remove(tblNoBorrar);
        }
        // Use the List<T>.Count property instead of the Count() LINQ
        // extension method (no enumerator allocation, clearer intent).
        if (lstTblsFinal.Count > 0)
        {
            foreach (var tbl in lstTblsFinal)
            {
                bqClient.DeleteTable(sDatasetId, tbl);
                Trace.WriteLine(string.Format("Se borró la siguiente tabla en Bq: {0}", tbl));
            }
        }
        else
        {
            Trace.WriteLine("No se encontraron tablas en el Dataset");
        }
        twl.Flush();
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
}
// Runs a parameterized query using a scalar parameter (@gender) and an
// array parameter (@states), printing the top-ten name counts.
public void QueryWithArrayParameters(string projectId = "your-project-id")
{
    var gender = "M";
    string[] states = { "WA", "WI", "WV", "WY" };
    // Note: Standard SQL is required to use query parameters.
    var query = @" SELECT name, sum(number) as count FROM `bigquery-public-data.usa_names.usa_1910_2013` WHERE gender = @gender AND state IN UNNEST(@states) GROUP BY name ORDER BY count DESC LIMIT 10;";
    // Initialize client that will be used to send requests.
    var client = BigQueryClient.Create(projectId);
    var parameters = new BigQueryParameter[]
    {
        new BigQueryParameter("gender", BigQueryDbType.String, gender),
        new BigQueryParameter("states", BigQueryDbType.Array, states)
    };
    var queryJob = client.CreateQueryJob(
        sql: query,
        parameters: parameters,
        options: new QueryOptions { UseQueryCache = false });
    // Wait for the job to complete, surfacing any job errors.
    queryJob = queryJob.PollUntilCompleted().ThrowOnAnyError();
    // Display the results.
    foreach (BigQueryRow resultRow in client.GetQueryResults(queryJob.Reference))
    {
        Console.WriteLine($"{resultRow["name"]}: {resultRow["count"]}");
    }
}
// Verifies that ListRows / ListRowsAsync honor a partial schema obtained
// via GetTableOptions.SelectedFields: the selected top-level and nested
// ("children.*") fields are present in each row, and unselected fields
// throw KeyNotFoundException when accessed.
public async Task ListRows_PartialSchema()
{
    var client = BigQueryClient.Create(_fixture.ProjectId);
    string datasetId = _fixture.DatasetId;
    string tableId = _fixture.PeopleTableId;
    // Request only a subset of fields, including nested record fields.
    var options = new GetTableOptions { SelectedFields = "age,gender,children.gender,children.age" };
    // Obtain the table's partial schema.
    var table = client.GetTable(datasetId, tableId, options);
    // Use the partial schema to obtain partial rows.
    // We are testing both the sync and async versions.
    var rowsAsync = client.ListRowsAsync(datasetId, tableId, table.Schema);
    var rows = client.ListRows(datasetId, tableId, table.Schema);
    // Make sure we grab a row of a person with children for testing fields that should be present.
    var rowAsyncTask = rowsAsync.FirstAsync(row => ((Dictionary<string, object>[])row["children"])?.Length > 0);
    var rowSync = rows.First(row => ((Dictionary<string, object>[])row["children"])?.Length > 0);
    var rowAsync = await rowAsyncTask.ConfigureAwait(false);
    AssertPartialRow(rowSync);
    AssertPartialRow(rowAsync);

    // Local helper: asserts the partial-row contract for a single row.
    void AssertPartialRow(BigQueryRow row)
    {
        // These should be present
        Assert.NotNull(row["age"]);
        Assert.NotNull(row["gender"]);
        var children = row["children"] as Dictionary<string, object>[];
        Assert.NotNull(children);
        Assert.NotNull(children[0]["gender"]);
        Assert.NotNull(children[0]["age"]);
        // These shouldn't
        Assert.Throws<KeyNotFoundException>(() => row["fullName"]);
        Assert.Throws<KeyNotFoundException>(() => children[0]["name"]);
    }
}
// Verifies that Patch with matchETag: true fails with 412 once the dataset
// has been modified behind our back.
public void PatchDataset_ConflictMatchEtag()
{
    var client = BigQueryClient.Create(_fixture.ProjectId);
    var datasetId = _fixture.CreateDatasetId();
    var staleHandle = client.CreateDataset(datasetId,
        new Dataset { Description = "Description1", FriendlyName = "FriendlyName1" });

    // Another writer changes the dataset, invalidating the stale handle's etag.
    var freshHandle = client.GetDataset(datasetId);
    freshHandle.Resource.FriendlyName = "Sneak attack!";
    freshHandle.Update();

    // With matchETag: true the patch must be rejected with 412.
    var exception = Assert.Throws<GoogleApiException>(
        () => staleHandle.Patch(new Dataset { Description = "Description2" }, matchETag: true));
    Assert.Equal(HttpStatusCode.PreconditionFailed, exception.HttpStatusCode);
}
// Inserting bad rows (unknown field / type mismatch) with the supplied
// options should complete without throwing.
public void InsertRow_BadData_Silent(InsertOptions options)
{
    var client = BigQueryClient.Create(_fixture.ProjectId);
    var dataset = client.GetDataset(_fixture.DatasetId);
    // Use a throwaway table so shared tables are never polluted by bad rows.
    var schema = new TableSchemaBuilder { { "year", BigQueryDbType.Int64 } }.Build();
    var table = dataset.CreateTable(_fixture.CreateTableId(), schema);
    var badRows = new BigQueryInsertRow[]
    {
        new BigQueryInsertRow { { "noSuchField", 10 } },  // field not in schema
        new BigQueryInsertRow { { "year", "Unknown" } }   // wrong type for Int64
    };
    table.InsertRows(badRows, options);
}
// Lists all datasets in the given project and prints their ids, or a
// message when the project has none.
public void ListDatasets(
    string projectId = "your-project-id"
)
{
    BigQueryClient client = BigQueryClient.Create(projectId);
    // Retrieve list of datasets in project
    List<BigQueryDataset> datasets = client.ListDatasets().ToList();
    // Display the results
    if (datasets.Count == 0)
    {
        Console.WriteLine($"{projectId} does not contain any datasets.");
        return;
    }
    Console.WriteLine($"Datasets in project {projectId}:");
    foreach (var dataset in datasets)
    {
        Console.WriteLine($"\t{dataset.Reference.DatasetId}");
    }
}
// Patches only a dataset's description and verifies the friendly name is
// untouched, both on the patch result and on a direct fetch.
public void PatchDataset()
{
    var client = BigQueryClient.Create(_fixture.ProjectId);
    var id = _fixture.CreateDatasetId();
    var original = client.CreateDataset(id,
        new CreateDatasetOptions { Description = "Description1", FriendlyName = "FriendlyName1" });
    // Fix: the argument name is matchETag (capital T), as in every other
    // Patch call site in this file; "matchEtag" does not match the API's
    // parameter name and fails to compile.
    var patched = original.Patch(new Dataset { Description = "Description2" }, matchETag: false);
    // Check the results of the patch: only the description changes.
    Assert.Equal("Description2", patched.Resource.Description);
    Assert.Equal("FriendlyName1", patched.Resource.FriendlyName);
    // Check that it's still valid if fetched directly.
    var fetched = client.GetDataset(id);
    Assert.Equal("Description2", fetched.Resource.Description);
    Assert.Equal("FriendlyName1", fetched.Resource.FriendlyName);
}
// Test fixture setup: resolves the project from the environment, creates a
// random bucket and a random US-located BigQuery dataset.
public AssetFixture()
{
    ProjectId = Environment.GetEnvironmentVariable("GOOGLE_PROJECT_ID");
    if (string.IsNullOrEmpty(ProjectId))
    {
        throw new Exception("missing GOOGLE_PROJECT_ID");
    }
    _bucketFixture = new RandomBucketFixture();
    BucketName = _bucketFixture.BucketName;
    _bigQueryClient = BigQueryClient.Create(ProjectId);
    DatasetId = RandomDatasetId();
    _bigQueryClient.CreateDataset(datasetId: DatasetId, new Dataset { Location = "US" });
    // Wait 10 seconds to let resource creation events go to CAI.
    Thread.Sleep(10000);
}
// Verifies that Patch with matchETag: true fails with 412 once the model
// has been patched through another handle.
public void PatchModel_ConflictMatchEtag()
{
    var client = BigQueryClient.Create(_fixture.ProjectId);
    string datasetId = _fixture.DatasetId;
    var modelId = _fixture.ModelId;
    var staleHandle = client.GetModel(datasetId, modelId);

    // Patch via a second handle so the server-side etag moves on.
    var freshHandle = client.GetModel(datasetId, modelId);
    freshHandle = freshHandle.Patch(new Model { Description = "Sneak attack!" }, matchETag: false);

    // The stale handle's etag no longer matches, so this patch must fail.
    var exception = Assert.Throws<GoogleApiException>(
        () => staleHandle.Patch(new Model { Description = "Description" }, matchETag: true));
    Assert.Equal(HttpStatusCode.PreconditionFailed, exception.HttpStatusCode);
}
// Uploads an embedded ORC resource (one row per US state) and verifies both
// the 50 loaded rows and the inferred two-column string schema.
public void UploadOrc()
{
    var client = BigQueryClient.Create(_fixture.ProjectId);
    var tableReference = client.GetTableReference(_fixture.DatasetId, _fixture.CreateTableId());
    var typeInfo = typeof(UploadTest).GetTypeInfo();
    string resourceName = typeInfo.Namespace + ".us-states.orc";
    using (var stream = typeInfo.Assembly.GetManifestResourceStream(resourceName))
    {
        var uploadJob = client.UploadOrc(tableReference, stream);
        // Block until the load finishes, surfacing any job errors.
        uploadJob.PollUntilCompleted().ThrowOnAnyError();
    }
    var table = client.GetTable(tableReference);
    var loadedRows = table.ListRows().ToList();
    Assert.Equal(50, loadedRows.Count);
    // The schema should be inferred from the ORC file: two nullable strings.
    var fields = table.Schema.Fields
        .Select(f => new { f.Name, f.Type, f.Mode })
        .OrderBy(f => f.Name)
        .ToList();
    Assert.Equal(new { Name = "name", Type = "STRING", Mode = "NULLABLE" }, fields[0]);
    Assert.Equal(new { Name = "post_abbr", Type = "STRING", Mode = "NULLABLE" }, fields[1]);
}
// Exercises a UTC timestamp parameter via the legacy BigQueryCommand API:
// a bound just before the game start matches, one just after does not.
public void TimestampParameter_Legacy()
{
    var client = BigQueryClient.Create(_fixture.ProjectId);
    var table = client.GetTable(_fixture.DatasetId, _fixture.HighScoreTableId);
    var command = new BigQueryCommand($"SELECT score FROM {table} WHERE player=@player AND gameStarted > @start")
    {
        Parameters =
        {
            { "player", BigQueryDbType.String, "Angela" },
            { "start", BigQueryDbType.Timestamp, new DateTime(2001, 12, 31, 23, 59, 59, DateTimeKind.Utc) },
        }
    };
    // Find the value when we've provided a timestamp smaller than the actual value
    var page = client.ExecuteQuery(command).ReadPage(10);
    Assert.Equal(1, page.Rows.Count);
    // We shouldn't find it now. (Angela's game started at 2002-01-01T00:00:00Z)
    command.Parameters[1].Value = new DateTime(2002, 1, 1, 0, 0, 1, DateTimeKind.Utc);
    page = client.ExecuteQuery(command).ReadPage(10);
    Assert.Equal(0, page.Rows.Count);
}
// ClearDatasetLabels should return the labels that were present before the
// clear, and leave the dataset with no labels at all.
public void ClearDatasetLabels_Simple(bool runAsync)
{
    var client = BigQueryClient.Create(_fixture.ProjectId);
    var datasetId = _fixture.LabelsDatasetId;
    _fixture.SetUpLabels(new Dictionary<string, string> { ["lab1"] = "o1", ["lab2"] = "o2" });
    var previousLabels = RunMaybeAsync(runAsync,
        () => client.ClearDatasetLabels(datasetId),
        () => client.ClearDatasetLabelsAsync(datasetId));
    // The return value is the pre-clear label set.
    var expectedPrevious = new Dictionary<string, string> { ["lab1"] = "o1", ["lab2"] = "o2" };
    Assert.Equal(expectedPrevious, previousLabels);
    // After clearing, the resource reports no labels (null, not empty).
    Assert.Null(client.GetDataset(datasetId).Resource.Labels);
}
// Entry point for the sample: queries the public Shakespeare dataset and
// prints the number of unique words per corpus. args[0] is the billing
// project id; with no arguments a usage message is printed instead.
// The [START]/[END] tags delimit regions extracted into documentation.
private static void Main(string[] args)
{
    string projectId = null;
    if (args.Length == 0)
    {
        Console.WriteLine(usage);
    }
    else
    {
        projectId = args[0];
        // [START setup]
        // By default, the Google.Cloud.BigQuery.V2 library client will authenticate
        // using the service account file (created in the Google Developers
        // Console) specified by the GOOGLE_APPLICATION_CREDENTIALS
        // environment variable. If you are running on
        // a Google Compute Engine VM, authentication is completely
        // automatic.
        var client = BigQueryClient.Create(projectId);
        // [END setup]
        // [START query]
        var table = client.GetTable("bigquery-public-data", "samples", "shakespeare");
        string query = $@"SELECT corpus AS title, COUNT(*) AS unique_words FROM `{table.FullyQualifiedId}` GROUP BY title ORDER BY unique_words DESC LIMIT 42";
        var result = client.ExecuteQuery(query);
        // [END query]
        // [START print_results]
        Console.Write("\nQuery Results:\n------------\n");
        foreach (var row in result)
        {
            Console.WriteLine($"{row["title"]}: {row["unique_words"]}");
        }
        // [END print_results]
    }
    // Keep the console window open until the user dismisses it.
    Console.WriteLine("\nPress any key...");
    Console.ReadKey();
}
// Records a new subscription (email, start timestamp, plan type) in the
// BigQuery "mydata.customer" table, creating the dataset/table on first use,
// then redirects back to the main page with a success or failure message.
public IActionResult InsertBQData(string username, string userplan, string customerId)
{
    try
    {
        string projectId = "task8-imagerecognition-281418";
        Console.WriteLine("email: " + username);
        Console.WriteLine("plan: " + userplan);
        BigQueryClient client = BigQueryClient.Create(projectId);
        // Create the dataset if it doesn't exist.
        BigQueryDataset dataset = client.GetOrCreateDataset("mydata");
        // Create the table if it doesn't exist.
        BigQueryTable table = dataset.GetOrCreateTable("customer", new TableSchemaBuilder
        {
            { "email", BigQueryDbType.String },
            { "subscriptionStarted", BigQueryDbType.Timestamp },
            { "planType", BigQueryDbType.String }
        }.Build());
        // Insert data into table. Timestamp columns accept epoch seconds;
        // the previously computed (and unused) DateTime.Now local was removed.
        table.InsertRow(new BigQueryInsertRow
        {
            { "email", username },
            { "subscriptionStarted", DateTimeOffset.UtcNow.ToUnixTimeSeconds() },
            { "planType", userplan }
        });
        Console.WriteLine("Inserted: " + username + " successfully");
        return Redirect("/Home/Main/" + username + "/" + userplan + "/" + customerId + "?Msg=Success");
    } // End of try
    catch (Exception ex)
    {
        Console.WriteLine("Error: " + ex.Message);
        return Redirect("/Home/Main/" + username + "/" + userplan + "/" + customerId + "?Msg=Failed");
    }
} // End of insertBQData
// Round-trips one parameter per supported BigQueryDbType — scalars first,
// then arrays — through an actual query, verifying each value survives.
public void QueryParameters()
{
    var client = BigQueryClient.Create(_fixture.ProjectId);
    // Scalar types.
    AssertParameterRoundTrip(client, new BigQueryParameter(BigQueryDbType.String, "foo"));
    AssertParameterRoundTrip(client, new BigQueryParameter(BigQueryDbType.Bool, true));
    AssertParameterRoundTrip(client, new BigQueryParameter(BigQueryDbType.Int64, 123456L));
    AssertParameterRoundTrip(client, new BigQueryParameter(BigQueryDbType.Float64, 123.75));
    AssertParameterRoundTrip(client, new BigQueryParameter(BigQueryDbType.Bytes, new byte[] { 1, 2, 3, 4 }));
    AssertParameterRoundTrip(client, new BigQueryParameter(BigQueryDbType.Date, new DateTime(2017, 2, 14)));
    AssertParameterRoundTrip(client, new BigQueryParameter(BigQueryDbType.DateTime, new DateTime(2017, 2, 14, 17, 25, 30, DateTimeKind.Unspecified)));
    AssertParameterRoundTrip(client, new BigQueryParameter(BigQueryDbType.Timestamp, new DateTime(2017, 2, 14, 17, 25, 30, DateTimeKind.Utc)));
    AssertParameterRoundTrip(client, new BigQueryParameter(BigQueryDbType.Time, new TimeSpan(0, 1, 2, 3, 456)));
    // Array types whose element type can be inferred from the CLR element type.
    AssertParameterRoundTrip(client, new BigQueryParameter(BigQueryDbType.Array, new[] { "foo", "bar" }));
    AssertParameterRoundTrip(client, new BigQueryParameter(BigQueryDbType.Array, new[] { true, false }));
    AssertParameterRoundTrip(client, new BigQueryParameter(BigQueryDbType.Array, new[] { 123456L, -1234L }));
    AssertParameterRoundTrip(client, new BigQueryParameter(BigQueryDbType.Array, new[] { 123.75, 10.5 }));
    AssertParameterRoundTrip(client, new BigQueryParameter(BigQueryDbType.Array, new[] { new byte[] { 1, 2, 3, 4 }, new byte[] { 255, 254, 253, 252 } }));
    // Date/DateTime/Timestamp arrays need to be given the types explicitly,
    // since all three use the CLR DateTime type for their elements.
    AssertParameterRoundTrip(client, new BigQueryParameter(BigQueryDbType.Array, new[] { new DateTime(2017, 2, 14), new DateTime(2017, 2, 15) }) { ArrayType = BigQueryDbType.Date });
    AssertParameterRoundTrip(client, new BigQueryParameter(BigQueryDbType.Array, new[] { new DateTime(2017, 2, 14, 17, 25, 30, DateTimeKind.Unspecified), new DateTime(2017, 2, 15, 17, 25, 30, DateTimeKind.Unspecified) }) { ArrayType = BigQueryDbType.DateTime });
    AssertParameterRoundTrip(client, new BigQueryParameter(BigQueryDbType.Array, new[] { new DateTime(2017, 2, 14, 17, 25, 30, DateTimeKind.Utc), new DateTime(2017, 2, 15, 17, 25, 30, DateTimeKind.Utc) }) { ArrayType = BigQueryDbType.Timestamp });
    AssertParameterRoundTrip(client, new BigQueryParameter(BigQueryDbType.Array, new[] { new TimeSpan(0, 1, 2, 3, 456), new TimeSpan(0, 23, 59, 59, 987) }));
}
// Supplying a Table resource whose TableReference matches the explicit
// arguments should be accepted, and the created table should echo it back.
public void CreateTable_TableSameReference()
{
    var client = BigQueryClient.Create(_fixture.ProjectId);
    string datasetId = _fixture.DatasetId;
    var tableId = _fixture.CreateTableId();
    var reference = new TableReference
    {
        ProjectId = _fixture.ProjectId,
        DatasetId = _fixture.DatasetId,
        TableId = tableId
    };
    var created = client.CreateTable(datasetId, tableId, new Table { TableReference = reference });
    Assert.Equal(_fixture.ProjectId, created.Reference.ProjectId);
    Assert.Equal(datasetId, created.Reference.DatasetId);
    Assert.Equal(tableId, created.Reference.TableId);
}
// Async variant: supplying a resource whose TableReference matches the
// explicit arguments should be accepted rather than rejected as a mismatch.
public async Task GetOrCreateTableAsync_TableSameReference()
{
    var client = BigQueryClient.Create(_fixture.ProjectId);
    string datasetId = _fixture.DatasetId;
    var tableId = _fixture.CreateTableId();
    var reference = new TableReference
    {
        ProjectId = _fixture.ProjectId,
        DatasetId = _fixture.DatasetId,
        TableId = tableId
    };
    var created = await client.GetOrCreateTableAsync(datasetId, tableId,
        resource: new Table { TableReference = reference });
    Assert.Equal(_fixture.ProjectId, created.Reference.ProjectId);
    Assert.Equal(datasetId, created.Reference.DatasetId);
    Assert.Equal(tableId, created.Reference.TableId);
}
// Inserts two rows into the shared high-score table and checks both the
// presence of the new players and the total row count afterwards.
// NOTE(review): ListRows() is read immediately after a streaming insert;
// BigQuery streaming inserts are not instantly visible, so the count check
// may be flaky. The other InsertRows test in this file waits via
// _fixture.InsertAndWait — TODO confirm whether this variant should too.
public void InsertRows()
{
    var client = BigQueryClient.Create(_fixture.ProjectId);
    var dataset = client.GetDataset(_fixture.DatasetId);
    var table = dataset.GetTable(_fixture.HighScoreTableId);
    // Baseline count so the final assertion can check exactly +2 rows.
    var countBefore = table.ListRows().Count();
    var rows = new[]
    {
        BuildRow("Jenny", 125, new DateTime(2012, 5, 22, 1, 20, 30, DateTimeKind.Utc)),
        BuildRow("Lisa", 90, new DateTime(2011, 10, 12, 0, 0, 0, DateTimeKind.Utc))
    };
    table.InsertRows(rows);
    var rowsAfter = table.ListRows();
    Assert.True(rowsAfter.Any(r => (string)r["player"] == "Jenny"));
    Assert.True(rowsAfter.Any(r => (string)r["player"] == "Lisa"));
    Assert.Equal(countBefore + 2, rowsAfter.Count());
}
// Snippet test for BigQueryTable.Update: modifies the in-memory resource
// and applies it to the server, then asserts the change took effect.
// The "// Snippet:" / "// End snippet" markers are consumed by the
// documentation tooling and must be kept intact.
public void Update()
{
    string projectId = _fixture.ProjectId;
    string datasetId = _fixture.GameDatasetId;
    string tableId = _fixture.GenerateTableId();
    // Create a table (outside the snippet) for the snippet code to update.
    BigQueryClient.Create(projectId).CreateTable(datasetId, tableId, new TableSchema());
    // Snippet: Update(Table, *)
    BigQueryClient client = BigQueryClient.Create(projectId);
    BigQueryTable table = client.GetTable(datasetId, tableId);
    // This example modifies the in-memory resource in the BigQueryDataset,
    // and then applies that change in the server. Alternatively, pass a Dataset
    // into the Update method.
    table.Resource.FriendlyName = "Updated table";
    BigQueryTable updated = table.Update();
    Console.WriteLine($"Updated table friendly name: {updated.Resource.FriendlyName}");
    // End snippet
    Assert.Equal("Updated table", updated.Resource.FriendlyName);
}
// A CSV load job is not a query job, so asking it (or the client) for query
// results must throw InvalidOperationException.
public void NonQueryJob()
{
    var client = BigQueryClient.Create(_fixture.ProjectId);
    string tableId = _fixture.CreateTableId();
    string[] csvRows =
    {
        "Name,GameStarted,Score",
        "Ben,2014-08-19T12:41:35.220Z,85",
        "Lucy,2014-08-20T12:41:35.220Z,130",
        "Rohit,2014-08-21T12:41:35.220Z,90"
    };
    var csvBytes = Encoding.UTF8.GetBytes(string.Join("\n", csvRows));
    TableSchema schema = null;
    var loadJob = client.UploadCsv(_fixture.DatasetId, tableId, schema,
        new MemoryStream(csvBytes), new UploadCsvOptions { Autodetect = true });
    Assert.Throws<InvalidOperationException>(() => loadJob.GetQueryResults());
    Assert.Throws<InvalidOperationException>(() => client.GetQueryResults(loadJob.Reference));
}
// Exercises NULL parameter semantics: equality never matches NULL, but an
// explicit IS NULL clause can.
public void NullParameter()
{
    // Assumption: no other test inserts a row with a null player name.
    var client = BigQueryClient.Create(_fixture.ProjectId);
    var table = client.GetTable(_fixture.DatasetId, _fixture.HighScoreTableId);
    var playerParam = new BigQueryParameter("player", BigQueryDbType.String, "Angela");
    string sql = $"SELECT score FROM {table} WHERE player=@player";
    var resultSet = client.ExecuteQuery(sql, new[] { playerParam }).ReadPage(5);
    Assert.Equal(1, resultSet.Rows.Count);
    Assert.Equal(95, (long)resultSet.Rows[0]["score"]);

    // SQL rules: nothing equals null
    playerParam.Value = null;
    resultSet = client.ExecuteQuery(sql, new[] { playerParam }).ReadPage(5);
    Assert.Equal(0, resultSet.Rows.Count);

    // But we should be able to find the null value this way.
    sql = $"SELECT score FROM {table} WHERE player=@player OR (player IS NULL AND @player IS NULL)";
    resultSet = client.ExecuteQuery(sql, new[] { playerParam }).ReadPage(5);
    Assert.Equal(1, resultSet.Rows.Count);
    Assert.Equal(1, (long)resultSet.Rows[0]["score"]);
}