/// <summary>
/// Verifies that <see cref="UploadJsonOptions.ModifyConfiguration"/> copies every
/// configured option onto the underlying <see cref="JobConfigurationLoad"/>.
/// </summary>
public void ModifyRequest()
{
    var options = new UploadJsonOptions
    {
        AllowUnknownFields = true,
        CreateDisposition = CreateDisposition.CreateIfNeeded,
        MaxBadRecords = 10,
        WriteDisposition = WriteDisposition.WriteAppend,
        Autodetect = true,
        TimePartitioning = TimePartition.CreateDailyPartitioning(expiration: null),
        DestinationEncryptionConfiguration = new EncryptionConfiguration { KmsKeyName = "projects/1/locations/us/keyRings/1/cryptoKeys/1" },
        DestinationSchemaUpdateOptions = SchemaUpdateOption.AllowFieldAddition | SchemaUpdateOption.AllowFieldRelaxation
    };
    JobConfigurationLoad config = new JobConfigurationLoad();
    options.ModifyConfiguration(config);

    // Assert.True rather than Assert.Equal(true, ...) — the latter triggers xUnit2004
    // and produces less useful failure messages for boolean checks.
    Assert.True(config.IgnoreUnknownValues);
    Assert.Equal("CREATE_IF_NEEDED", config.CreateDisposition);
    Assert.Equal(10, config.MaxBadRecords);
    Assert.Equal("WRITE_APPEND", config.WriteDisposition);
    Assert.True(config.Autodetect);
    Assert.Equal("DAY", config.TimePartitioning.Type);
    Assert.Null(config.TimePartitioning.ExpirationMs);
    Assert.Equal("projects/1/locations/us/keyRings/1/cryptoKeys/1", config.DestinationEncryptionConfiguration.KmsKeyName);
    Assert.Equal(2, config.SchemaUpdateOptions.Count);
    Assert.Contains("ALLOW_FIELD_ADDITION", config.SchemaUpdateOptions);
    Assert.Contains("ALLOW_FIELD_RELAXATION", config.SchemaUpdateOptions);
}
/// <summary>
/// Cmdlet entry point: opens <c>Filename</c> and uploads its contents to the
/// BigQuery table described by <c>InputObject</c>, formatted per <c>Type</c>,
/// blocking until the load job completes.
/// </summary>
protected override void ProcessRecord()
{
    Project = InputObject.ProjectId;
    try
    {
        using (Stream fileInput = File.OpenRead(Filename))
        {
            BigQueryJob bqj;
            switch (Type)
            {
                case DataFormats.AVRO:
                    var avroOptions = new UploadAvroOptions
                    {
                        WriteDisposition = WriteMode,
                        AllowUnknownFields = AllowUnknownFields
                    };
                    bqj = Client.UploadAvro(InputObject, null, fileInput, avroOptions);
                    break;
                case DataFormats.JSON:
                    var jsonOptions = new UploadJsonOptions
                    {
                        WriteDisposition = WriteMode,
                        AllowUnknownFields = AllowUnknownFields
                    };
                    bqj = Client.UploadJson(InputObject, null, fileInput, jsonOptions);
                    break;
                case DataFormats.CSV:
                    // NOTE(review): AllowTrailingColumns is deliberately driven by the
                    // AllowUnknownFields switch — the CSV options type has no direct
                    // "unknown fields" flag, so this is its closest equivalent.
                    var csvOptions = new UploadCsvOptions
                    {
                        WriteDisposition = WriteMode,
                        AllowJaggedRows = AllowJaggedRows,
                        AllowQuotedNewlines = AllowQuotedNewlines,
                        AllowTrailingColumns = AllowUnknownFields,
                        FieldDelimiter = FieldDelimiter,
                        Quote = Quote,
                        SkipLeadingRows = SkipLeadingRows
                    };
                    bqj = Client.UploadCsv(InputObject, null, fileInput, csvOptions);
                    break;
                default:
                    throw UnknownParameterSetException;
            }
            // Block until the server-side load job finishes; surface any job errors.
            bqj.PollUntilCompleted().ThrowOnAnyError();
        }
    }
    catch (IOException ex)
    {
        // Local file could not be read.
        ThrowTerminatingError(new ErrorRecord(ex,
            $"Error while reading file '{Filename}'.",
            ErrorCategory.ReadError, Filename));
        return;
    }
    catch (Exception ex)
    {
        // Upload or job-completion failure.
        ThrowTerminatingError(new ErrorRecord(ex,
            $"Error while uploading file '{Filename}' to table '{InputObject.TableId}'.",
            ErrorCategory.WriteError, Filename));
    }
}
/// <summary>
/// Verifies that every string-based UploadJson overload (client, dataset/table IDs,
/// project-qualified IDs, and the table-level wrapper) is equivalent to the
/// reference-based overload.
/// </summary>
public void UploadJson_Strings_Equivalents()
{
    const string datasetId = "dataset";
    const string tableId = "table";
    var jobRef = GetJobReference("job");
    var tableRef = GetTableReference(datasetId, tableId);
    var tableSchema = new TableSchemaBuilder().Build();
    var uploadOptions = new UploadJsonOptions();
    var jsonRows = new[] { "a", "b" };
    var expectedJob = new BigQueryJob(new DerivedBigQueryClient(), new Job { JobReference = jobRef });

    VerifyEquivalent(expectedJob,
        client => client.UploadJson(MatchesWhenSerialized(tableRef), tableSchema, jsonRows, uploadOptions),
        client => client.UploadJson(datasetId, tableId, tableSchema, jsonRows, uploadOptions),
        client => client.UploadJson(ProjectId, datasetId, tableId, tableSchema, jsonRows, uploadOptions),
        client => new BigQueryTable(client, GetTable(tableRef, tableSchema)).UploadJson(jsonRows, uploadOptions));
}
/// <summary>
/// Verifies that <see cref="UploadJsonOptions.ModifyConfiguration"/> copies every
/// configured option onto the underlying <see cref="JobConfigurationLoad"/>.
/// </summary>
public void ModifyRequest()
{
    var options = new UploadJsonOptions
    {
        AllowUnknownFields = true,
        CreateDisposition = CreateDisposition.CreateIfNeeded,
        MaxBadRecords = 10,
        WriteDisposition = WriteDisposition.WriteAppend
    };
    JobConfigurationLoad config = new JobConfigurationLoad();
    options.ModifyConfiguration(config);

    // Assert.True rather than Assert.Equal(true, ...) — the latter triggers xUnit2004
    // and produces less useful failure messages for boolean checks.
    Assert.True(config.IgnoreUnknownValues);
    Assert.Equal("CREATE_IF_NEEDED", config.CreateDisposition);
    Assert.Equal(10, config.MaxBadRecords);
    Assert.Equal("WRITE_APPEND", config.WriteDisposition);
}
/// <summary>
/// Verifies that every async stream-based UploadJsonAsync overload (client,
/// dataset/table IDs, project-qualified IDs, and the table-level wrapper) is
/// equivalent to the reference-based overload, including cancellation-token flow.
/// </summary>
public void UploadJson_Stream_AsyncEquivalents()
{
    const string datasetId = "dataset";
    const string tableId = "table";
    var jobRef = GetJobReference("job");
    var tableRef = GetTableReference(datasetId, tableId);
    var tableSchema = new TableSchemaBuilder().Build();
    var uploadOptions = new UploadJsonOptions();
    var cancellationToken = new CancellationTokenSource().Token;
    var jsonStream = new MemoryStream();
    var expectedJob = new BigQueryJob(new DerivedBigQueryClient(), new Job { JobReference = jobRef });

    VerifyEquivalentAsync(expectedJob,
        client => client.UploadJsonAsync(MatchesWhenSerialized(tableRef), tableSchema, jsonStream, uploadOptions, cancellationToken),
        client => client.UploadJsonAsync(datasetId, tableId, tableSchema, jsonStream, uploadOptions, cancellationToken),
        client => client.UploadJsonAsync(ProjectId, datasetId, tableId, tableSchema, jsonStream, uploadOptions, cancellationToken),
        client => new BigQueryTable(client, GetTable(tableRef, tableSchema)).UploadJsonAsync(jsonStream, uploadOptions, cancellationToken));
}
/// <summary>
/// Verifies that <see cref="UploadJsonOptions.ModifyConfiguration"/> copies every
/// configured option onto the underlying <see cref="JobConfigurationLoad"/>,
/// including autodetect and daily time partitioning with no expiration.
/// </summary>
public void ModifyRequest()
{
    var options = new UploadJsonOptions
    {
        AllowUnknownFields = true,
        CreateDisposition = CreateDisposition.CreateIfNeeded,
        MaxBadRecords = 10,
        WriteDisposition = WriteDisposition.WriteAppend,
        Autodetect = true,
        TimePartitioning = TimePartition.CreateDailyPartitioning(expiration: null)
    };
    JobConfigurationLoad config = new JobConfigurationLoad();
    options.ModifyConfiguration(config);

    // Assert.True rather than Assert.Equal(true, ...) — the latter triggers xUnit2004
    // and produces less useful failure messages for boolean checks.
    Assert.True(config.IgnoreUnknownValues);
    Assert.Equal("CREATE_IF_NEEDED", config.CreateDisposition);
    Assert.Equal(10, config.MaxBadRecords);
    Assert.Equal("WRITE_APPEND", config.WriteDisposition);
    Assert.True(config.Autodetect);
    Assert.Equal("DAY", config.TimePartitioning.Type);
    Assert.Null(config.TimePartitioning.ExpirationMs);
}