// Verifies that ModifyConfiguration copies every CSV option — including
// Autodetect, NullMarker and daily time partitioning — onto the load configuration.
public void ModifyRequest()
{
    var options = new UploadCsvOptions
    {
        AllowJaggedRows = true,
        AllowQuotedNewlines = true,
        AllowTrailingColumns = true,
        CreateDisposition = CreateDisposition.CreateIfNeeded,
        FieldDelimiter = "!",
        MaxBadRecords = 10,
        Quote = "'",
        SkipLeadingRows = 5,
        WriteDisposition = WriteDisposition.WriteAppend,
        Autodetect = true,
        NullMarker = "custom-null",
        TimePartitioning = TimePartition.CreateDailyPartitioning(expiration: null)
    };
    JobConfigurationLoad config = new JobConfigurationLoad();
    options.ModifyConfiguration(config);

    // Use Assert.True for boolean checks rather than Assert.Equal(true, ...) (xUnit2004).
    Assert.True(config.AllowJaggedRows);
    Assert.True(config.AllowQuotedNewlines);
    // AllowTrailingColumns is surfaced as IgnoreUnknownValues on the REST configuration.
    Assert.True(config.IgnoreUnknownValues);
    Assert.Equal("CREATE_IF_NEEDED", config.CreateDisposition);
    Assert.Equal("!", config.FieldDelimiter);
    Assert.Equal(10, config.MaxBadRecords);
    Assert.Equal("'", config.Quote);
    Assert.Equal(5, config.SkipLeadingRows);
    Assert.Equal("WRITE_APPEND", config.WriteDisposition);
    Assert.True(config.Autodetect);
    Assert.Equal("custom-null", config.NullMarker);
    Assert.Equal("DAY", config.TimePartitioning.Type);
    Assert.Null(config.TimePartitioning.ExpirationMs);
}
// Verifies that ModifyConfiguration copies the core CSV options onto the load configuration.
public void ModifyRequest()
{
    var options = new UploadCsvOptions
    {
        AllowJaggedRows = true,
        AllowQuotedNewlines = true,
        AllowTrailingColumns = true,
        CreateDisposition = CreateDisposition.CreateIfNeeded,
        FieldDelimiter = "!",
        MaxBadRecords = 10,
        Quote = "'",
        SkipLeadingRows = 5,
        WriteDisposition = WriteDisposition.WriteAppend
    };
    JobConfigurationLoad config = new JobConfigurationLoad();
    options.ModifyConfiguration(config);

    // Use Assert.True for boolean checks rather than Assert.Equal(true, ...) (xUnit2004).
    Assert.True(config.AllowJaggedRows);
    Assert.True(config.AllowQuotedNewlines);
    // AllowTrailingColumns is surfaced as IgnoreUnknownValues on the REST configuration.
    Assert.True(config.IgnoreUnknownValues);
    Assert.Equal("CREATE_IF_NEEDED", config.CreateDisposition);
    Assert.Equal("!", config.FieldDelimiter);
    Assert.Equal(10, config.MaxBadRecords);
    Assert.Equal("'", config.Quote);
    Assert.Equal(5, config.SkipLeadingRows);
    Assert.Equal("WRITE_APPEND", config.WriteDisposition);
}
// Loads a local CSV file into a BigQuery table, inferring the schema from the data.
public void LoadFromFile(
    string projectId = "your-project-id",
    string datasetId = "your_dataset_id",
    string tableId = "your_table_id",
    string filePath = "path/to/file.csv")
{
    BigQueryClient client = BigQueryClient.Create(projectId);

    // Configure the load: the first CSV row holds headers; the schema is auto-detected.
    var loadOptions = new UploadCsvOptions
    {
        SkipLeadingRows = 1, // Skips the file headers
        Autodetect = true
    };

    using (FileStream sourceStream = File.Open(filePath, FileMode.Open))
    {
        // Start the load job. Equivalent methods exist for formats other than CSV.
        BigQueryJob loadJob = client.UploadCsv(
            datasetId, tableId, null, sourceStream, loadOptions);
        // Block until the job finishes, surfacing any job-level errors.
        loadJob = loadJob.PollUntilCompleted().ThrowOnAnyError();

        // Report how many rows the destination table now holds.
        BigQueryTable destination = client.GetTable(datasetId, tableId);
        Console.WriteLine(
            $"Loaded {destination.Resource.NumRows} rows to {destination.FullyQualifiedId}");
    }
}
// Supplying both JobId and JobIdPrefix is contradictory and must be rejected.
public void CreateJob_BothJobIdAndPrefix()
{
    var options = new UploadCsvOptions
    {
        JobIdPrefix = "prefix",
        JobId = "id"
    };

    Assert.Throws<ArgumentException>(() => CreateJobWithSampleConfiguration(options));
}
// Streams the given file into the target BigQuery table, dispatching on the
// declared data format (AVRO / JSON / CSV) and waiting for the load job to finish.
protected override void ProcessRecord()
{
    Project = InputObject.ProjectId;
    try
    {
        using (Stream sourceStream = File.OpenRead(Filename))
        {
            BigQueryJob uploadJob;
            switch (Type)
            {
                case DataFormats.AVRO:
                    var avroOptions = new UploadAvroOptions
                    {
                        WriteDisposition = WriteMode,
                        AllowUnknownFields = AllowUnknownFields
                    };
                    uploadJob = Client.UploadAvro(InputObject, null, sourceStream, avroOptions);
                    break;
                case DataFormats.JSON:
                    var jsonOptions = new UploadJsonOptions
                    {
                        WriteDisposition = WriteMode,
                        AllowUnknownFields = AllowUnknownFields
                    };
                    uploadJob = Client.UploadJson(InputObject, null, sourceStream, jsonOptions);
                    break;
                case DataFormats.CSV:
                    var csvOptions = new UploadCsvOptions
                    {
                        WriteDisposition = WriteMode,
                        AllowJaggedRows = AllowJaggedRows,
                        AllowQuotedNewlines = AllowQuotedNewlines,
                        // For CSV, "unknown fields" maps onto trailing columns.
                        AllowTrailingColumns = AllowUnknownFields,
                        FieldDelimiter = FieldDelimiter,
                        Quote = Quote,
                        SkipLeadingRows = SkipLeadingRows
                    };
                    uploadJob = Client.UploadCsv(InputObject, null, sourceStream, csvOptions);
                    break;
                default:
                    throw UnknownParameterSetException;
            }
            // Wait for the load to finish and convert job errors into an exception.
            uploadJob.PollUntilCompleted().ThrowOnAnyError();
        }
    }
    catch (IOException ex)
    {
        ThrowTerminatingError(new ErrorRecord(ex,
            $"Error while reading file '{Filename}'.",
            ErrorCategory.ReadError, Filename));
        return;
    }
    catch (Exception ex)
    {
        ThrowTerminatingError(new ErrorRecord(ex,
            $"Error while uploading file '{Filename}' to table '{InputObject.TableId}'.",
            ErrorCategory.WriteError, Filename));
    }
}
// Labels set on the options must be propagated verbatim onto the job configuration.
public void CreateJob_Labels()
{
    var options = new UploadCsvOptions
    {
        Labels = new Dictionary<string, string>()
        {
            { "one_label", "one-label-value" },
            { "another-label_2", "label_value_2" }
        }
    };

    var job = CreateJobWithSampleConfiguration(options);

    var labels = job.Configuration.Labels;
    Assert.Equal(2, labels.Count);
    Assert.Equal("one-label-value", labels["one_label"]);
    Assert.Equal("label_value_2", labels["another-label_2"]);
}
// All UploadCsv overloads (client, by-IDs, by-project, and table-level) must be
// equivalent to the canonical table-reference form.
public void UploadCsvEquivalents()
{
    var datasetId = "dataset";
    var tableId = "table";
    var jobRef = GetJobReference("job");
    var tableRef = GetTableReference(datasetId, tableId);
    var schema = new TableSchemaBuilder().Build();
    var csvOptions = new UploadCsvOptions();
    var sourceStream = new MemoryStream();
    var expectedJob = new BigQueryJob(new DerivedBigQueryClient(), new Job { JobReference = jobRef });

    VerifyEquivalent(
        expectedJob,
        client => client.UploadCsv(MatchesWhenSerialized(tableRef), schema, sourceStream, csvOptions),
        client => client.UploadCsv(datasetId, tableId, schema, sourceStream, csvOptions),
        client => client.UploadCsv(ProjectId, datasetId, tableId, schema, sourceStream, csvOptions),
        client => new BigQueryTable(client, GetTable(tableRef, schema)).UploadCsv(sourceStream, csvOptions));
}
// Verifies that ModifyConfiguration copies every CSV option — including encryption
// configuration and schema-update flags — onto the load configuration.
public void ModifyRequest()
{
    var options = new UploadCsvOptions
    {
        AllowJaggedRows = true,
        AllowQuotedNewlines = true,
        AllowTrailingColumns = true,
        CreateDisposition = CreateDisposition.CreateIfNeeded,
        FieldDelimiter = "!",
        MaxBadRecords = 10,
        Quote = "'",
        SkipLeadingRows = 5,
        WriteDisposition = WriteDisposition.WriteAppend,
        Autodetect = true,
        NullMarker = "custom-null",
        TimePartitioning = TimePartition.CreateDailyPartitioning(expiration: null),
        DestinationEncryptionConfiguration = new EncryptionConfiguration
        {
            KmsKeyName = "projects/1/locations/us/keyRings/1/cryptoKeys/1"
        },
        DestinationSchemaUpdateOptions = SchemaUpdateOption.AllowFieldAddition | SchemaUpdateOption.AllowFieldRelaxation
    };
    JobConfigurationLoad config = new JobConfigurationLoad();
    options.ModifyConfiguration(config);

    // Use Assert.True for boolean checks rather than Assert.Equal(true, ...) (xUnit2004).
    Assert.True(config.AllowJaggedRows);
    Assert.True(config.AllowQuotedNewlines);
    // AllowTrailingColumns is surfaced as IgnoreUnknownValues on the REST configuration.
    Assert.True(config.IgnoreUnknownValues);
    Assert.Equal("CREATE_IF_NEEDED", config.CreateDisposition);
    Assert.Equal("!", config.FieldDelimiter);
    Assert.Equal(10, config.MaxBadRecords);
    Assert.Equal("'", config.Quote);
    Assert.Equal(5, config.SkipLeadingRows);
    Assert.Equal("WRITE_APPEND", config.WriteDisposition);
    Assert.True(config.Autodetect);
    Assert.Equal("custom-null", config.NullMarker);
    Assert.Equal("DAY", config.TimePartitioning.Type);
    Assert.Null(config.TimePartitioning.ExpirationMs);
    Assert.Equal("projects/1/locations/us/keyRings/1/cryptoKeys/1", config.DestinationEncryptionConfiguration.KmsKeyName);
    // The [Flags] enum expands to one REST string per set flag.
    Assert.Equal(2, config.SchemaUpdateOptions.Count);
    Assert.Contains("ALLOW_FIELD_ADDITION", config.SchemaUpdateOptions);
    Assert.Contains("ALLOW_FIELD_RELAXATION", config.SchemaUpdateOptions);
}
// Integration test: uploading CSV data with field-based daily partitioning must
// create a table partitioned on the requested timestamp column.
public void UploadCsv_PartitionedByField()
{
    var client = BigQueryClient.Create(_fixture.ProjectId);
    string tableId = _fixture.CreateTableId();
    // (Fixed a latent defect: the original declared an unused tableReference local
    // whose initializer generated and discarded a second table ID.)
    string[] csvRows =
    {
        "Name,GameStarted,Score",
        "Ben,2014-08-19T12:41:35.220Z,85",
        "Lucy,2014-08-20T12:41:35.220Z,130",
        "Rohit,2014-08-21T12:41:35.220Z,90"
    };
    var bytes = Encoding.UTF8.GetBytes(string.Join("\n", csvRows));
    TableSchema schema = new TableSchemaBuilder
    {
        { "Name", BigQueryDbType.String },
        { "GameStarted", BigQueryDbType.Timestamp },
        { "Score", BigQueryDbType.Int64 },
    }.Build();
    var options = new UploadCsvOptions
    {
        SkipLeadingRows = 1, // First row is the header.
        // Partition by the GameStarted column rather than ingestion time.
        TimePartitioning = TimePartition.CreateDailyPartitioning(expiration: null, "GameStarted")
    };

    var job = client.UploadCsv(_fixture.DatasetId, tableId, schema, new MemoryStream(bytes), options);
    var result = job.PollUntilCompleted();
    Assert.Null(result.Status.ErrorResult);

    // The created table must record the partitioning field.
    var table = client.GetTable(_fixture.DatasetId, tableId);
    Assert.Equal("GameStarted", table.Resource.TimePartitioning.Field);
}