Example #1
        public void UploadCsv_Autodetect()
        {
            var client = BigQueryClient.Create(_fixture.ProjectId);

            string tableId = _fixture.CreateTableId();

            string[] csvRows =
            {
                "Name,GameStarted,Score",
                "Ben,2014-08-19T12:41:35.220Z,85",
                "Lucy,2014-08-20T12:41:35.220Z,130",
                "Rohit,2014-08-21T12:41:35.220Z,90"
            };

            var bytes = Encoding.UTF8.GetBytes(string.Join("\n", csvRows));

            // No schema supplied: BigQuery autodetects it from the CSV header and data.
            TableSchema schema = null;
            var job = client.UploadCsv(_fixture.DatasetId, tableId, schema, new MemoryStream(bytes), new UploadCsvOptions {
                Autodetect = true
            });
            var result = job.PollUntilCompleted();

            Assert.Null(result.Status.ErrorResult);

            var table = client.GetTable(_fixture.DatasetId, tableId);

            Assert.Equal(3, table.ListRows().Count());
            var fields = table.Schema.Fields.Select(f => new { f.Name, f.Type, f.Mode }).OrderBy(f => f.Name).ToList();

            Assert.Equal(new { Name = "GameStarted", Type = "TIMESTAMP", Mode = "NULLABLE" }, fields[0]);
            Assert.Equal(new { Name = "Name", Type = "STRING", Mode = "NULLABLE" }, fields[1]);
            Assert.Equal(new { Name = "Score", Type = "INTEGER", Mode = "NULLABLE" }, fields[2]);
        }
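
For comparison, a minimal sketch (not part of the test suite) of the same upload with an explicit schema instead of autodetection. The dataset and table IDs are placeholders, and the CSV stream is assumed to contain data rows only (no header line).

        // Sketch only: explicit schema instead of Autodetect; IDs are placeholders.
        var schema = new TableSchemaBuilder
        {
            { "Name", BigQueryDbType.String },
            { "GameStarted", BigQueryDbType.Timestamp },
            { "Score", BigQueryDbType.Int64 }
        }.Build();
        var job = client.UploadCsv("your_dataset", "game_scores", schema, new MemoryStream(bytes));
        job.PollUntilCompleted().ThrowOnAnyError();
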
Example #2
        public void UpdateTable()
        {
            var    client    = BigQueryClient.Create(_fixture.ProjectId);
            string datasetId = _fixture.DatasetId;
            var    tableId   = _fixture.CreateTableId();

            var original = client.CreateTable(datasetId, tableId, new TableSchema(), new CreateTableOptions {
                Description = "Description1", FriendlyName = "FriendlyName1"
            });

            // Modify locally...
            original.Resource.Description  = "Description2";
            original.Resource.FriendlyName = "FriendlyName2";

            // FIXME: I shouldn't need to do this.
            original.Resource.ETag = client.GetTable(datasetId, tableId).Resource.ETag;

            // Check the results of the update
            var updated = original.Update();

            Assert.Equal("Description2", updated.Resource.Description);
            Assert.Equal("FriendlyName2", updated.Resource.FriendlyName);

            // Check that it's still valid if fetched directly
            var fetched = client.GetTable(datasetId, tableId);

            Assert.Equal("Description2", fetched.Resource.Description);
            Assert.Equal("FriendlyName2", fetched.Resource.FriendlyName);
        }
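
A small sketch (assumed usage, not in the test) of the same update done by fetching the table first; the freshly fetched resource carries the current ETag, which avoids the manual ETag copy flagged by the FIXME above.

        // Sketch: fetch, modify the local resource, then push the change.
        var table = client.GetTable(datasetId, tableId);
        table.Resource.Description = "Description2";     // placeholder values
        table.Resource.FriendlyName = "FriendlyName2";
        table = table.Update();
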
Example #3
        public void InsertRow_BadData_Throws(InsertOptions options, int[] errorRowsIndexes)
        {
            var client  = BigQueryClient.Create(_fixture.ProjectId);
            var dataset = client.GetDataset(_fixture.DatasetId);
            // Don't insert into a table used by other tests...
            var table = dataset.CreateTable(
                _fixture.CreateTableId(),
                new TableSchemaBuilder {
                    { "year", BigQueryDbType.Int64 }
                }.Build());
            var rows = new BigQueryInsertRow[]
            {
                new BigQueryInsertRow {
                    { "noSuchField", 10 }
                },
                new BigQueryInsertRow {
                    { "year", "Unknown" }
                }
            };
            var exception = Assert.Throws<GoogleApiException>(() => table.InsertRows(rows, options));

            Assert.Equal(errorRowsIndexes.Length, exception.Error.Errors.Count);
            foreach (var index in errorRowsIndexes)
            {
                Assert.Contains(exception.Error.Errors, e => e.Message.ToLower().Contains($"in row {index}"));
            }
        }
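
Outside a test, the same per-row error details can be read by catching the exception; a rough sketch (the console logging is illustrative only):

        try
        {
            table.InsertRows(rows, options);
        }
        catch (GoogleApiException e)
        {
            // One entry per failed row, e.g. "... in row 0 ...".
            foreach (var error in e.Error.Errors)
            {
                Console.WriteLine(error.Message);
            }
        }
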
Example #4
        public void InsertRow_BadData_IgnoreBadData()
        {
            var client  = BigQueryClient.Create(_fixture.ProjectId);
            var dataset = client.GetDataset(_fixture.DatasetId);
            // Don't insert into a table used by other tests...
            var table = dataset.CreateTable(
                _fixture.CreateTableId(),
                new TableSchemaBuilder {
                    { "name", BigQueryDbType.String }
                }.Build());

            Assert.Equal(0, table.ListRows().Count());
            var row = new BigQueryInsertRow {
                { "noSuchField", 10 }
            };

            table.InsertRow(row, new InsertOptions {
                AllowUnknownFields = true
            });

            // Check that we get the row. Use WaitForRows because streaming inserts
            // sometimes aren't visible immediately.
            var command = new BigQueryCommand($"SELECT * FROM {table}");

            Assert.Equal(1, WaitForRows(client, command).Count());
        }
Example #5
        public void AsynchronousPermanentQuery()
        {
            // We create the client using our user, but then access a dataset in a public data
            // project. We can't run a query "as" the public data project.
            var projectId   = _fixture.ProjectId;
            var client      = BigQueryClient.Create(projectId);
            var table       = client.GetTable(PublicDatasetsProject, PublicDatasetsDataset, ShakespeareTable);
            var userDataset = client.GetDataset(_fixture.DatasetId);

            var sql = $"SELECT corpus as title, COUNT(word) as unique_words FROM {table} GROUP BY title ORDER BY unique_words DESC LIMIT 10";
            var destinationTable = userDataset.GetTableReference(_fixture.CreateTableId());
            var job = client.CreateQueryJob(sql, new CreateQueryJobOptions {
                DestinationTable = destinationTable
            });
            var rows = job.PollQueryUntilCompleted().GetRows().ToList();

            Assert.Equal(10, rows.Count);
            Assert.Equal("hamlet", (string)rows[0][0]);
            Assert.Equal(5318, (long)rows[0][1]);

            // Read the table again later - synchronously this time
            table = client.GetTable(destinationTable);
            rows  = client.ExecuteQuery($"SELECT * FROM {table} ORDER BY unique_words DESC").GetRows().ToList();
            Assert.Equal(10, rows.Count);
            Assert.Equal("hamlet", (string)rows[0][0]);
            Assert.Equal(5318, (long)rows[0][1]);
        }
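
Because the query writes to a permanent destination table, its results stay available without re-running the query. A brief sketch (assumed usage) of reading them back later, using the column aliases produced by the query above:

        // Sketch: list rows straight from the saved destination table.
        var savedTable = client.GetTable(destinationTable);
        foreach (var row in savedTable.ListRows())
        {
            Console.WriteLine($"{row["title"]}: {row["unique_words"]}");
        }
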
Example #6
        public void EmptyTable()
        {
            var client = BigQueryClient.Create(_fixture.ProjectId);
            var schema = new TableSchemaBuilder {
                { "name", BigQueryDbType.String }
            }.Build();
            var table = client.CreateTable(_fixture.DatasetId, _fixture.CreateTableId(), schema);
            var rows  = table.ListRows().ToList();

            Assert.Empty(rows);
        }
Example #7
        public void InsertRow_BadData_IgnoreBadData()
        {
            var client  = BigQueryClient.Create(_fixture.ProjectId);
            var dataset = client.GetDataset(_fixture.DatasetId);
            // Don't insert into a table used by other tests...
            var table = dataset.CreateTable(
                _fixture.CreateTableId(),
                new TableSchemaBuilder {
                    { "name", BigQueryDbType.String }
                }.Build());

            var row = new BigQueryInsertRow {
                { "noSuchField", 10 }
            };

            var options = new InsertOptions {
                AllowUnknownFields = true
            };

            _fixture.InsertAndWait(table, () => table.InsertRow(row, options), 1);
        }
Example #8
        public void UploadAvro()
        {
            var client         = BigQueryClient.Create(_fixture.ProjectId);
            var tableId        = _fixture.CreateTableId();
            var tableReference = client.GetTableReference(_fixture.DatasetId, tableId);
            var schema         = new TableSchemaBuilder
            {
                { "re", BigQueryDbType.Int64 },
                { "im", BigQueryDbType.Int64 }
            }.Build();
            var    typeInfo     = typeof(UploadTest).GetTypeInfo();
            string resourceName = typeInfo.Namespace + ".one_complex.avro";

            using (var stream = typeInfo.Assembly.GetManifestResourceStream(resourceName))
            {
                var job = client.UploadAvro(tableReference, schema, stream);
                job.PollUntilCompleted();
            }
            var rows = client.GetTable(tableReference).ListRows().ToList();

            Assert.Equal(1, rows.Count);
            Assert.Equal(100, (long)rows[0]["re"]);
            Assert.Equal(200, (long)rows[0]["im"]);
        }
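
A minimal sketch (assumed usage, not from the test) of the same Avro upload reading from a file on disk instead of an embedded resource; the file path is a placeholder.

        // Sketch: load an Avro file from disk into the same table reference.
        using (var stream = File.OpenRead("one_complex.avro"))
        {
            var job = client.UploadAvro(tableReference, schema, stream);
            job.PollUntilCompleted().ThrowOnAnyError();
        }
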
Example #9
        public void InsertRow_BadData_Throws(InsertOptions options)
        {
            var client  = BigQueryClient.Create(_fixture.ProjectId);
            var dataset = client.GetDataset(_fixture.DatasetId);
            // Don't insert into a table used by other tests...
            var table = dataset.CreateTable(
                _fixture.CreateTableId(),
                new TableSchemaBuilder {
                    { "year", BigQueryDbType.Int64 }
                }.Build());
            var rows = new BigQueryInsertRow[]
            {
                new BigQueryInsertRow {
                    { "noSuchField", 10 }
                },
                new BigQueryInsertRow {
                    { "year", "Unknown" }
                }
            };

            Assert.Throws<GoogleApiException>(() => table.InsertRows(rows, options));
        }
Example #10
        public void CopyJob_Labels()
        {
            var bqClient             = BigQueryClient.Create(_fixture.ProjectId);
            var originReference      = bqClient.GetTableReference(_fixture.DatasetId, _fixture.PeopleTableId);
            var destinationReference = bqClient.GetTableReference(_fixture.DatasetId, _fixture.CreateTableId());
            var options = new CreateCopyJobOptions {
                Labels = JobLabels
            };

            var copyJob = bqClient.CreateCopyJob(originReference, destinationReference, options);

            VerifyJobLabels(copyJob?.Resource?.Configuration?.Labels);

            copyJob = copyJob.PollUntilCompleted().ThrowOnAnyError();
            VerifyJobLabels(copyJob?.Resource?.Configuration?.Labels);
        }
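
A follow-up check one might add once the copy job has completed (a sketch, not part of the original test): compare the row counts of the source and destination tables.

        // Sketch: confirm the copy by comparing source and destination row counts.
        var sourceCount = bqClient.GetTable(originReference).ListRows().Count();
        var destCount = bqClient.GetTable(destinationReference).ListRows().Count();
        Assert.Equal(sourceCount, destCount);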