Example #1
0
        // Import CSV file from Google Storage bucket into existing Google BigQuery data table.
        // Downloads gs://{bucketName}/{fileName} into memory and loads it into
        // {dataSetName}.{tableName}, blocking until the load job finishes.
        // Throws if the download, upload, or load job fails (previously these
        // exceptions were silently swallowed, and a failed upload then caused a
        // NullReferenceException on job.Poll()).
        private void ImportFileFromStorageToBigQuery(BigqueryClient client, string projectName,
                                                     string bucketName, string fileName, string dataSetName, string tableName)
        {
            StorageClient gcsClient = StorageClient.Create(_GoogleAPICredential);

            using (var stream = new MemoryStream())
            {
                // Download the Cloud Storage object into the in-memory stream.
                gcsClient.DownloadObject(bucketName, fileName, stream);

                // DownloadObject leaves the stream positioned at its end; rewind so
                // UploadCsv reads the full CSV content instead of an empty stream.
                stream.Position = 0;

                // This uploads data to an existing table. If the upload will create a new table
                // or if the schema in the CSV isn't identical to the schema in the table,
                // create a schema to pass into the call instead of passing in a null value.
                BigqueryJob job = client.UploadCsv(dataSetName, tableName, null, stream);

                // Use the job to find out when the data has finished being inserted into the table,
                // report errors etc.

                // Wait for the job to complete before the stream is disposed.
                job.Poll();
            }
        }
Example #2
0
        // [END delete_table]

        // [START import_file_from_gcs]
        // Downloads a newline-delimited JSON object from Cloud Storage (bucket named
        // after the project) into memory and loads it into an existing BigQuery table,
        // blocking until the load job completes.
        public void ImportDataFromCloudStorage(string projectId, string datasetId,
                                               string tableId, BigqueryClient client, string fileName, string folder = null)
        {
            StorageClient gcsClient = StorageClient.Create();

            using (var stream = new MemoryStream())
            {
                // Set Cloud Storage Bucket name. This uses a bucket named the same as the project.
                string bucket = projectId;
                // If folder is passed in, add it to Cloud Storage File Path using "/" character
                string filePath = string.IsNullOrEmpty(folder) ? fileName : folder + "/" + fileName;
                // Download Google Cloud Storage object into stream
                gcsClient.DownloadObject(projectId, filePath, stream);

                // DownloadObject leaves the stream positioned at its end; rewind so
                // UploadJson reads the downloaded content instead of an empty stream.
                stream.Position = 0;

                // This example uploads data to an existing table. If the upload will create a new table
                // or if the schema in the JSON isn't identical to the schema in the table,
                // create a schema to pass into the call instead of passing in a null value.
                BigqueryJob job = client.UploadJson(datasetId, tableId, null, stream);
                // Use the job to find out when the data has finished being inserted into the table,
                // report errors etc.

                // Wait for the job to complete.
                job.PollUntilCompleted();
            }
        }
Example #3
0
        // Runs the given query and writes its result rows into the (new) destination
        // table {datasetId}.{newTableId}, blocking until the query job completes.
        public void PopulateTable(
            string query, string datasetId, string newTableId, BigqueryClient client)
        {
            TableReference target = client.GetTableReference(datasetId, newTableId);
            var jobOptions = new CreateQueryJobOptions { DestinationTable = target };
            BigqueryJob queryJob = client.CreateQueryJob(query, jobOptions);

            // Block until BigQuery has finished materializing the results.
            queryJob.PollQueryUntilCompleted();
        }
        // Snippet test: uploads three CSV data rows (plus a skipped header) to the
        // history table and asserts the table's row count grew by exactly three.
        public void UploadCsv()
        {
            string projectId = _fixture.ProjectId;
            string datasetId = _fixture.GameDatasetId;
            string tableId = _fixture.HistoryTableId;

            // Capture the current row count so the delta can be asserted afterwards.
            BigqueryTable historyTable = BigqueryClient.Create(projectId).GetTable(datasetId, tableId);
            int initialRowCount = historyTable.ListRows().Rows.Count();

            // Snippet: UploadCsv(*,*,*,*,*)
            BigqueryClient client = BigqueryClient.Create(projectId);

            string[] csvRows =
            {
                "player,score,level,game_started",
                "Tim,5000,3,2014-08-19T12:41:35.220Z",
                "Holly,6000,4,2014-08-03T08:45:35.123Z",
                "Jane,2402,1,2015-01-20T10:13:35.059Z"
            };

            // Normally we'd be uploading from a file or similar. Any readable stream can be used.
            var stream = new MemoryStream(Encoding.UTF8.GetBytes(string.Join("\n", csvRows)));

            // This example uploads data to an existing table. If the upload will create a new table
            // or if the schema in the CSV isn't identical to the schema in the table (for example if the
            // columns are in a different order), create a schema to pass into the call.
            TableSchema schema = null;
            var uploadOptions = new UploadCsvOptions
            {
                // Our sample data has a header row, so we need to skip it.
                SkipLeadingRows = 1
            };
            BigqueryJob job = client.UploadCsv(datasetId, tableId, schema, stream, uploadOptions);
            // Use the job to find out when the data has finished being inserted into the table,
            // report errors etc.
            // End snippet

            var completedJob = job.Poll();

            // If there are any errors, display them *then* fail.
            if (completedJob.Status.ErrorResult != null)
            {
                foreach (var error in completedJob.Status.Errors)
                {
                    Console.WriteLine(error.Message);
                }
            }
            Assert.Null(completedJob.Status.ErrorResult);

            // Three data rows were uploaded (the header row is skipped).
            int finalRowCount = historyTable.ListRows().Rows.Count();
            Assert.Equal(initialRowCount + 3, finalRowCount);
        }
Example #5
0
        // [START copy_table]
        // Copies an existing table by running "SELECT *" over it with the new table
        // as the query destination, blocking until the job completes.
        public void CopyTable(
            string datasetId, string tableIdToBeCopied, string newTableId, BigqueryClient client)
        {
            BigqueryTable source = client.GetTable(datasetId, tableIdToBeCopied);
            string query = $"SELECT * FROM {source}";

            TableReference target = client.GetTableReference(datasetId, newTableId);
            var jobOptions = new CreateQueryJobOptions { DestinationTable = target };
            BigqueryJob copyJob = client.CreateQueryJob(query, jobOptions);

            // Wait for the job to complete.
            copyJob.PollQueryUntilCompleted();
        }
Example #6
0
        // [END async_query]


        // [START import_from_file]
        // Loads a local JSON data file into an existing BigQuery table, blocking
        // until the load job completes.
        public void UploadJsonFromFile(string projectId, string datasetId, string tableId,
                                       string fileName, BigqueryClient client)
        {
            // The stream stays open for the duration of the upload and is disposed afterwards.
            using (FileStream sourceStream = File.Open(fileName, FileMode.Open))
            {
                // This example uploads data to an existing table. If the upload will create a new table
                // or if the schema in the JSON isn't identical to the schema in the table,
                // create a schema to pass into the call instead of passing in a null value.
                BigqueryJob uploadJob = client.UploadJson(datasetId, tableId, null, sourceStream);
                // Use the job to find out when the data has finished being inserted into the table,
                // report errors etc.

                // Wait for the job to complete before the stream is closed.
                uploadJob.PollUntilCompleted();
            }
        }
Example #7
0
        // [END sync_query_legacy_sql]

        // [START async_query]
        // Creates a query job (with the query cache disabled), waits for it to
        // complete, and returns its results.
        public BigqueryQueryJob AsyncQuery(string projectId, string datasetId, string tableId,
                                           string query, BigqueryClient client)
        {
            // Retained from the original snippet; the table object is not used by the
            // query below.
            var table = client.GetTable(projectId, datasetId, tableId);

            var jobOptions = new CreateQueryJobOptions { UseQueryCache = false };
            BigqueryJob queryJob = client.CreateQueryJob(query, jobOptions);

            // Wait for the job to complete.
            queryJob.PollUntilCompleted();

            // Then we can fetch the results, either via the job or by accessing
            // the destination table.
            return client.GetQueryResults(queryJob.Reference.JobId);
        }
Example #8
0
        // [END sync_query]

        // [START sync_query_legacy_sql]
        // Runs a legacy-SQL query and fetches its results, waiting up to timeoutMs
        // milliseconds for them to become available.
        public BigqueryQueryJob LegacySqlSyncQuery(string projectId, string datasetId,
                                                   string tableId, string query, double timeoutMs, BigqueryClient client)
        {
            // Retained from the original snippet; the table object is not used by the
            // query below.
            var table = client.GetTable(projectId, datasetId, tableId);

            var jobOptions = new CreateQueryJobOptions { UseLegacySql = true };
            BigqueryJob queryJob = client.CreateQueryJob(query, jobOptions);

            // Get the query result, waiting for the timespan specified in milliseconds.
            var resultOptions = new GetQueryResultsOptions
            {
                Timeout = TimeSpan.FromMilliseconds(timeoutMs)
            };
            return client.GetQueryResults(queryJob.Reference.JobId, resultOptions);
        }
        // Snippet test: uploads two newline-delimited JSON rows to the history table
        // and asserts the table's row count grew by exactly two.
        public void UploadJson()
        {
            string projectId = _fixture.ProjectId;
            string datasetId = _fixture.GameDatasetId;
            string tableId = _fixture.HistoryTableId;

            // Capture the current row count so the delta can be asserted afterwards.
            BigqueryTable historyTable = BigqueryClient.Create(projectId).GetTable(datasetId, tableId);
            int initialRowCount = historyTable.ListRows().Rows.Count();

            // Snippet: UploadJson(*,*,*,*,*)
            BigqueryClient client = BigqueryClient.Create(projectId);
            // Note that there's a single line per JSON object. This is not a JSON array.
            IEnumerable <string> jsonRows = new[]
            {
                "{ 'player': 'John', 'score': 50, 'level': 1, 'game_started': '2014-08-19 12:41:35.220' }",
                "{ 'player': 'Zoe', 'score': 605, 'level': 1, 'game_started': '2016-01-01 08:30:35.000' }",
            }.Select(row => row.Replace('\'', '"')); // Simple way of representing C# in JSON to avoid escaping " everywhere.

            // Normally we'd be uploading from a file or similar. Any readable stream can be used.
            var stream = new MemoryStream(Encoding.UTF8.GetBytes(string.Join("\n", jsonRows)));

            // This example uploads data to an existing table. If the upload will create a new table
            // or if the schema in the JSON isn't identical to the schema in the table,
            // create a schema to pass into the call.
            TableSchema schema = null;
            BigqueryJob uploadJob = client.UploadJson(datasetId, tableId, schema, stream);
            // Use the job to find out when the data has finished being inserted into the table,
            // report errors etc.
            // End snippet

            var completedJob = uploadJob.Poll();

            // If there are any errors, display them *then* fail.
            if (completedJob.Status.ErrorResult != null)
            {
                foreach (var error in completedJob.Status.Errors)
                {
                    Console.WriteLine(error.Message);
                }
            }
            Assert.Null(completedJob.Status.ErrorResult);

            // Two JSON rows were uploaded.
            int finalRowCount = historyTable.ListRows().Rows.Count();
            Assert.Equal(initialRowCount + 2, finalRowCount);
        }
        // Snippet test: runs a grouped max-score query into a fresh destination
        // table, prints each result row, and asserts known players appear.
        public void CreateQueryJob()
        {
            var projectId = _fixture.ProjectId;
            var datasetId = _fixture.GameDatasetId;
            var historyTableId = _fixture.HistoryTableId;
            // Fresh, valid table id for the query destination ('-' is not allowed).
            var queryTableId = Guid.NewGuid().ToString().Replace('-', '_');

            // Snippet: CreateQueryJob(*,*)
            BigqueryClient client = BigqueryClient.Create(projectId);
            BigqueryTable table = client.GetTable(datasetId, historyTableId);
            TableReference destination = client.GetTableReference(datasetId, queryTableId);
            // If the destination table is not specified, the results will be stored in
            // a temporary table.
            var jobOptions = new CreateQueryJobOptions { DestinationTable = destination };
            BigqueryJob job = client.CreateQueryJob(
                $@"SELECT player, MAX(score) AS score
                   FROM {table}
                   GROUP BY player
                   ORDER BY score DESC",
                jobOptions);

            // Wait for the job to complete.
            job.Poll();

            // Then we can fetch the results, either via the job or by accessing
            // the destination table.
            BigqueryResult result = client.GetQueryResults(job.Reference);

            foreach (var row in result.Rows)
            {
                Console.WriteLine($"{row["player"]}: {row["score"]}");
            }
            // End snippet

            var players = result.Rows.Select(r => (string)r["player"]).ToList();

            Assert.Contains("Ben", players);
            Assert.Contains("Nadia", players);
            Assert.Contains("Tim", players);
        }
Example #11
0
        // [END import_from_file]

        // [START stream_row]
        // Builds two newline-delimited JSON rows in memory and loads them into an
        // existing BigQuery table, blocking until the load job completes.
        public void UploadJson(string datasetId, string tableId, BigqueryClient client)
        {
            // Note that there's a single line per JSON object. This is not a JSON array.
            IEnumerable <string> jsonRows = new[]
            {
                "{ 'title': 'exampleJsonFromStream', 'unique_words': 1}",
                "{ 'title': 'moreExampleJsonFromStream', 'unique_words': 1}",
                //add more rows here...
            }.Select(row => row.Replace('\'', '"')); // Simple way of representing C# in JSON to avoid escaping " everywhere.

            // Normally we'd be uploading from a file or similar. Any readable stream can be used.
            byte[] payload = Encoding.UTF8.GetBytes(string.Join("\n", jsonRows));
            var stream = new MemoryStream(payload);

            // This example uploads data to an existing table. If the upload will create a new table
            // or if the schema in the JSON isn't identical to the schema in the table,
            // create a schema to pass into the call instead of passing in a null value.
            BigqueryJob uploadJob = client.UploadJson(datasetId, tableId, null, stream);

            // Use the job to find out when the data has finished being inserted into the table,
            // report errors etc.

            // Wait for the job to complete.
            uploadJob.PollUntilCompleted();
        }