Example no. 1
0
        // [END delete_table]

        // [START import_file_from_gcs]
        /// <summary>
        /// Downloads a newline-delimited JSON object from Cloud Storage and loads it
        /// into an existing BigQuery table, blocking until the load job completes.
        /// </summary>
        /// <param name="projectId">Project that owns the bucket (bucket name == project name).</param>
        /// <param name="datasetId">Target BigQuery dataset.</param>
        /// <param name="tableId">Target BigQuery table (must already exist).</param>
        /// <param name="client">Authenticated BigQuery client.</param>
        /// <param name="fileName">Object name within the bucket.</param>
        /// <param name="folder">Optional folder prefix prepended as "folder/fileName".</param>
        public void ImportDataFromCloudStorage(string projectId, string datasetId,
                                               string tableId, BigqueryClient client, string fileName, string folder = null)
        {
            StorageClient gcsClient = StorageClient.Create();

            using (var stream = new MemoryStream())
            {
                // Set Cloud Storage Bucket name. This uses a bucket named the same as the project.
                string bucket = projectId;
                // If folder is passed in, add it to the Cloud Storage object path using "/".
                string filePath = string.IsNullOrEmpty(folder) ? fileName : folder + "/" + fileName;
                // Download the Google Cloud Storage object into the stream.
                // (Use the declared bucket variable rather than projectId directly.)
                gcsClient.DownloadObject(bucket, filePath, stream);

                // BUG FIX: DownloadObject leaves the stream positioned at the end of the
                // downloaded bytes. Rewind it, or UploadJson below would read zero bytes
                // and load an empty payload into the table.
                stream.Position = 0;

                // This example uploads data to an existing table. If the upload will create a new table
                // or if the schema in the JSON isn't identical to the schema in the table,
                // create a schema to pass into the call instead of passing in a null value.
                BigqueryJob job = client.UploadJson(datasetId, tableId, null, stream);
                // Use the job to find out when the data has finished being inserted into the table,
                // report errors etc.

                // Wait for the job to complete.
                job.PollUntilCompleted();
            }
        }
Example no. 2
0
        // [END async_query]


        // [START import_from_file]
        /// <summary>
        /// Loads a local newline-delimited JSON file into an existing BigQuery table,
        /// blocking until the load job completes.
        /// </summary>
        /// <param name="projectId">Project id (unused here; kept for sample signature parity).</param>
        /// <param name="datasetId">Target BigQuery dataset.</param>
        /// <param name="tableId">Target BigQuery table (must already exist).</param>
        /// <param name="fileName">Path of the local JSON file to upload.</param>
        /// <param name="client">Authenticated BigQuery client.</param>
        public void UploadJsonFromFile(string projectId, string datasetId, string tableId,
                                       string fileName, BigqueryClient client)
        {
            // File.OpenRead opens the file read-only with read sharing. The previous
            // File.Open(fileName, FileMode.Open) requested ReadWrite access, which fails
            // on read-only files and needlessly blocks concurrent readers.
            using (FileStream stream = File.OpenRead(fileName))
            {
                // This example uploads data to an existing table. If the upload will create a new table
                // or if the schema in the JSON isn't identical to the schema in the table,
                // create a schema to pass into the call instead of passing in a null value.
                BigqueryJob job = client.UploadJson(datasetId, tableId, null, stream);
                // Use the job to find out when the data has finished being inserted into the table,
                // report errors etc.

                // Wait for the job to complete.
                job.PollUntilCompleted();
            }
        }
Example no. 3
0
        // [END sync_query_legacy_sql]

        // [START async_query]
        /// <summary>
        /// Runs a query as an asynchronous BigQuery job (query cache disabled),
        /// waits for it to finish, and returns its results.
        /// </summary>
        /// <param name="projectId">Project that owns the table referenced by the query.</param>
        /// <param name="datasetId">Dataset of the table referenced by the query.</param>
        /// <param name="tableId">Table referenced by the query.</param>
        /// <param name="query">SQL text to execute.</param>
        /// <param name="client">Authenticated BigQuery client.</param>
        /// <returns>The completed query job's results.</returns>
        public BigqueryQueryJob AsyncQuery(string projectId, string datasetId, string tableId,
                                           string query, BigqueryClient client)
        {
            // Fetch the table up front so a missing table fails fast with a clear
            // NotFound error. The result itself is not needed, so discard it instead
            // of declaring an unused local.
            _ = client.GetTable(projectId, datasetId, tableId);

            BigqueryJob job = client.CreateQueryJob(query,
                new CreateQueryJobOptions { UseQueryCache = false });

            // Wait for the job to complete.
            job.PollUntilCompleted();

            // Then we can fetch the results, either via the job or by accessing
            // the destination table.
            return client.GetQueryResults(job.Reference.JobId);
        }
Example no. 4
0
        // [END import_from_file]

        // [START stream_row]
        /// <summary>
        /// Streams a small in-memory set of newline-delimited JSON rows into an
        /// existing BigQuery table, blocking until the load job completes.
        /// </summary>
        /// <param name="datasetId">Target BigQuery dataset.</param>
        /// <param name="tableId">Target BigQuery table (must already exist).</param>
        /// <param name="client">Authenticated BigQuery client.</param>
        public void UploadJson(string datasetId, string tableId, BigqueryClient client)
        {
            // Note that there's a single line per JSON object. This is not a JSON array.
            IEnumerable <string> jsonRows = new string[]
            {
                "{ 'title': 'exampleJsonFromStream', 'unique_words': 1}",
                "{ 'title': 'moreExampleJsonFromStream', 'unique_words': 1}",
                //add more rows here...
            }.Select(row => row.Replace('\'', '"')); // Simple way of representing C# in JSON to avoid escaping " everywhere.

            // Normally we'd be uploading from a file or similar. Any readable stream can be used.
            // Dispose the stream when done, consistent with the other upload samples.
            using (var stream = new MemoryStream(Encoding.UTF8.GetBytes(string.Join("\n", jsonRows))))
            {
                // This example uploads data to an existing table. If the upload will create a new table
                // or if the schema in the JSON isn't identical to the schema in the table,
                // create a schema to pass into the call instead of passing in a null value.
                BigqueryJob job = client.UploadJson(datasetId, tableId, null, stream);

                // Use the job to find out when the data has finished being inserted into the table,
                // report errors etc.

                // Wait for the job to complete.
                job.PollUntilCompleted();
            }
        }