Code Example #1
        // [END import_file_from_gcs]

        // [START sync_query]
        public BigQueryResults SyncQuery(string projectId, string datasetId, string tableId,
                                         string query, double timeoutMs, BigQueryClient client)
        {
            var table = client.GetTable(projectId, datasetId, tableId);
            BigQueryJob job = client.CreateQueryJob(query,
                new CreateQueryJobOptions { UseQueryCache = false });
            // Get the query result, waiting for the timespan specified in milliseconds.
            BigQueryResults result = client.GetQueryResults(job.Reference.JobId,
                new GetQueryResultsOptions { Timeout = TimeSpan.FromMilliseconds(timeoutMs) });

            return result;
        }
Code Example #2
        // [END async_query]


        // [START import_from_file]
        public void UploadJsonFromFile(string projectId, string datasetId, string tableId,
                                       string fileName, BigQueryClient client)
        {
            using (FileStream stream = File.Open(fileName, FileMode.Open))
            {
                // This example uploads data to an existing table. If the upload will create a new table
                // or if the schema in the JSON isn't identical to the schema in the table,
                // create a schema to pass into the call instead of passing in a null value.
                BigQueryJob job = client.UploadJson(datasetId, tableId, null, stream);
                // Use the job to find out when the data has finished being inserted into the table,
                // report errors etc.

                // Wait for the job to complete.
                job.PollUntilCompleted();
            }
        }
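The comment in this example notes that a schema must be supplied (instead of null) when the upload creates a new table or when the JSON layout differs from the table's schema. A minimal sketch of building one with TableSchemaBuilder, assuming the illustrative title/unique_words fields used in the streaming example further down:

        TableSchema schema = new TableSchemaBuilder
        {
            { "title", BigQueryDbType.String },
            { "unique_words", BigQueryDbType.Int64 }
        }.Build();
        // Passing the schema instead of null lets BigQuery create or validate the table.
        BigQueryJob job = client.UploadJson(datasetId, tableId, schema, stream);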
Code Example #3
        // [END sync_query_legacy_sql]

        // [START async_query]
        public BigQueryResults AsyncQuery(string projectId, string datasetId, string tableId,
                                          string query, BigQueryClient client)
        {
            var table = client.GetTable(projectId, datasetId, tableId);
            BigQueryJob job = client.CreateQueryJob(query,
                new CreateQueryJobOptions { UseQueryCache = false });

            // Wait for the job to complete.
            job.PollUntilCompleted();

            // Then we can fetch the results, either via the job or by accessing
            // the destination table.
            return client.GetQueryResults(job.Reference.JobId);
        }
Code Example #4
        public static List<CHITIETTRAM> Update(string matram, string mo, string da)
        {
            List<CHITIETTRAM> re = new List<CHITIETTRAM>();

            // Add file json.
            using (BigQueryClient client = BigQueryClient.Create("phantantai", GoogleCredential.FromFile(@"C:\Users\ykdn1\OneDrive\Máy tính\oracle18.5\oracleteam3\oraclenhom3\oraclenhom3\App_Data\phantantai-c3450caeb9b5.json")))
            {
                string query = $@"
                SELECT stn as MATRAM, da as DA, mo as MO, year as YEAR, temp as NHIETDO, slp as APSUAT, wdsp as TOCDOGIO, max as TMAX, min as TMIN, prcp as LUONGMUA
                FROM `bigquery-public-data.noaa_gsod.gsod2019`
                WHERE stn = '{matram}' AND mo = '{mo}' AND da = '{da}'";
                BigQueryJob job = client.CreateQueryJob(
                    sql: query,
                    parameters: null,
                    options: new QueryOptions { UseQueryCache = false });
                // Wait for the job to complete.
                job.PollUntilCompleted();
                foreach (BigQueryRow row in client.GetQueryResults(job.Reference))
                {
                    // Create a fresh object per row; reusing a single instance would add
                    // the same reference repeatedly and keep only the last row's values.
                    CHITIETTRAM chitie = new CHITIETTRAM();
                    chitie.MATRAM = int.Parse($"{row["MATRAM"]}");
                    chitie.MO     = byte.Parse($"{row["MO"]}");
                    chitie.DA     = short.Parse($"{row["DA"]}");
                    chitie.YEAR   = short.Parse($"{row["YEAR"]}");

                    var nd = float.Parse($"{row["NHIETDO"]}");
                    chitie.NHIETDO = (byte)nd;

                    var tmp = float.Parse($"{row["APSUAT"]}");
                    chitie.APSUAT   = tmp < 900 ? (short)tmp : (short)0;
                    tmp             = float.Parse($"{row["TOCDOGIO"]}");
                    chitie.TOCDOGIO = tmp < 900 ? (short)tmp : (short)0;
                    tmp             = float.Parse($"{row["TMAX"]}");
                    chitie.TMAX     = tmp < 900 ? (short)tmp : (short)0;
                    tmp             = float.Parse($"{row["TMIN"]}");
                    chitie.TMIN     = tmp < 900 ? (short)tmp : (short)0;
                    tmp             = float.Parse($"{row["LUONGMUA"]}");
                    chitie.LUONGMUA = tmp < 900 ? (short)tmp : (short)0;
                    re.Add(chitie);
                }
                return re;
            }
        }
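The Update method above interpolates matram, mo and da straight into the SQL text and passes parameters: null. A rough sketch of the same query using named query parameters instead (the parameter names are assumptions, not from the original code):

                string query = @"
                SELECT stn as MATRAM, da as DA, mo as MO, year as YEAR, temp as NHIETDO, slp as APSUAT, wdsp as TOCDOGIO, max as TMAX, min as TMIN, prcp as LUONGMUA
                FROM `bigquery-public-data.noaa_gsod.gsod2019`
                WHERE stn = @stn AND mo = @mo AND da = @da";
                BigQueryJob job = client.CreateQueryJob(
                    sql: query,
                    // Named parameters keep the user-supplied values out of the SQL text.
                    parameters: new[]
                    {
                        new BigQueryParameter("stn", BigQueryDbType.String, matram),
                        new BigQueryParameter("mo", BigQueryDbType.String, mo),
                        new BigQueryParameter("da", BigQueryDbType.String, da)
                    },
                    options: new QueryOptions { UseQueryCache = false });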
Code Example #5
    public void ExtractTable(
        string projectId  = "your-project-id",
        string bucketName = "your-bucket-name")
    {
        BigQueryClient client = BigQueryClient.Create(projectId);
        // Define a destination URI. Use a single wildcard URI if you think
        // your exported data will be larger than the 1 GB maximum value.
        string      destinationUri = $"gs://{bucketName}/shakespeare-*.csv";
        BigQueryJob job            = client.CreateExtractJob(
            projectId: "bigquery-public-data",
            datasetId: "samples",
            tableId: "shakespeare",
            destinationUri: destinationUri
            );

        job.PollUntilCompleted();  // Waits for the job to complete.
        Console.Write($"Exported table to {destinationUri}.");
    }
Code Example #6
        public async Task GetQueryResults_NonQuery()
        {
            var resource = new Job
            {
                JobReference = new JobReference {
                    ProjectId = "project", JobId = "job"
                },
                Configuration = new JobConfiguration
                {
                    Copy = new JobConfigurationTableCopy {
                    }
                }
            };
            var job = new BigQueryJob(new SimpleClient(), resource);

            Assert.Throws<InvalidOperationException>(() => job.GetQueryResults());
            await Assert.ThrowsAsync<InvalidOperationException>(() => job.GetQueryResultsAsync());
        }
Code Example #7
File: Program.cs Project: mattosaurus/DataTransfer
        public static async Task ExportBigQueryTableToStorageAsync(BigQueryClient bigQueryClient, string destinationUri, BigQueryResults results)
        {
            CreateExtractJobOptions jobOptions = new CreateExtractJobOptions()
            {
                DestinationFormat = FileFormat.Csv,
                Compression       = CompressionType.Gzip
            };

            BigQueryJob job = bigQueryClient.CreateExtractJob(
                projectId: results.TableReference.ProjectId,
                datasetId: results.TableReference.DatasetId,
                tableId: results.TableReference.TableId,
                destinationUri: destinationUri,
                options: jobOptions
                );

            await job.PollUntilCompletedAsync();
        }
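ExportBigQueryTableToStorageAsync only reads results.TableReference, so a typical caller would run a query first and hand its results over, roughly like this (a sketch with assumed project, bucket and query; not taken from the DataTransfer project):

        BigQueryClient bigQueryClient = await BigQueryClient.CreateAsync("your-project-id");
        BigQueryJob queryJob = await bigQueryClient.CreateQueryJobAsync(
            sql: "SELECT name FROM `bigquery-public-data.usa_names.usa_1910_2013` LIMIT 100",
            parameters: null);
        // GetQueryResultsAsync waits for the query to finish; its anonymous destination
        // table is what the helper above exports to Cloud Storage.
        BigQueryResults results = await bigQueryClient.GetQueryResultsAsync(queryJob.Reference);
        await ExportBigQueryTableToStorageAsync(bigQueryClient, "gs://your-bucket-name/names.csv.gz", results);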
Code Example #8
        public async Task GetQueryResults_NoJobReference()
        {
            var resource = new Job
            {
                Configuration = new JobConfiguration
                {
                    DryRun = true,
                    Query  = new JobConfigurationQuery {
                        DestinationTable = new TableReference {
                            ProjectId = "project", DatasetId = "dataset", TableId = "table"
                        }
                    }
                }
            };
            var job = new BigQueryJob(new SimpleClient(), resource);

            Assert.Throws<InvalidOperationException>(() => job.GetQueryResults());
            await Assert.ThrowsAsync<InvalidOperationException>(() => job.GetQueryResultsAsync());
        }
Code Example #9
        public List<string> GetPopularTags()
        {
            const string format           = "yyyyMMdd";
            const string tablePrefix      = "analytics_198930456.events_";
            const string todayTablePrefix = "analytics_198930456.events_intraday_";

            var    todayDate  = DateTime.Today;
            string todayTable = todayTablePrefix + todayDate.ToString(format);
            string d2Table    = tablePrefix + todayDate.AddDays(-1).ToString(format);
            string d3Table    = tablePrefix + todayDate.AddDays(-2).ToString(format);

            CreateTableIfNotExist(todayTable);
            CreateTableIfNotExist(d2Table);
            CreateTableIfNotExist(d3Table);

            string query = $@"
            SELECT event_name, COUNT(event_name) AS tag_count FROM (
                SELECT event_name FROM `{todayTable}`, UNNEST(event_params) AS p1 WHERE p1.key = 'TAG'
                UNION ALL SELECT event_name FROM `{d2Table}`, UNNEST(event_params) AS p2 WHERE p2.key = 'TAG'
                UNION ALL SELECT event_name FROM `{d3Table}`, UNNEST(event_params) AS p3 WHERE p3.key = 'TAG')
            GROUP BY event_name
            ORDER BY tag_count DESC
            LIMIT 5";

            BigQueryJob job = client.CreateQueryJob(
                sql: query,
                parameters: null,
                options: new QueryOptions { UseQueryCache = false });

            job.PollUntilCompleted();

            List<string> tags = new List<string>();

            foreach (BigQueryRow row in client.GetQueryResults(job.Reference))
            {
                var tag = $"{row["event_name"]}";
                tags.Add(tag);
            }
            return tags;
        }
Code Example #10
    public void ExtractTableJson(
        string projectId  = "your-project-id",
        string bucketName = "your-bucket-name")
    {
        BigQueryClient client         = BigQueryClient.Create(projectId);
        string         destinationUri = $"gs://{bucketName}/shakespeare.json";
        var            jobOptions     = new CreateExtractJobOptions()
        {
            DestinationFormat = FileFormat.NewlineDelimitedJson
        };
        BigQueryJob job = client.CreateExtractJob(
            projectId: "bigquery-public-data",
            datasetId: "samples",
            tableId: "shakespeare",
            destinationUri: destinationUri,
            options: jobOptions
            );

        job = job.PollUntilCompleted().ThrowOnAnyError();  // Waits for the job to complete.
        Console.Write($"Exported table to {destinationUri}.");
    }
Code Example #11
        public void Properties()
        {
            var resource = new Job
            {
                JobReference = new JobReference {
                    ProjectId = "project", JobId = "job"
                },
                Status = new JobStatus {
                    State = "RUNNING"
                },
                Statistics = new JobStatistics {
                    CreationTime = 1000L
                }
            };
            var job = new BigQueryJob(new SimpleClient(), resource);

            Assert.Same(resource, job.Resource);
            Assert.Same(resource.JobReference, job.Reference);
            Assert.Same(resource.Statistics, job.Statistics);
            Assert.Same(resource.Status, job.Status);
            Assert.Equal(JobState.Running, job.State);
        }
Code Example #12
        public void ThrowOnAnyError_WithErrors()
        {
            var resource = new Job
            {
                JobReference = new JobReference {
                    ProjectId = "project", JobId = "job"
                },
                Status = new JobStatus
                {
                    State  = "completed",
                    Errors = new[]
                    {
                        new ErrorProto { Message = "Error 1" },
                        new ErrorProto { Message = "Error 2" }
                    }
                }
            };
            var job          = new BigQueryJob(new SimpleClient(), resource);
            var exception    = Assert.Throws <GoogleApiException>(() => job.ThrowOnAnyError());
            var requestError = exception.Error;

            Assert.Equal(new[] { "Error 1", "Error 2" }, requestError.Errors.Select(e => e.Message));
        }
Code Example #13
        protected override void ProcessRecord()
        {
            // Set Project for the lazy instantiation of a BQ Client object.
            Project = InputObject.ProjectId;

            try
            {
                // No options currently available, but class was added for future possibilities.
                BigQueryJob result = Client.CancelJob(InputObject, new CancelJobOptions());

                if (result == null)
                {
                    throw new Exception("Server response came back as null.");
                }

                WriteObject(result.Resource);
            }
            catch (Exception ex)
            {
                ThrowTerminatingError(new ErrorRecord(ex, "Failed to cancel job.",
                                                      ErrorCategory.InvalidOperation, this));
            }
        }
Code Example #14
    public void Query(
        string projectId = "your-project-id"
        )
    {
        BigQueryClient client = BigQueryClient.Create(projectId);
        string query = @"
            SELECT name FROM `bigquery-public-data.usa_names.usa_1910_2013`
            WHERE state = 'TX'
            LIMIT 100";
        BigQueryJob job = client.CreateQueryJob(
            sql: query,
            parameters: null,
            options: new QueryOptions { UseQueryCache = false });

        // Wait for the job to complete.
        job = job.PollUntilCompleted().ThrowOnAnyError();
        // Display the results
        foreach (BigQueryRow row in client.GetQueryResults(job.Reference))
        {
            Console.WriteLine($"{row["name"]}");
        }
    }
Code Example #15
        // [END import_from_file]

        // [START stream_row]
        public void UploadJson(string datasetId, string tableId, BigQueryClient client)
        {
            // Note that there's a single line per JSON object. This is not a JSON array.
            IEnumerable<string> jsonRows = new string[]
            {
                "{ 'title': 'exampleJsonFromStream', 'unique_words': 1}",
                "{ 'title': 'moreExampleJsonFromStream', 'unique_words': 1}",
                // add more rows here...
            }.Select(row => row.Replace('\'', '"')); // Simple way of writing JSON in C# without escaping every double quote.

            // Normally we'd be uploading from a file or similar. Any readable stream can be used.
            var stream = new MemoryStream(Encoding.UTF8.GetBytes(string.Join("\n", jsonRows)));

            // This example uploads data to an existing table. If the upload will create a new table
            // or if the schema in the JSON isn't identical to the schema in the table,
            // create a schema to pass into the call instead of passing in a null value.
            BigQueryJob job = client.UploadJson(datasetId, tableId, null, stream);

            // Use the job to find out when the data has finished being inserted into the table,
            // report errors etc.

            // Wait for the job to complete.
            job.PollUntilCompleted();
        }