// [END create_dataset]

        // [START create_table]
        public void CreateTable(string datasetId, string tableId, BigQueryClient client)
        {
            var dataset = client.GetDataset(datasetId);
            // Create schema for new table.
            var schema = new TableSchemaBuilder
            {
                { "title", BigQueryDbType.String },
                { "unique_words", BigQueryDbType.Int64 }
            }.Build();
            // Create the table if it doesn't exist.
            BigQueryTable table = dataset.GetOrCreateTable(tableId, schema);
        }
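
A minimal usage sketch for the snippet above (not part of the original samples; the project id, dataset id, table id, and host class name are all placeholders):

        // Hedged usage sketch: the ids and the BigQuerySnippets host class are assumptions.
        var client = BigQueryClient.Create("your-project-id");
        var snippets = new BigQuerySnippets();
        snippets.CreateDataset("your_dataset", client);             // shown later in this listing
        snippets.CreateTable("your_dataset", "your_table", client);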
 public BigQueryTest()
 {
     // [START create_bigquery_client]
     // By default, the Google.Cloud.BigQuery.V2 client authenticates using
     // the service account key file (created in the Google Cloud Console)
     // specified by the GOOGLE_APPLICATION_CREDENTIALS environment variable.
     // If you are running on a Google Compute Engine VM, authentication is
     // completely automatic.
     _projectId = Environment.GetEnvironmentVariable("GOOGLE_PROJECT_ID");
     _client = BigQueryClient.Create(_projectId);
     // [END create_bigquery_client]
 }
        // [END sync_query]

        // [START sync_query_legacy_sql]
        public BigQueryResults LegacySqlSyncQuery(string projectId, string datasetId,
            string tableId, string query, double timeoutMs, BigQueryClient client)
        {
            var table = client.GetTable(projectId, datasetId, tableId);
            BigQueryJob job = client.CreateQueryJob(query,
                new CreateQueryJobOptions { UseLegacySql = true });
            // Get the query result, waiting for the timespan specified in milliseconds.
            BigQueryResults result = client.GetQueryResults(job.Reference.JobId,
                new GetQueryResultsOptions { Timeout = TimeSpan.FromMilliseconds(timeoutMs) });
            return result;
        }
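
For reference, a query string in the legacy SQL dialect that could be passed to the snippet above; legacy SQL uses [project:dataset.table] bracket syntax, as in this sketch against the public Shakespeare sample table:

        string legacyQuery =
            "SELECT TOP(corpus, 10) AS title, COUNT(*) AS unique_words " +
            "FROM [bigquery-public-data:samples.shakespeare]";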
        private Task<BigQueryTable> GetTableAsync(DateTime date, CancellationToken cancellationToken)
        {
            BigQueryClient client = GetBiqQueryClient();

            // The "$yyyyMMdd" suffix is BigQuery's partition decorator: it addresses
            // the single daily partition of a date-partitioned table.
            return client.GetTableAsync(googleBigQueryAttribute.DatasetId, $"{googleBigQueryAttribute.TableId}${date:yyyyMMdd}", null, cancellationToken);
        }
Example #5
        /// <summary>
        /// Runs the configured BigQuery SQL for a given date, writes the results
        /// to S3 as Parquet files, and loads the new partition into Athena.
        /// </summary>
        /// <param name="etlSettings">The ETL settings describing the query source and the S3 target.</param>
        /// <param name="awsAthenaAPI">The Athena API used to load the new partition.</param>
        /// <returns>The list of S3 URIs that were written.</returns>
        public static async Task<List<string>> TransferBigQueryResultByDate(this EtlSettings etlSettings, AWSAthenaAPI awsAthenaAPI)
        {
            var result = new List<string>();

            var awsS3Api = etlSettings.CreateTargetS3API();
            var ga       = etlSettings.GoogleAnalyticsQuerySource;

            Environment.SetEnvironmentVariable("GOOGLE_APPLICATION_CREDENTIALS", $"{AppContext.BaseDirectory}/{ga.GoogleAnalyticsSettingFile}");

            BigQueryClient client = BigQueryClient.Create(ga.GoogleAnalyticsProjectId);

            string sql = ga.BigQuerySQL;

            string dateQueryKey = DateTime.Now.AddDays(-ga.DaysAgo).ToString(ga.DateFormat);
            string dateKey      = DateTime.Now.AddDays(-ga.DaysAgo).ToString("yyyyMMdd");

            sql = sql.Replace("{date}", dateKey);

            var job = await client.CreateQueryJobAsync(sql, new List<BigQueryParameter>());

            BigQueryResults results = await client.GetQueryResultsAsync(job.Reference, new GetQueryResultsOptions()
            {
                StartIndex = 0,
                PageSize   = 20000,
            });

            var enumerator = results.GetEnumerator();

            List<BigQueryRow> rows = new List<BigQueryRow>();

            int parquetIndex = 0;

            var targetS3 = etlSettings.CreateTargetS3API();

            while (enumerator.MoveNext())
            {
                rows.Add(enumerator.Current);
                if (rows.Count >= etlSettings.NumberOfItemsPerParquet)
                {
                    var s3key = etlSettings.MakeTargetS3Key(dateKey, "", false, parquetIndex);
                    await targetS3.WriteResultRowsToS3Bucket(rows, results, etlSettings, s3key);

                    result.Add($"s3://{etlSettings.TargetS3BucketName}/{s3key}");
                    parquetIndex += 1;
                    // Clear the buffer so each Parquet file only contains its own rows.
                    rows.Clear();
                }
            }

            // Write whatever rows remain (fewer than the per-file threshold).
            if (rows.Count > 0)
            {
                var s3key = etlSettings.MakeTargetS3Key(dateKey, "", false, parquetIndex);
                await targetS3.WriteResultRowsToS3Bucket(rows, results, etlSettings, s3key);

                result.Add($"s3://{etlSettings.TargetS3BucketName}/{s3key}");
                parquetIndex += 1;
            }

            {
                // load partition to athena table
                await awsAthenaAPI.LoadPartition(
                    $"`{etlSettings.AthenaDatabaseName}`.`{etlSettings.AthenaTableName}`",
                    $"`{etlSettings.DatePartitionKey}` = '{dateKey}'",
                    $"s3://{etlSettings.TargetS3BucketName}/{etlSettings.TargetS3Prefix}/{dateKey}/");
            }

            return result;
        }
 public DamageHistoryRepository(BigQueryClient client, string datasetId)
     : base(client, datasetId)
 {
 }
        // [END list_projects]

        // [START list_rows]
        public int ListRows(
            string projectId, string datasetId, string tableId, int numberOfRows,
            BigQueryClient client)
        {
            int recordCount = 0;
            var result = client.ListRows(projectId, datasetId, tableId, null,
                new ListRowsOptions { PageSize = numberOfRows });
            foreach (var row in result.Take(numberOfRows))
            {
                Console.WriteLine($"{row["word"]}: {row["corpus"]}");
                recordCount++;
            }
            return recordCount;
        }
 public void PopulateTable(
     string query, string datasetId, string newTableId, BigQueryClient client)
 {
     var destination = client.GetTableReference(datasetId, newTableId);
     BigQueryJob job = client.CreateQueryJob(query,
         new CreateQueryJobOptions { DestinationTable = destination });
     // Wait for the job to complete.
     job.PollQueryUntilCompleted();
 }
 public BigQueryInsertResultsTests()
 {
     _client = new DerivedBigQueryClient();
 }
Example #10
        private BigQueryTable GetTable()
        {
            var client = BigQueryClient.Create(_fixture.ProjectId);

            return client.GetTable(_fixture.DatasetId, _fixture.ExhaustiveTypesTableId);
        }
Example #11
 public LoginRepository(BigQueryClient client, string datasetId)
     : base(client, datasetId)
 {
 }
Example #12
        private void Form1_Load(object sender, EventArgs e)
        {
            //---------------- GLOBALS ----------------

            //Set GOOGLE_APPLICATION_CREDENTIALS env variable so that GoogleCredential.GetApplicationDefault() works
            Environment.SetEnvironmentVariable("GOOGLE_APPLICATION_CREDENTIALS", @"..\..\Credentials\key.json");
            string PROJECT_ID = "bqconnectordemo";

            //Create authenticated client
            var credential = GoogleCredential.GetApplicationDefault();
            var client     = BigQueryClient.Create(PROJECT_ID, credential);

            //Set datasetId and tableName variables to identify the dataset and table to interact with (querying or streaming)
            string datasetId = "demo";
            string tableName = "BitcoinBlockchainMetrics";

            //----------------------------------------------
            //---------------- QUERY IMPORT ----------------

            //Query a table
            var table = client.GetTable(PROJECT_ID, datasetId, tableName);
            var sql   =
                $"SELECT * FROM {table} WHERE DATE(DateTime) = '2018-09-17'";

            //Execute Query
            var results = client.ExecuteQuery(sql, parameters: null);

            //Create and populate DataTable
            DataTable dt = CreateDataTable(results);

            foreach (var row in results)
            {
                PopulateDataTable(dt, row);
            }

            //Populate DataGridView
            dataGridView1.DataSource = dt;
            dataGridView1.Refresh();

            //--------------------------------------------------
            //---------------- STREAMING EXPORT ----------------

            List<BigQueryInsertRow> bQIRows = new List<BigQueryInsertRow>();

            string[] columnNames = dt.Columns
                                   .Cast<DataColumn>()
                                   .Select(x => x.ColumnName)
                                   .ToArray();

            var count = 0; //DEMO ONLY!!!

            //Build list of rows to stream (bQIRows)
            foreach (DataRow dr in dt.Rows)
            {
                if (count > 10)
                {
                    break;
                }
                bQIRows.Add(PopulateBigQueryInsertRow(dr, columnNames, client, datasetId, tableName));
                count++; //DEMO ONLY!!!
            }

            //Stream bQIRows up to BigQuery
            client.InsertRows(datasetId, tableName, bQIRows);

            //---------------------------------------------------
            //---------------- MISC EXAMPLE CODE ----------------

            //Set query options
            //var queryOptions = new QueryOptions
            //{
            //    UseQueryCache = false
            //};
            //var results = client.ExecuteQuery(sql, parameters: null, queryOptions: queryOptions);

            //Console.WriteLine(results.SafeTotalRows);
            //var job = client.GetJob(results.JobReference);
            //var stats = job.Statistics;
            //Console.WriteLine("-----------");
            //Console.WriteLine($"Creation time: {stats.CreationTime}");
            //Console.WriteLine($"End time: {stats.EndTime}");
            //Console.WriteLine($"Total bytes processed: {stats.TotalBytesProcessed}");
        }
Example #13
        private static BigQueryInsertRow PopulateBigQueryInsertRow(DataRow dr, string[] cNames, BigQueryClient client, string datasetId, string tableName)
        {
            var    bQIRow = new BigQueryInsertRow();
            string type;

            foreach (var key in cNames)
            {
                type = GetBigQueryType(client, datasetId, tableName, key);

                if (type == "TIMESTAMP")
                {
                    string value = DateTime.Parse((string)dr[key]).ToString("yyyy'-'MM'-'dd'T'HH':'mm':'ss");
                    bQIRow.Add(key, value);
                }
                else
                {
                    bQIRow.Add(key, dr[key]);
                }
            }

            return bQIRow;
        }
Example #14
 public UnitMasterRepository(BigQueryClient client, string datasetId, MasterData masterData)
     : base(client, datasetId, "unit_master", masterData)
 {
 }
Example #15
        private bool BuildTable()
        {
            var tableName = ComputerName == "CH-J7TMTZ1" ? "CCChinaTable" : "CCTable";

#if DEBUG
            tableName = "CCChinaTable";
#endif
            try {
                _client  = BigQueryClient.Create("booming-pride-278623");
                _dataset = _client.GetOrCreateDataset("CCDataset");
                _table   = _dataset.GetOrCreateTable(tableName, new TableSchemaBuilder
                {
                    { "CycleNumber", BigQueryDbType.Int64 },
                    { "LogTime_Timestamp", BigQueryDbType.Timestamp },
                    { "TotalTest_Hours", BigQueryDbType.Float64 },
                    { "MinutesIntoCycle", BigQueryDbType.Float64 },
                    { "CurrentBiasIsOn", BigQueryDbType.String },
                    { "SampleName", BigQueryDbType.String },
                    { "Current_Amps", BigQueryDbType.Float64 },
                    { "Voltage_Volts", BigQueryDbType.Float64 },
                    { "NumCells", BigQueryDbType.Int64 },
                    { "CellVoc_Volts", BigQueryDbType.Float64 },
                    { "TempSensorNumber", BigQueryDbType.Int64 },
                    { "SetCurrent_Amps", BigQueryDbType.Float64 },
                    { "EstimatedRs_mOhms", BigQueryDbType.Float64 },
                    { "Temp1_C", BigQueryDbType.Float64 },
                    { "Temp2_C", BigQueryDbType.Float64 },
                    { "Temp3_C", BigQueryDbType.Float64 },
                    { "Temp4_C", BigQueryDbType.Float64 },
                    { "Temp5_C", BigQueryDbType.Float64 },
                    { "Temp6_C", BigQueryDbType.Float64 },
                    { "Temp7_C", BigQueryDbType.Float64 },
                    { "Temp8_C", BigQueryDbType.Float64 },
                    { "Temp9_C", BigQueryDbType.Float64 },
                    { "Temp10_C", BigQueryDbType.Float64 },
                    { "Temp11_C", BigQueryDbType.Float64 },
                    { "Temp12_C", BigQueryDbType.Float64 },
                    { "Temp13_C", BigQueryDbType.Float64 },
                    { "Temp14_C", BigQueryDbType.Float64 },
                    { "Temp15_C", BigQueryDbType.Float64 },
                    { "Temp16_C", BigQueryDbType.Float64 },
                    { "SmokeLevel1_Volts", BigQueryDbType.Float64 },
                    { "SmokeLevel2_Volts", BigQueryDbType.Float64 },
                    { "SmokeLevel3_Volts", BigQueryDbType.Float64 },
                    { "SmokeLevel4_Volts", BigQueryDbType.Float64 },
                    { "SmokeLevel5_Volts", BigQueryDbType.Float64 },
                    { "SmokeLevel6_Volts", BigQueryDbType.Float64 },
                    { "SmokeLevel7_Volts", BigQueryDbType.Float64 },
                    { "SmokeLevel8_Volts", BigQueryDbType.Float64 },
                    { "SmokeVoltage1_Volts", BigQueryDbType.Float64 },
                    { "SmokeVoltage2_Volts", BigQueryDbType.Float64 },
                    { "SmokeVoltage3_Volts", BigQueryDbType.Float64 },
                    { "SmokeVoltage4_Volts", BigQueryDbType.Float64 },
                    { "SmokeVoltage5_Volts", BigQueryDbType.Float64 },
                    { "SmokeVoltage6_Volts", BigQueryDbType.Float64 },
                    { "SmokeVoltage7_Volts", BigQueryDbType.Float64 },
                    { "SmokeVoltage8_Volts", BigQueryDbType.Float64 },
                }.Build());
                return true;
            }
            catch {
                // Swallow any failure and report it via the return value.
                return false;
            }
        }
        // [END async_query]


        // [START import_from_file]
        public void UploadJsonFromFile(string projectId, string datasetId, string tableId,
            string fileName, BigQueryClient client)
        {
            using (FileStream stream = File.Open(fileName, FileMode.Open))
            {
                // This example uploads data to an existing table. If the upload will create a new table
                // or if the schema in the JSON isn't identical to the schema in the table,
                // create a schema to pass into the call instead of passing in a null value.
                BigQueryJob job = client.UploadJson(datasetId, tableId, null, stream);
                // Use the job to find out when the data has finished being inserted into the table,
                // report errors etc.

                // Wait for the job to complete.
                job.PollUntilCompleted();
            }
        }
        // [END stream_row]

        // [START export_to_cloud_storage]
        public void ExportJsonToGcs(
            string datasetId, string tableId, string bucketName, string fileName,
            BigQueryClient client)
        {
            StorageClient gcsClient = StorageClient.Create();
            string contentType = "application/json";
            // Get Table and append results into StringBuilder.
            PagedEnumerable<TableDataList, BigQueryRow> result = client.ListRows(datasetId, tableId);
            StringBuilder sb = new StringBuilder();
            foreach (var row in result)
            {
                sb.Append($"{{\"title\" : \"{row["title"]}\", \"unique_words\":\"{row["unique_words"]}\"}}{Environment.NewLine}");
            }
            // Save stream to Google Cloud Storage.
            using (var stream = new MemoryStream(Encoding.UTF8.GetBytes(sb.ToString())))
            {
                var obj = gcsClient.UploadObject(bucketName, fileName, contentType, stream);
            }
        }
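
Paging every row through the client, as above, is fine for small tables; for larger ones a server-side extract job is usually preferable. A sketch, assuming Google.Cloud.BigQuery.V2's CreateExtractJob and the same parameters:

        public void ExportTableToGcs(
            string datasetId, string tableId, string bucketName, string fileName, BigQueryClient client)
        {
            // BigQuery writes the table directly to Cloud Storage; no rows pass through the client.
            string destinationUri = $"gs://{bucketName}/{fileName}";
            BigQueryJob job = client.CreateExtractJob(
                client.GetTableReference(datasetId, tableId), destinationUri);
            // Wait for the export to complete.
            job.PollUntilCompleted();
        }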
 public void Constructor_NullParameters_Fail(
     BigQueryClient client, InsertOptions options, IReadOnlyList<BigQueryInsertRow> rows, TableDataInsertAllResponse response) =>
 Assert.Throws<ArgumentNullException>(() => new BigQueryInsertResults(client, options, rows, response));
 // [START list_datasets]
 public List<BigQueryDataset> ListDatasets(BigQueryClient client)
 {
     var datasets = client.ListDatasets().ToList();
     return datasets;
 }
Example #20
 public Erc721UsageQuery(BigQueryClient client)
 {
     _client = client;
 }
 public MatchUseUnitHistoryRepository(BigQueryClient client, string datasetId)
     : base(client, datasetId)
 {
 }
Example #22
 public MatchExecutionHistoryRepository(BigQueryClient client, string datasetId)
     : base(client, datasetId)
 {
 }
 // [START delete_table]
 public void DeleteTable(string datasetId, string tableId, BigQueryClient client)
 {
     client.DeleteTable(_projectId, datasetId, tableId);
 }
Example #24
        /// <summary>
        /// Handle the intent.
        /// </summary>
        /// <param name="req">The DialogFlow conversation request.</param>
        /// <returns>The spoken response for DialogFlow.</returns>
        public override async Task<string> HandleAsync(ConvRequest req)
        {
            // Extract the requested DialogFlow date, without the time.
            // Format is "yyyy-mm-dd".
            var date = req.Parameters["date"];

            date = date.Substring(0, Math.Min(10, date.Length));

            // Create the BigQuery client with default credentials
            var bigQueryClient = await BigQueryClient.CreateAsync(Program.AppSettings.GoogleCloudSettings.ProjectId);

            // Build the parameterized SQL query
            var table = bigQueryClient.GetTable("bigquery-public-data", "hacker_news", "full");
            var sql   = $"SELECT title, url \nFROM {table} \n" +
                        "WHERE STARTS_WITH(CAST(timestamp AS STRING), @date) AND type=\"story\" \n" +
                        "ORDER BY score DESC \n" +
                        "LIMIT 10";

            // Create the BigQuery parameters
            var parameters = new[]
            {
                new BigQueryParameter("date", BigQueryDbType.String, date)
            };

            // Show SQL query in browser
            ShowQuery(sql, parameters);

            // Time the BigQuery execution with a StopWatch
            var stopwatch = new Stopwatch();

            stopwatch.Start();

            // Execute BigQuery SQL query. This can take time
            var result = await bigQueryClient.ExecuteQueryAsync(sql, parameters);

            // Query finished, stop the StopWatch
            stopwatch.Stop();

            // Get a job reference, for statistics
            var job = await bigQueryClient.GetJobAsync(result.JobReference);

            // Get result list, and check that there are some results
            var resultList = result.ToList();

            if (resultList.Count == 0)
            {
                return(DialogflowApp.Tell("Sorry, there is no data for that date."));
            }

            // Time and data statistics
            long   processedMb = job.Statistics.TotalBytesProcessed.Value / (1024 * 1024);
            double secs        = stopwatch.Elapsed.TotalSeconds;
            var    titles      = resultList.Select(x => x["title"].ToString()).ToList();

            // Show SQL query and query results in browser
            ShowQuery(sql, parameters, (processedMb, secs, titles));

            // Send spoken response to DialogFlow
            return(DialogflowApp.Tell($"Scanned {processedMb} mega-bytes in {secs:0.0} seconds. " +
                                      $"The top title on hacker news was titled: {titles.First()}"));
        }
Example #25
 public TokenUsageQuery(BigQueryClient client)
 {
     _client = client;
 }
Example #26
 public SetOpenTypeRepository(BigQueryClient client, string datasetId)
     : base(client, datasetId)
 {
 }
Example #27
        public static async Task GetBigQueryResultSampleByDate(this EtlSettings etlSettings, int lines)
        {
            var awsS3Api = etlSettings.CreateTargetS3API();
            var ga       = etlSettings.GoogleAnalyticsQuerySource;

            Environment.SetEnvironmentVariable("GOOGLE_APPLICATION_CREDENTIALS", $"{AppContext.BaseDirectory}/{ga.GoogleAnalyticsSettingFile}");

            BigQueryClient client = BigQueryClient.Create(ga.GoogleAnalyticsProjectId);

            string sql = ga.BigQuerySQL;

            string dateQueryKey = DateTime.Now.AddDays(-ga.DaysAgo).ToString(ga.DateFormat);

            // Limit the sample query to the requested number of lines.
            sql = sql.Replace("{date}", dateQueryKey) + $"\nlimit {lines}";

            var job = await client.CreateQueryJobAsync(sql, new List<BigQueryParameter>());

            BigQueryResults results = await client.GetQueryResultsAsync(job.Reference, new GetQueryResultsOptions()
            {
                StartIndex = 0,
                PageSize   = 20000,
            });

            var enumerator = results.GetEnumerator();

            List<BigQueryRow> rows = new List<BigQueryRow>();

            while (enumerator.MoveNext())
            {
                rows.Add(enumerator.Current);
            }

            // map schema to athena types

            etlSettings.Mappings = results.ToFieldMappings();
            var sample = new DataSample()
            {
                Rows = new List<DataRow>()
            };

            // convert big query data to sample data
            foreach (var row in rows)
            {
                sample.Rows.Add(new DataRow()
                {
                    Items = row.RawRow.F.Select(item =>
                    {
                        if (item.V == null)
                        {
                            return "";
                        }
                        else if (item.V.GetType() == typeof(DateTime))
                        {
                            return ((DateTime)item.V).ToString("o");
                        }
                        else if (item.V.GetType() == typeof(byte[]))
                        {
                            return Convert.ToBase64String((byte[])item.V);
                        }
                        else
                        {
                            return item.V.ToString();
                        }
                    }).ToList()
                });
            }
            etlSettings.Sample = sample;
        }
Example #28
 public PingResultsRepository(BigQueryClient client, string datasetId)
     : base(client, datasetId)
 {
 }
        private Task<BigQueryTable> GetTableAsync(CancellationToken cancellationToken)
        {
            BigQueryClient client = GetBiqQueryClient();

            return client.GetTableAsync(googleBigQueryAttribute.DatasetId, googleBigQueryAttribute.TableId, null, cancellationToken);
        }
 public BqCmdlet()
 {
     _service = new Lazy<BigqueryService>(() => new BigqueryService(GetBaseClientServiceInitializer()));
     _client  = new Lazy<BigQueryClient>(() => BigQueryClient.Create(Project));
 }
        // [END sync_query_legacy_sql]

        // [START async_query]
        public BigQueryResults AsyncQuery(string projectId, string datasetId, string tableId,
            string query, BigQueryClient client)
        {
            var table = client.GetTable(projectId, datasetId, tableId);
            BigQueryJob job = client.CreateQueryJob(query,
                new CreateQueryJobOptions { UseQueryCache = false });

            // Wait for the job to complete.
            job.PollUntilCompleted();

            // Then we can fetch the results, either via the job or by accessing
            // the destination table.
            return client.GetQueryResults(job.Reference.JobId);
        }
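
Equivalently, the polling and result fetch at the end of the snippet above can be collapsed into a single call; a sketch, not from the original sample:

            // Instead of PollUntilCompleted() followed by GetQueryResults(job.Reference.JobId):
            BigQueryResults results = job.PollQueryUntilCompleted();
            return results;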
Example #32
    public static object Inspect(
        string projectId,
        string minLikelihood,
        int maxFindings,
        bool includeQuote,
        IEnumerable<FieldId> identifyingFields,
        IEnumerable<InfoType> infoTypes,
        IEnumerable<CustomInfoType> customInfoTypes,
        string datasetId,
        string tableId)
    {
        var inspectJob = new InspectJobConfig
        {
            StorageConfig = new StorageConfig
            {
                BigQueryOptions = new BigQueryOptions
                {
                    TableReference = new Google.Cloud.Dlp.V2.BigQueryTable
                    {
                        ProjectId = projectId,
                        DatasetId = datasetId,
                        TableId   = tableId,
                    },
                    IdentifyingFields =
                    {
                        identifyingFields
                    }
                },

                TimespanConfig = new StorageConfig.Types.TimespanConfig
                {
                    StartTime = Timestamp.FromDateTime(System.DateTime.UtcNow.AddYears(-1)),
                    EndTime   = Timestamp.FromDateTime(System.DateTime.UtcNow)
                }
            },

            InspectConfig = new InspectConfig
            {
                InfoTypes       = { infoTypes },
                CustomInfoTypes = { customInfoTypes },
                Limits          = new FindingLimits
                {
                    MaxFindingsPerRequest = maxFindings
                },
                ExcludeInfoTypes = false,
                IncludeQuote     = includeQuote,
                MinLikelihood    = (Likelihood)System.Enum.Parse(typeof(Likelihood), minLikelihood)
            },
            Actions =
            {
                new Google.Cloud.Dlp.V2.Action
                {
                    // Save results in BigQuery Table
                    SaveFindings = new Google.Cloud.Dlp.V2.Action.Types.SaveFindings
                    {
                        OutputConfig = new OutputStorageConfig
                        {
                            Table = new Google.Cloud.Dlp.V2.BigQueryTable
                            {
                                ProjectId = projectId,
                                DatasetId = datasetId,
                                TableId   = tableId
                            }
                        }
                    },
                }
            }
        };

        // Issue Create Dlp Job Request
        var client  = DlpServiceClient.Create();
        var request = new CreateDlpJobRequest
        {
            InspectJob          = inspectJob,
            ParentAsProjectName = new ProjectName(projectId),
        };

        // We need the name of the created job.
        var dlpJob  = client.CreateDlpJob(request);
        var jobName = dlpJob.Name;

        // Make sure the job finishes before inspecting the results.
        // Alternatively, we could inspect results opportunistically, but
        // for testing purposes we want a consistent outcome.
        var finishedJob    = EnsureJobFinishes(projectId, jobName);
        var bigQueryClient = BigQueryClient.Create(projectId);
        var table          = bigQueryClient.GetTable(datasetId, tableId);

        // Return only first page of 10 rows
        Console.WriteLine("DLP v2 Results:");
        var firstPage = table.ListRows(new ListRowsOptions {
            StartIndex = 0, PageSize = 10
        });

        foreach (var item in firstPage)
        {
            // Replace the empty indexer with the name of the column to display.
            Console.WriteLine($"\t {item[""]}");
        }

        return finishedJob;
    }
        // [END import_from_file]

        // [START stream_row]
        public void UploadJson(string datasetId, string tableId, BigQueryClient client)
        {
            // Note that there's a single line per JSON object. This is not a JSON array.
            IEnumerable<string> jsonRows = new string[]
            {
                "{ 'title': 'exampleJsonFromStream', 'unique_words': 1}",
                "{ 'title': 'moreExampleJsonFromStream', 'unique_words': 1}",
                // Add more rows here...
            }.Select(row => row.Replace('\'', '"')); // Simple way of writing JSON in C# without escaping " everywhere.

            // Normally we'd be uploading from a file or similar. Any readable stream can be used.
            var stream = new MemoryStream(Encoding.UTF8.GetBytes(string.Join("\n", jsonRows)));

            // This example uploads data to an existing table. If the upload will create a new table
            // or if the schema in the JSON isn't identical to the schema in the table,
            // create a schema to pass into the call instead of passing in a null value.
            BigQueryJob job = client.UploadJson(datasetId, tableId, null, stream);
            // Use the job to find out when the data has finished being inserted into the table,
            // report errors etc.

            // Wait for the job to complete.
            job.PollUntilCompleted();
        }
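
For small numbers of rows, the streaming insert API is an alternative to a JSON load job; a sketch, assuming the two-column schema from the create_table snippet above:

        public void InsertExampleRows(string datasetId, string tableId, BigQueryClient client)
        {
            // BigQueryInsertRow supports a collection initializer of field name/value pairs.
            var rows = new[]
            {
                new BigQueryInsertRow { { "title", "exampleFromInsertRow" }, { "unique_words", 1 } },
                new BigQueryInsertRow { { "title", "moreExampleFromInsertRow" }, { "unique_words", 1 } },
            };
            // Streamed rows become available for querying almost immediately.
            client.InsertRows(datasetId, tableId, rows);
        }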
 private static BigQueryClient CreateBigQueryClient(string projectId, string serviceKeyFile) =>
 string.IsNullOrEmpty(serviceKeyFile)
         ? BigQueryClient.Create(projectId)
         : BigQueryClient.Create(projectId, GoogleCredential.FromFile(serviceKeyFile));
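
A hedged usage sketch for the helper above; the project id and key-file path are placeholders:

 // Falls back to Application Default Credentials when no key file is given.
 var adcClient = CreateBigQueryClient("your-project-id", null);
 // Uses an explicit service-account key file otherwise.
 var keyedClient = CreateBigQueryClient("your-project-id", "/path/to/service-account.json");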
        // [END export_to_cloud_storage]

        public void ExportCsvToGcs(
            string datasetId, string tableId, string bucketName, string fileName, BigQueryClient client)
        {
            StorageClient gcsClient = StorageClient.Create();
            string contentType = "text/csv";
            // Get Table and append results into StringBuilder.
            var result = client.ListRows(datasetId, tableId);
            StringBuilder sb = new StringBuilder();
            // Create header row.
            sb.Append($"title, unique_words,{Environment.NewLine}");
            foreach (var row in result)
            {
                sb.Append($"{row["title"]}, {row["unique_words"]},{Environment.NewLine}");
            }
            // Save stream to Google Cloud Storage.
            using (var stream = new MemoryStream(Encoding.UTF8.GetBytes(sb.ToString())))
            {
                var obj = gcsClient.UploadObject(bucketName, fileName, contentType, stream);
            }
        }
Example #36
 // [START create_dataset]
 public void CreateDataset(string datasetId, BigQueryClient client)
 {
     var dataset = client.GetOrCreateDataset(datasetId);
 }
 // [START copy_table]
 public void CopyTable(
     string datasetId, string tableIdToBeCopied, string newTableId, BigQueryClient client)
 {
     var table = client.GetTable(datasetId, tableIdToBeCopied);
     string query = $"SELECT * FROM {table}";
     var destination = client.GetTableReference(datasetId, newTableId);
     BigQueryJob job = client.CreateQueryJob(query,
         new CreateQueryJobOptions { DestinationTable = destination });
     // Wait for the job to complete.
     job.PollQueryUntilCompleted();
 }
Example #38
        // [END create_table]

        public void DeleteDataset(string datasetId, BigQueryClient client)
        {
            _retryDeleteBusy.Eventually(() => client.DeleteDataset(datasetId));
        }
        // [END list_datasets]

        // [START list_projects]
        public List<CloudProject> ListProjects(BigQueryClient client)
        {
            var projects = client.ListProjects().ToList();
            return projects;
        }
Example #40
        // [END list_rows]

        // [START browse_table]
        public int TableDataList(
            string datasetId, string tableId, int pageSize, BigQueryClient client)
        {
            int recordCount = 0;
            var result = client.ListRows(datasetId, tableId, null,
                new ListRowsOptions { PageSize = pageSize });
            // If there are more rows than were returned in the first page of results, 
            // iterating over the rows will lazily evaluate the results each time, 
            // making further requests as necessary.
            foreach (var row in result)
            {
                Console.WriteLine($"{row["title"]}: {row["unique_words"]}");
                recordCount++;
            }
            return recordCount;
        }
Example #42
        // [END sync_query]

        // [START sync_query_legacy_sql]
        public BigQueryResults LegacySqlSyncQuery(string projectId, string datasetId,
            string tableId, string query, double timeoutMs, BigQueryClient client)
        {
            var table = client.GetTable(projectId, datasetId, tableId);
            BigQueryJob job = client.CreateQueryJob(query,
                parameters: null,
                options: new QueryOptions { UseLegacySql = true });
            // Get the query result, waiting for the timespan specified in milliseconds.
            BigQueryResults result = client.GetQueryResults(job.Reference.JobId,
                new GetQueryResultsOptions { Timeout = TimeSpan.FromMilliseconds(timeoutMs) });

            return result;
        }
        // [END delete_table]

        // [START import_file_from_gcs]
        public void ImportDataFromCloudStorage(string projectId, string datasetId,
            string tableId, BigQueryClient client, string fileName, string folder = null)
        {
            StorageClient gcsClient = StorageClient.Create();

            using (var stream = new MemoryStream())
            {
                // Set Cloud Storage Bucket name. This uses a bucket named the same as the project.
                string bucket = projectId;
                // If folder is passed in, add it to Cloud Storage File Path using "/" character
                string filePath = string.IsNullOrEmpty(folder) ? fileName : folder + "/" + fileName;
                // Download the Google Cloud Storage object into the stream.
                gcsClient.DownloadObject(bucket, filePath, stream);
                // Rewind the stream so the upload reads it from the beginning.
                stream.Position = 0;

                // This example uploads data to an existing table. If the upload will create a new table
                // or if the schema in the JSON isn't identical to the schema in the table,
                // create a schema to pass into the call instead of passing in a null value.
                BigQueryJob job = client.UploadJson(datasetId, tableId, null, stream);
                // Use the job to find out when the data has finished being inserted into the table,
                // report errors etc.

                // Wait for the job to complete.
                job.PollUntilCompleted();
            }
        }
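
If the file already lives in Cloud Storage, BigQuery can also load it server-side from the gs:// URI instead of streaming the bytes through the client; a sketch, assuming Google.Cloud.BigQuery.V2's CreateLoadJob with a newline-delimited JSON source:

        public void LoadJsonFromGcsUri(
            string datasetId, string tableId, string gcsUri, BigQueryClient client)
        {
            // gcsUri is e.g. "gs://your-bucket/folder/data.json" (placeholder).
            BigQueryJob job = client.CreateLoadJob(gcsUri,
                client.GetTableReference(datasetId, tableId),
                null, // No schema: the destination table is assumed to exist already.
                new CreateLoadJobOptions { SourceFormat = FileFormat.NewlineDelimitedJson });
            // Wait for the load to complete.
            job.PollUntilCompleted();
        }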
Example #48
        /// <summary>
        /// Handle the intent.
        /// </summary>
        /// <param name="req">The DialogFlow conversation request.</param>
        /// <returns>The spoken response for DialogFlow.</returns>
        public override async Task<string> HandleAsync(ConvRequest req)
        {
            var errorMessage = ExtractAndValidateParameters(req, out string temp, out string year, out string countryCode2,
                                                            out string countryName, out string fipsCountry);

            if (errorMessage != null)
            {
                return DialogflowApp.Tell(errorMessage);
            }

            var bigQueryClient = await BigQueryClient.CreateAsync(Program.AppSettings.GoogleCloudSettings.ProjectId);

            // Build the parameterized SQL query
            var tableGsod     = bigQueryClient.GetTable("bigquery-public-data", "noaa_gsod", $"gsod*");
            var tableStations = bigQueryClient.GetTable("bigquery-public-data", "noaa_gsod", "stations");
            // Fahrenheit to Celsius is (F - 32) * 5/9.
            var sql           = $"SELECT (gsod.temp - 32)*5/9 AS celsius, stations.name AS name, gsod.year AS y, gsod.mo AS m, gsod.da AS d \n" +
                                $"FROM {tableGsod} AS gsod \n" +
                                $"JOIN {tableStations} AS stations ON gsod.stn = stations.usaf AND gsod.wban = stations.wban \n" +
                                $"WHERE stations.country=@fipsCountry and gsod.year=@year \n" +
                                $"ORDER BY gsod.temp{(temp == "hottest" ? " DESC" : "")} \n" +
                                "limit 10";

            // Create the BigQuery parameters
            var parameters = new[]
            {
                new BigQueryParameter("fipsCountry", BigQueryDbType.String, fipsCountry),
                new BigQueryParameter("year", BigQueryDbType.String, year)
            };

            // Show SQL query in browser
            ShowQuery(sql, parameters);

            // Time the BigQuery execution with a StopWatch
            var stopwatch = new Stopwatch();

            stopwatch.Start();

            // Execute BigQuery SQL query. This can take time
            var result = await bigQueryClient.ExecuteQueryAsync(sql, parameters);

            // Query finished, stop the StopWatch
            stopwatch.Stop();

            // Get a job reference, for statistics
            var job = await bigQueryClient.GetJobAsync(result.JobReference);

            // Get result list, and check that there are some results
            var resultList = result.ToList();

            if (resultList.Count == 0)
            {
                return(DialogflowApp.Tell($"Sorry, there is no data for country '{countryName}'"));
            }

            // Time and data statistics
            long   processedMb  = job.Statistics.TotalBytesProcessed.Value / (1024 * 1024);
            double secs         = stopwatch.Elapsed.TotalSeconds;
            var    temperatures = resultList
                                  .Select(x => $"{(double)x["celsius"]:0.0}&deg;C at {x["name"]} on {x["y"]}-{x["m"]}-{x["d"]}")
                                  .ToList();

            // Show SQL query and query results in browser
            ShowQuery(sql, parameters, (processedMb, secs, temperatures));

            // Send spoken response to DialogFlow
            var top = resultList[0];

            return DialogflowApp.Tell($"Scanned {processedMb} megabytes in {secs:0.0} seconds. " +
                                      $"The {temp} temperature in {countryName} in the year {year} was " +
                                      $"{(double)top["celsius"]:0.0} degrees Celsius, at the {top["name"]} monitoring station.");