/// <summary>
/// Test fixture setup: authorizes against BigQuery from embedded client-secrets JSON
/// and builds the <c>BigQueryContext</c> used by the tests.
/// </summary>
public QueryTest()
{
    // auth json! Fill in the client-secrets JSON and target project id before running.
    var json = "";
    var projectId = "";
    using (var ms = new MemoryStream(Encoding.UTF8.GetBytes(json)))
    {
        // Opens a browser for consent on first run; afterwards the refresh token is
        // cached in the "LINQtoBigQuery" FileDataStore.
        // GetAwaiter().GetResult() instead of .Result so a failed authorization
        // surfaces the original exception rather than an AggregateException.
        var authToken = GoogleWebAuthorizationBroker.AuthorizeAsync(
                ms,
                new[] { BigqueryService.Scope.Bigquery },
                "user",
                CancellationToken.None,
                new FileDataStore(@"LINQtoBigQuery"))
            .GetAwaiter().GetResult();

        var service = new BigqueryService(new BaseClientService.Initializer
        {
            ApplicationName = "LINQ to BigQuery",
            HttpClientInitializer = authToken
        });

        var rowsParser = new DeserializerRowsParser();
        context = new BigQueryContext(rowsParser, service, projectId);
    }
}
// NOTE(review): this sample targets the legacy Google.Apis OAuth2 stack
// (NativeApplicationClient / OAuth2Authenticator / Fetch()), which predates
// GoogleWebAuthorizationBroker. Kept as-is because it only compiles against
// that old client library — confirm before porting.
public static void Main(string[] args)
{
    // Register an authenticator.
    var provider = new NativeApplicationClient(GoogleAuthenticationServer.Description);
    provider.ClientIdentifier = clientId;
    provider.ClientSecret = clientSecret;
    // Initiate an OAuth 2.0 flow to get an access token
    var auth = new OAuth2Authenticator<NativeApplicationClient>(provider, GetAuthorization);
    // Create the service.
    var service = new BigqueryService(auth);
    JobsResource j = service.Jobs;
    QueryRequest qr = new QueryRequest();
    qr.Query = query;
    // Synchronous query: Fetch() blocks until the response arrives.
    QueryResponse response = j.Query(qr, projectId).Fetch();
    // Each row's F collection holds one cell (V) per schema column,
    // printed here tab-separated, one line per row.
    foreach (TableRow row in response.Rows)
    {
        List<string> list = new List<string>();
        foreach (TableRow.FData field in row.F)
        {
            list.Add(field.V);
        }
        Console.WriteLine(String.Join("\t", list));
    }
    Console.WriteLine("\nPress enter to exit");
    Console.ReadLine();
}
/// <summary>
/// Streams data into BigQuery one record at a time without needing to run a load job. Requires the WRITER dataset role.
/// Documentation https://developers.google.com/bigquery/v2/reference/tabledata/insertAll
/// Generation Note: This does not always build correctly. Google needs to standardise things; I need to figure out which ones are wrong.
/// </summary>
/// <param name="service">Authenticated Bigquery service.</param>
/// <param name="projectId">Project ID of the destination table.</param>
/// <param name="datasetId">Dataset ID of the destination table.</param>
/// <param name="tableId">Table ID of the destination table.</param>
/// <param name="body">A valid Bigquery v2 body.</param>
/// <returns>TableDataInsertAllResponseResponse</returns>
/// <exception cref="Exception">Wraps any failure, including argument validation.</exception>
public static TableDataInsertAllResponse InsertAll(BigqueryService service, string projectId, string datasetId, string tableId, TableDataInsertAllRequest body)
{
    try
    {
        // Initial validation. nameof(...) is the parameter *name*; the previous
        // code passed the argument value, which yields a wrong message — and a
        // null paramName exactly when the argument was null.
        if (service == null)
        {
            throw new ArgumentNullException(nameof(service));
        }
        if (body == null)
        {
            throw new ArgumentNullException(nameof(body));
        }
        if (projectId == null)
        {
            throw new ArgumentNullException(nameof(projectId));
        }
        if (datasetId == null)
        {
            throw new ArgumentNullException(nameof(datasetId));
        }
        if (tableId == null)
        {
            throw new ArgumentNullException(nameof(tableId));
        }

        // Make the request.
        return service.Tabledata.InsertAll(body, projectId, datasetId, tableId).Execute();
    }
    catch (Exception ex)
    {
        throw new Exception("Request Tabledata.InsertAll failed.", ex);
    }
}
/// <summary>
/// Runs a user-supplied query via the installed-app OAuth flow and copies the
/// result rows into a <see cref="DataTable"/>.
/// </summary>
private static void Main()
{
    // The client secrets file must sit next to the executable.
    UserCredential credential;
    using (var stream = new FileStream("client_secrets.json", FileMode.Open, FileAccess.Read))
    {
        credential = GoogleWebAuthorizationBroker.AuthorizeAsync(
            GoogleClientSecrets.Load(stream).Secrets,
            new[] { BigqueryService.Scope.Bigquery },
            "user",
            CancellationToken.None).Result;
    }

    // Create and initialize the Bigquery service. Use the Project Name value
    // from the New Project window for the ApplicationName variable.
    var service = new BigqueryService(new BaseClientService.Initializer()
    {
        HttpClientInitializer = credential,
        ApplicationName = "PROJECT NAME"
    });

    var request = new QueryRequest { Query = "YOUR QUERY" };
    var table = new DataTable();
    QueryResponse response = service.Jobs.Query(request, "PROJECT ID").Execute();
    if (response == null)
    {
        Console.WriteLine("Response is null");
    }
    else
    {
        // Mirror the result schema as DataTable columns, then copy each row cell by cell.
        int columnCount = response.Schema.Fields.Count;
        foreach (var field in response.Schema.Fields)
        {
            table.Columns.Add(field.Name);
        }
        foreach (TableRow row in response.Rows)
        {
            DataRow dataRow = table.NewRow();
            for (int i = 0; i < columnCount; i++)
            {
                dataRow[i] = row.F[i].V;
            }
            table.Rows.Add(dataRow);
        }
    }
}
// [END print_results]
/// <summary>
/// Entry point: runs the Shakespeare sample query against the project id given
/// on the command line, or prints usage when no argument is supplied.
/// </summary>
private static void Main(string[] args)
{
    var sample = new BigquerySample();
    if (args.Length == 0)
    {
        Console.WriteLine(usage);
    }
    else
    {
        string projectId = args[0];
        // Create a new Bigquery client authorized via Application Default
        // Credentials.
        BigqueryService bigquery = sample.CreateAuthorizedClient();
        IList<TableRow> rows = sample.ExecuteQuery(
            "SELECT TOP(corpus, 10) as title, COUNT(*) as unique_words " +
            "FROM [publicdata:samples.shakespeare]",
            bigquery, projectId);
        sample.PrintResults(rows);
    }
    Console.WriteLine("\nPress any key...");
    Console.ReadKey();
}
/// <summary>
/// Synchronously creates a <see cref="BigqueryClient"/>, using application default credentials if
/// no credentials are specified.
/// </summary>
/// <remarks>
/// The credentials are scoped as necessary.
/// </remarks>
/// <param name="projectId">The ID of the project the client operates on. Must not be null.</param>
/// <param name="credential">Optional <see cref="GoogleCredential"/>.</param>
/// <returns>The created <see cref="BigqueryClient"/>.</returns>
public static BigqueryClient Create(string projectId, GoogleCredential credential = null)
{
    GaxRestPreconditions.CheckNotNull(projectId, nameof(projectId));
    if (credential == null)
    {
        try
        {
            // Task.Run avoids deadlocking on a synchronization context while we block.
            credential = Task.Run(() => GoogleCredential.GetApplicationDefaultAsync()).Result;
        }
        catch (AggregateException e)
        {
            // Unwrap the first exception, a bit like await would.
            // It's very unlikely that we'd ever see an AggregateException without an inner exceptions,
            // but let's handle it relatively gracefully.
            throw e.InnerExceptions.FirstOrDefault() ?? e;
        }
    }
    if (credential.IsCreateScopedRequired)
    {
        credential = credential.CreateScoped(
            // FIXME: Work out which of these we actually need.
            BigqueryService.Scope.Bigquery,
            BigqueryService.Scope.BigqueryInsertdata,
            BigqueryService.Scope.DevstorageFullControl,
            BigqueryService.Scope.CloudPlatform);
    }
    var restService = new BigqueryService(new BaseClientService.Initializer
    {
        HttpClientInitializer = credential,
        ApplicationName = BigqueryClientImpl.ApplicationName,
    });
    return new BigqueryClientImpl(projectId, restService);
}
/// <summary>
/// Authenticate to Google Using Oauth2
/// Documentation https://developers.google.com/accounts/docs/OAuth2
/// </summary>
/// <param name="clientId">From Google Developer console https://console.developers.google.com</param>
/// <param name="clientSecret">From Google Developer console https://console.developers.google.com</param>
/// <param name="userName">A string used to identify a user.</param>
/// <returns>An authenticated <c>BigqueryService</c>, or null when authentication fails.</returns>
public static BigqueryService AuthenticateOauth(string clientId, string clientSecret, string userName)
{
    string[] scopes = new string[]
    {
        BigqueryService.Scope.Bigquery,              // view and manage your BigQuery data
        BigqueryService.Scope.BigqueryInsertdata,    // insert data into BigQuery
        BigqueryService.Scope.CloudPlatform,         // view and manage your data across cloud platform services
        BigqueryService.Scope.DevstorageFullControl, // manage your data on cloud platform services
        BigqueryService.Scope.DevstorageReadOnly,    // view your data on cloud platform services
        BigqueryService.Scope.DevstorageReadWrite    // manage your data on cloud platform services
    };
    try
    {
        // here is where we Request the user to give us access, or use the Refresh Token that was previously stored in %AppData%
        UserCredential credential = GoogleWebAuthorizationBroker.AuthorizeAsync(
            new ClientSecrets { ClientId = clientId, ClientSecret = clientSecret },
            scopes,
            userName,
            CancellationToken.None,
            new FileDataStore("Daimto.BigQuery.Auth.Store")).Result;

        BigqueryService service = new BigqueryService(new BaseClientService.Initializer()
        {
            HttpClientInitializer = credential,
            ApplicationName = "BigQuery API Sample",
        });
        return service;
    }
    catch (Exception ex)
    {
        // Log the full exception: the previous code printed only ex.InnerException,
        // which is null for most exception types and therefore lost all error detail.
        Console.WriteLine(ex);
        return null;
    }
}
/// <summary>
/// Creates the client implementation wrapping the given BigQuery REST service.
/// </summary>
/// <param name="projectId">The default project to target; must not be null.</param>
/// <param name="service">The underlying <see cref="BigqueryService"/>; must not be null.</param>
/// <param name="defaultLocation">Default location value; may be null. Stored as-is on <c>DefaultLocation</c>.</param>
/// <param name="prettyPrint">Stored as-is on <c>PrettyPrint</c> — presumably controls JSON pretty-printing; confirm against callers.</param>
internal BigQueryClientImpl(string projectId, BigqueryService service, string defaultLocation, bool prettyPrint)
{
    // Null checks throw ArgumentNullException via the GAX precondition helpers.
    ProjectId = GaxPreconditions.CheckNotNull(projectId, nameof(projectId));
    Service = GaxPreconditions.CheckNotNull(service, nameof(service));
    DefaultLocation = defaultLocation;
    PrettyPrint = prettyPrint;
}
/// <summary>
/// Authenticate to Google Using Oauth2
/// Documentation https://developers.google.com/accounts/docs/OAuth2
/// </summary>
/// <param name="clientId">From Google Developer console https://console.developers.google.com</param>
/// <param name="clientSecret">From Google Developer console https://console.developers.google.com</param>
/// <param name="userName">A string used to identify a user.</param>
/// <returns>An authenticated <c>BigqueryService</c>, or null when authentication fails.</returns>
public static BigqueryService AuthenticateOauth(string clientId, string clientSecret, string userName)
{
    string[] scopes = new string[]
    {
        BigqueryService.Scope.Bigquery,              // view and manage your BigQuery data
        BigqueryService.Scope.BigqueryInsertdata,    // insert data into BigQuery
        BigqueryService.Scope.CloudPlatform,         // view and manage your data across cloud platform services
        BigqueryService.Scope.DevstorageFullControl, // manage your data on cloud platform services
        BigqueryService.Scope.DevstorageReadOnly,    // view your data on cloud platform services
        BigqueryService.Scope.DevstorageReadWrite    // manage your data on cloud platform services
    };
    try
    {
        // here is where we Request the user to give us access, or use the Refresh Token that was previously stored in %AppData%
        UserCredential credential = GoogleWebAuthorizationBroker.AuthorizeAsync(
            new ClientSecrets { ClientId = clientId, ClientSecret = clientSecret },
            scopes,
            userName,
            CancellationToken.None,
            new FileDataStore("Daimto.BigQuery.Auth.Store")).Result;

        BigqueryService service = new BigqueryService(new BaseClientService.Initializer()
        {
            HttpClientInitializer = credential,
            ApplicationName = "BigQuery API Sample",
        });
        return service;
    }
    catch (Exception ex)
    {
        // Log the full exception: the previous code printed only ex.InnerException,
        // which is null for most exception types and therefore lost all error detail.
        Console.WriteLine(ex);
        return null;
    }
}
/// <summary>
/// Lists all tables in the specified dataset. Requires the READER dataset role.
/// Documentation https://developers.google.com/bigquery/v2/reference/tables/list
/// Generation Note: This does not always build correctly. Google needs to standardise things; I need to figure out which ones are wrong.
/// </summary>
/// <param name="service">Authenticated Bigquery service.</param>
/// <param name="projectId">Project ID of the tables to list</param>
/// <param name="datasetId">Dataset ID of the tables to list</param>
/// <param name="optional">Optional parameters.</param>
/// <returns>TableListResponse</returns>
/// <exception cref="Exception">Wraps any failure, including argument validation.</exception>
public static TableList List(BigqueryService service, string projectId, string datasetId, TablesListOptionalParms optional = null)
{
    try
    {
        // Initial validation. nameof(...) is the parameter *name*; the previous
        // code passed the argument value, producing a wrong message — and a null
        // paramName exactly when the argument was null.
        if (service == null)
        {
            throw new ArgumentNullException(nameof(service));
        }
        if (projectId == null)
        {
            throw new ArgumentNullException(nameof(projectId));
        }
        if (datasetId == null)
        {
            throw new ArgumentNullException(nameof(datasetId));
        }

        // Building the initial request.
        var request = service.Tables.List(projectId, datasetId);

        // Applying optional parameters to the request.
        request = (TablesResource.ListRequest)SampleHelpers.ApplyOptionalParms(request, optional);

        // Requesting data.
        return request.Execute();
    }
    catch (Exception ex)
    {
        throw new Exception("Request Tables.List failed.", ex);
    }
}
/// <summary>
/// Deletes the table specified by tableId from the dataset. If the table contains data, all the data will be deleted.
/// Documentation https://developers.google.com/bigquery/v2/reference/tables/delete
/// Generation Note: This does not always build correctly. Google needs to standardise things; I need to figure out which ones are wrong.
/// </summary>
/// <param name="service">Authenticated Bigquery service.</param>
/// <param name="projectId">Project ID of the table to delete</param>
/// <param name="datasetId">Dataset ID of the table to delete</param>
/// <param name="tableId">Table ID of the table to delete</param>
/// <exception cref="Exception">Wraps any failure, including argument validation.</exception>
public static void Delete(BigqueryService service, string projectId, string datasetId, string tableId)
{
    try
    {
        // Initial validation. nameof(...) fixes the previous code, which passed
        // the argument *value* as the parameter name.
        if (service == null)
        {
            throw new ArgumentNullException(nameof(service));
        }
        if (projectId == null)
        {
            throw new ArgumentNullException(nameof(projectId));
        }
        if (datasetId == null)
        {
            throw new ArgumentNullException(nameof(datasetId));
        }
        if (tableId == null)
        {
            throw new ArgumentNullException(nameof(tableId));
        }

        // Make the request.
        service.Tables.Delete(projectId, datasetId, tableId).Execute();
    }
    catch (Exception ex)
    {
        throw new Exception("Request Tables.Delete failed.", ex);
    }
}
/// <summary>
/// Updates information in an existing dataset. The update method replaces the entire dataset resource, whereas the patch method only replaces fields that are provided in the submitted dataset resource.
/// Documentation https://developers.google.com/bigquery/v2/reference/datasets/update
/// Generation Note: This does not always build correctly. Google needs to standardise things; I need to figure out which ones are wrong.
/// </summary>
/// <param name="service">Authenticated Bigquery service.</param>
/// <param name="projectId">Project ID of the dataset being updated</param>
/// <param name="datasetId">Dataset ID of the dataset being updated</param>
/// <param name="body">A valid Bigquery v2 body.</param>
/// <returns>DatasetResponse</returns>
/// <exception cref="Exception">Wraps any failure, including argument validation.</exception>
public static Dataset Update(BigqueryService service, string projectId, string datasetId, Dataset body)
{
    try
    {
        // Initial validation. nameof(...) fixes the previous code, which passed
        // the argument *value* as the parameter name.
        if (service == null)
        {
            throw new ArgumentNullException(nameof(service));
        }
        if (body == null)
        {
            throw new ArgumentNullException(nameof(body));
        }
        if (projectId == null)
        {
            throw new ArgumentNullException(nameof(projectId));
        }
        if (datasetId == null)
        {
            throw new ArgumentNullException(nameof(datasetId));
        }

        // Make the request.
        return service.Datasets.Update(body, projectId, datasetId).Execute();
    }
    catch (Exception ex)
    {
        throw new Exception("Request Datasets.Update failed.", ex);
    }
}
/// <summary>
/// Deletes the dataset specified by the datasetId value. Before you can delete a dataset, you must delete all its tables, either manually or by specifying deleteContents. Immediately after deletion, you can create another dataset with the same name.
/// Documentation https://developers.google.com/bigquery/v2/reference/datasets/delete
/// Generation Note: This does not always build correctly. Google needs to standardise things; I need to figure out which ones are wrong.
/// </summary>
/// <param name="service">Authenticated Bigquery service.</param>
/// <param name="projectId">Project ID of the dataset being deleted</param>
/// <param name="datasetId">Dataset ID of dataset being deleted</param>
/// <param name="optional">Optional parameters.</param>
/// <exception cref="Exception">Wraps any failure, including argument validation.</exception>
public static void Delete(BigqueryService service, string projectId, string datasetId, DatasetsDeleteOptionalParms optional = null)
{
    try
    {
        // Initial validation. nameof(...) fixes the previous code, which passed
        // the argument *value* as the parameter name.
        if (service == null)
        {
            throw new ArgumentNullException(nameof(service));
        }
        if (projectId == null)
        {
            throw new ArgumentNullException(nameof(projectId));
        }
        if (datasetId == null)
        {
            throw new ArgumentNullException(nameof(datasetId));
        }

        // Building the initial request.
        var request = service.Datasets.Delete(projectId, datasetId);

        // Applying optional parameters to the request.
        request = (DatasetsResource.DeleteRequest)SampleHelpers.ApplyOptionalParms(request, optional);

        // Requesting data.
        request.Execute();
    }
    catch (Exception ex)
    {
        throw new Exception("Request Datasets.Delete failed.", ex);
    }
}
/// <summary>
/// Returns information about a specific job. Job information is available for a six month period after creation. Requires that you're the person who ran the job, or have the Is Owner project role.
/// Documentation https://developers.google.com/bigquery/v2/reference/jobs/get
/// Generation Note: This does not always build correctly. Google needs to standardise things; I need to figure out which ones are wrong.
/// </summary>
/// <param name="service">Authenticated Bigquery service.</param>
/// <param name="projectId">[Required] Project ID of the requested job</param>
/// <param name="jobId">[Required] Job ID of the requested job</param>
/// <returns>JobResponse</returns>
/// <exception cref="Exception">Wraps any failure, including argument validation.</exception>
public static Job Get(BigqueryService service, string projectId, string jobId)
{
    try
    {
        // Initial validation. nameof(...) fixes the previous code, which passed
        // the argument *value* as the parameter name.
        if (service == null)
        {
            throw new ArgumentNullException(nameof(service));
        }
        if (projectId == null)
        {
            throw new ArgumentNullException(nameof(projectId));
        }
        if (jobId == null)
        {
            throw new ArgumentNullException(nameof(jobId));
        }

        // Make the request.
        return service.Jobs.Get(projectId, jobId).Execute();
    }
    catch (Exception ex)
    {
        throw new Exception("Request Jobs.Get failed.", ex);
    }
}
/// <summary>
/// Runs a BigQuery SQL query synchronously and returns query results if the query completes within a specified timeout.
/// Documentation https://developers.google.com/bigquery/v2/reference/jobs/query
/// Generation Note: This does not always build correctly. Google needs to standardise things; I need to figure out which ones are wrong.
/// </summary>
/// <param name="service">Authenticated Bigquery service.</param>
/// <param name="projectId">Project ID of the project billed for the query</param>
/// <param name="body">A valid Bigquery v2 body.</param>
/// <returns>QueryResponseResponse</returns>
/// <exception cref="Exception">Wraps any failure, including argument validation.</exception>
public static QueryResponse Query(BigqueryService service, string projectId, QueryRequest body)
{
    try
    {
        // Initial validation. nameof(...) fixes the previous code, which passed
        // the argument *value* as the parameter name.
        if (service == null)
        {
            throw new ArgumentNullException(nameof(service));
        }
        if (body == null)
        {
            throw new ArgumentNullException(nameof(body));
        }
        if (projectId == null)
        {
            throw new ArgumentNullException(nameof(projectId));
        }

        // Make the request.
        return service.Jobs.Query(body, projectId).Execute();
    }
    catch (Exception ex)
    {
        throw new Exception("Request Jobs.Query failed.", ex);
    }
}
/// <summary>
/// Retrieves the results of a query job.
/// Documentation https://developers.google.com/bigquery/v2/reference/jobs/getQueryResults
/// Generation Note: This does not always build correctly. Google needs to standardise things; I need to figure out which ones are wrong.
/// </summary>
/// <param name="service">Authenticated Bigquery service.</param>
/// <param name="projectId">[Required] Project ID of the query job</param>
/// <param name="jobId">[Required] Job ID of the query job</param>
/// <param name="optional">Optional parameters.</param>
/// <returns>GetQueryResultsResponseResponse</returns>
/// <exception cref="Exception">Wraps any failure, including argument validation.</exception>
public static GetQueryResultsResponse GetQueryResults(BigqueryService service, string projectId, string jobId, JobsGetQueryResultsOptionalParms optional = null)
{
    try
    {
        // Initial validation. nameof(...) fixes the previous code, which passed
        // the argument *value* as the parameter name.
        if (service == null)
        {
            throw new ArgumentNullException(nameof(service));
        }
        if (projectId == null)
        {
            throw new ArgumentNullException(nameof(projectId));
        }
        if (jobId == null)
        {
            throw new ArgumentNullException(nameof(jobId));
        }

        // Building the initial request.
        var request = service.Jobs.GetQueryResults(projectId, jobId);

        // Applying optional parameters to the request.
        request = (JobsResource.GetQueryResultsRequest)SampleHelpers.ApplyOptionalParms(request, optional);

        // Requesting data.
        return request.Execute();
    }
    catch (Exception ex)
    {
        throw new Exception("Request Jobs.GetQueryResults failed.", ex);
    }
}
/// <summary>
/// Builds an authorized <c>BigQueryContext</c> from in-memory client-secrets JSON,
/// caching the per-project OAuth refresh token in a local FileDataStore.
/// </summary>
/// <param name="json">Client-secrets JSON content.</param>
/// <param name="user">Identifier for the user whose token is cached.</param>
/// <param name="projectId">BigQuery project id; also keys the local token cache.</param>
public static BigQueryContext GetContext(string json, string user, string projectId)
{
    BigQueryContext context;
    using (var secretsStream = new MemoryStream(Encoding.UTF8.GetBytes(json)))
    {
        // Open Browser, Accept Auth
        var userCredential = GoogleWebAuthorizationBroker.AuthorizeAsync(
                secretsStream,
                new[] { BigqueryService.Scope.Bigquery },
                user,
                CancellationToken.None,
                new FileDataStore(@"LINQ-to-BigQuery-for-" + projectId)) // localcache
            .GetAwaiter().GetResult();

        var bigquery = new BigqueryService(new BaseClientService.Initializer
        {
            ApplicationName = "LINQ to BigQuery",
            HttpClientInitializer = userCredential
        });

        context = new BigQueryContext(bigquery, projectId);
    }
    // Timeout or other options
    context.TimeoutMs = (long)TimeSpan.FromMinutes(3).TotalMilliseconds;
    return context;
}
/// <summary>Creates the BigQuery table described by this instance's project/dataset/table ids.</summary>
/// <param name="service">Authenticated Bigquery service.</param>
/// <param name="fields">Schema fields for the new table.</param>
/// <param name="description">[Optional] A user-friendly description of this table.</param>
/// <param name="expirationTime">
/// <para>[Optional] The time when this table expires, in milliseconds since the epoch.</para>
/// <para>If not present, the table will persist indefinitely. Expired tables will</para>
/// <para>be deleted and their storage reclaimed.</para>
/// </param>
/// <param name="friendlyName">[Optional] A descriptive name for this table.</param>
public void CreateTable(BigqueryService service, TableFieldSchema[] fields, string description = null, long?expirationTime = null, string friendlyName = null)
{
    var table = new Table()
    {
        Description = description,
        ExpirationTime = expirationTime,
        FriendlyName = friendlyName,
        TableReference = new TableReference
        {
            ProjectId = this.project_id,
            DatasetId = this.dataset_id,
            TableId = this.table_id,
        },
        Schema = new TableSchema() { Fields = fields }
    };
    var created = service.Tables.Insert(table, this.project_id, this.dataset_id).Execute();

    // Record the server-assigned timestamps when the API returned them.
    if (created.CreationTime.HasValue)
    {
        this.creation_time = created.CreationTime.Value;
    }
    if (created.LastModifiedTime.HasValue)
    {
        this.last_modified_time = created.LastModifiedTime.Value;
    }
}
/// <summary>
/// Builds an authorized <c>BigQueryContext</c> from in-memory client-secrets JSON,
/// caching the per-project OAuth refresh token in a local FileDataStore.
/// </summary>
/// <param name="json">Client-secrets JSON content.</param>
/// <param name="user">Identifier for the user whose token is cached.</param>
/// <param name="projectId">BigQuery project id; also keys the local token cache.</param>
private static BigQueryContext GetContext(string json, string user, string projectId)
{
    BigQueryContext context;
    using (var ms = new MemoryStream(Encoding.UTF8.GetBytes(json)))
    {
        // Open Browser, Accept Auth
        // GetAwaiter().GetResult() instead of .Result so a failed authorization
        // surfaces the original exception rather than an AggregateException.
        var userCredential = GoogleWebAuthorizationBroker.AuthorizeAsync(
                ms,
                new[] { BigqueryService.Scope.Bigquery },
                user,
                CancellationToken.None,
                new FileDataStore(@"LINQ-to-BigQuery-for-" + projectId)) // localcache
            .GetAwaiter().GetResult();

        var bigquery = new BigqueryService(new BaseClientService.Initializer
        {
            ApplicationName = "LINQ to BigQuery",
            HttpClientInitializer = userCredential
        });

        context = new BigQueryContext(bigquery, projectId);
    }
    // Timeout or other options
    context.TimeoutMs = (long)TimeSpan.FromMinutes(1).TotalMilliseconds;
    return context;
}
/// <summary>
/// Releases the underlying BigQuery service; safe to call multiple times.
/// </summary>
public void Dispose()
{
    _BQSvc?.Dispose();
    _BQSvc = null;
}
/// <inheritdoc />
public override BigQueryClient Build()
{
    Validate();
    var serviceInitializer = CreateServiceInitializer();
    // Install the JSON serializer settings BigQueryClient requires.
    serviceInitializer.Serializer = new NewtonsoftJsonSerializer(BigQueryClient.CreateJsonSerializersSettings());
    return new BigQueryClientImpl(ProjectId, new BigqueryService(serviceInitializer), DefaultLocation);
}
/// <summary>
/// Runs the configured query synchronously against the configured project.
/// </summary>
/// <param name="service">Authenticated Bigquery service.</param>
/// <returns>The raw query response from the Jobs.Query endpoint.</returns>
private QueryResponse ExecuteQuery(BigqueryService service)
{
    var request = new QueryRequest() { Query = query };
    return service.Jobs.Query(request, projectId).Execute();
}
/// <summary>
/// Installs the retry handler into the service, if it doesn't already exist.
/// </summary>
/// <param name="service">The service whose HTTP pipeline receives the shared retry handler.</param>
internal static void Install(BigqueryService service)
{
    // TODO: This isn't safe... we could end up removing the handler while it's
    // in the middle of a request in another thread. It's also inefficient...
    // We need support in ConfigurableMessageHandler for this.
    var messageHandler = service.HttpClient.MessageHandler;
    // Remove-then-add keeps exactly one registration of the s_instance singleton,
    // making repeated Install calls idempotent (subject to the race noted above).
    messageHandler.RemoveUnsuccessfulResponseHandler(s_instance);
    messageHandler.AddUnsuccessfulResponseHandler(s_instance);
}
/// <summary>
/// Creates a <see cref="BigQueryClientImpl"/> over a REST service that uses the
/// supplied (already-scoped) credentials.
/// </summary>
private static BigQueryClient CreateImpl(string projectId, GoogleCredential scopedCredentials)
{
    var initializer = new BaseClientService.Initializer
    {
        HttpClientInitializer = scopedCredentials,
        ApplicationName = BigQueryClientImpl.ApplicationName,
    };
    return new BigQueryClientImpl(projectId, new BigqueryService(initializer));
}
/// <inheritdoc />
public override async Task<BigQueryClient> BuildAsync(CancellationToken cancellationToken = default)
{
    Validate();
    var serviceInitializer = await CreateServiceInitializerAsync(cancellationToken).ConfigureAwait(false);
    // Install the JSON serializer settings BigQueryClient requires.
    serviceInitializer.Serializer = new NewtonsoftJsonSerializer(BigQueryClient.CreateJsonSerializersSettings());
    return new BigQueryClientImpl(ProjectId, new BigqueryService(serviceInitializer), DefaultLocation);
}
// [END list_datasets]
/// <summary>
/// Lists all Projects.
/// </summary>
// [START list_projects]
public IEnumerable<ProjectList.ProjectsData> ListProjects()
{
    var request = new ProjectsResource.ListRequest(CreateAuthorizedClient());
    // Sometimes Projects will be null instead of an empty list.
    // It's easy to forget that and dereference null. So, catch
    // that case and return an empty list.
    var projects = request.Execute().Projects;
    return projects ?? new ProjectList.ProjectsData[] { };
}
/// <summary>
/// Runs a SQL query and collects all result pages into a <see cref="DataTable"/>.
/// </summary>
/// <param name="bqservice">Authenticated Bigquery service.</param>
/// <param name="ProjectID">Project billed for the query.</param>
/// <param name="sSql">The SQL text to execute.</param>
/// <returns>A DataTable with one column per schema field and one row per result row.</returns>
public DataTable ExecuteSQLQuery(BigqueryService bqservice, String ProjectID, string sSql)
{
    var queryRequest = new QueryRequest { Query = sSql };
    QueryResponse queryResponse = bqservice.Jobs.Query(queryRequest, ProjectID).Execute();

    var dt = new DataTable();
    string pageToken = null;

    while (true)
    {
        // Re-issue GetQueryResults each iteration. The previous implementation
        // slept in a loop on a *stale* response's JobComplete flag, which never
        // changes — an infinite loop whenever the job wasn't already complete.
        var resultReq = bqservice.Jobs.GetQueryResults(queryResponse.JobReference.ProjectId, queryResponse.JobReference.JobId);
        resultReq.PageToken = pageToken;
        var result = resultReq.Execute();

        // JobComplete is a nullable bool; anything other than true means the job
        // is still running, so wait and poll again with a fresh request.
        if (result.JobComplete != true)
        {
            Thread.Sleep(1000);
            continue;
        }

        // Create the columns once, from the first completed page's schema.
        if (dt.Columns.Count == 0)
        {
            foreach (var column in result.Schema.Fields)
            {
                dt.Columns.Add(column.Name);
            }
        }

        // Rows can be null on an empty result set.
        if (result.Rows != null)
        {
            foreach (TableRow row in result.Rows)
            {
                DataRow dr = dt.NewRow();
                for (var i = 0; i < dt.Columns.Count; i++)
                {
                    dr[i] = row.F[i].V;
                }
                dt.Rows.Add(dr);
            }
        }

        // A null page token marks the final page.
        pageToken = result.PageToken;
        if (pageToken == null)
        {
            break;
        }
    }

    return dt;
}
// [END build_service]
// [START run_query]
/// <summary>
/// Executes the given query synchronously.
/// </summary>
/// <param name="querySql">the query to execute.</param>
/// <param name="bigquery">the BigquerService object.</param>
/// <param name="projectId">the id of the project under which to run the query.</param>
/// <returns>a list of the results of the query.</returns>
private static async Task<IList<TableRow>> ExecuteQueryAsync(
    string querySql, BigqueryService bigquery, string projectId)
{
    // Build an insert-job request whose only configuration is the query itself.
    JobConfigurationQuery queryConfig = new JobConfigurationQuery
    {
        Query = querySql
    };
    JobConfiguration config = new JobConfiguration
    {
        Query = queryConfig
    };
    Job job = new Job
    {
        Configuration = config
    };
    JobsResource.InsertRequest insert = bigquery.Jobs.Insert(job, projectId);
    JobReference jobRef = (await insert.ExecuteAsync()).JobReference;
    // NOTE(review): only the first page of results is fetched and
    // queryResult.JobComplete is never checked — Rows may be null for a
    // long-running query. Fine for a sample; confirm before production use.
    GetQueryResultsResponse queryResult = await bigquery.Jobs.GetQueryResults(projectId, jobRef.JobId).ExecuteAsync();
    return queryResult.Rows;
}
/// <summary>
/// Lists up to 100 projects visible to the authenticated user.
/// Documentation: https://cloud.google.com/bigquery/docs/reference/v2/projects/list
/// </summary>
/// <param name="service">Authenticated Bigquery service.</param>
/// <returns>The processed project list.</returns>
public static ProjectList listProjects(BigqueryService service)
{
    var listRequest = service.Projects.List();
    listRequest.MaxResults = 100;
    return ProcessResults(listRequest);
}
// [END sync_query]
/// <summary>Inserts an asynchronous query Job for a particular query.</summary>
/// <param name="bigquery">an authorized BigQuery client</param>
/// <param name="projectId">a string containing the project ID</param>
/// <param name="querySql">the actual query string</param>
/// <param name="batch">true if you want to run the query as BATCH</param>
/// <returns>a reference to the inserted query job</returns>
// [START async_query]
public static Job AsyncQuery(BigqueryService bigquery, string projectId, string querySql, bool batch)
{
    var job = new Job
    {
        Configuration = new JobConfiguration
        {
            Query = new JobConfigurationQuery
            {
                Query = querySql,
                // A null priority leaves the service default in effect.
                Priority = batch ? "BATCH" : null
            }
        }
    };
    return bigquery.Jobs.Insert(job, projectId).Execute();
}
// [END build_service]
// [START run_query]
/// <summary>
/// Executes the given query synchronously.
/// </summary>
/// <param name="querySql">the query to execute.</param>
/// <param name="bigquery">the BigquerService object.</param>
/// <param name="projectId">the id of the project under which to run the query.</param>
/// <returns>a list of the results of the query.</returns>
public static async Task<IList<TableRow>> ExecuteQueryAsync(
    string querySql, BigqueryService bigquery, string projectId)
{
    var body = new Google.Apis.Bigquery.v2.Data.QueryRequest()
    {
        Query = querySql,
    };
    var request = new Google.Apis.Bigquery.v2.JobsResource.QueryRequest(bigquery, body, projectId);
    var queryResponse = await request.ExecuteAsync();
    GetQueryResultsResponse queryResult = await bigquery.Jobs
        .GetQueryResults(projectId, queryResponse.JobReference.JobId)
        .ExecuteAsync();
    return queryResult.Rows;
}
// [END poll_job]
/// <summary>
/// Lists all Datasets in a project specified by the projectId.
/// </summary>
/// <param name="projectId">The projectId from which lists the existing
/// Datasets.</param>
// [START list_datasets]
public IEnumerable<DatasetList.DatasetsData> ListDatasets(
    string projectId)
{
    var request = new DatasetsResource.ListRequest(CreateAuthorizedClient(), projectId);
    // Sometimes Datasets will be null instead of an empty list.
    // It's easy to forget that and dereference null. So, catch
    // that case and return an empty list.
    var datasets = request.Execute().Datasets;
    return datasets ?? new DatasetList.DatasetsData[] { };
}
// [END build_service]
/// <summary>
/// Runs the query synchronously and returns the rows reported by getQueryResults.
/// </summary>
/// <param name="querySql">the query to execute.</param>
/// <param name="bigquery">the Bigquery service object.</param>
/// <param name="projectId">the id of the project under which to run the query.</param>
/// <returns>the result rows.</returns>
public IList<TableRow> ExecuteQuery(string querySql, BigqueryService bigquery, string projectId)
{
    var body = new Google.Apis.Bigquery.v2.Data.QueryRequest()
    {
        Query = querySql,
    };
    var queryResponse = new Google.Apis.Bigquery.v2.JobsResource.QueryRequest(bigquery, body, projectId).Execute();
    var queryResult = bigquery.Jobs
        .GetQueryResults(projectId, queryResponse.JobReference.JobId)
        .Execute();
    return queryResult.Rows;
}
// [END build_service]
/// <summary>
/// Pages through the results of an arbitrary Bigquery request.
/// </summary>
/// <param name="bigquery">The bigquery service.</param>
/// <param name="jobRef">The job whose rows will be fetched.</param>
/// <param name="rowsPerPage">How many rows to fetch in each http request?</param>
/// <returns>An IEnumerable of rows.</returns>
// [START paging]
public static IEnumerable<TableRow> GetRows(BigqueryService bigquery, JobReference jobRef, long? rowsPerPage = null)
{
    var request = new JobsResource.GetQueryResultsRequest(
        bigquery, jobRef.ProjectId, jobRef.JobId);
    // A null MaxResults leaves the page size to the service default.
    request.MaxResults = rowsPerPage;
    // Iterator block: each Execute() is issued lazily as the caller enumerates,
    // one HTTP request per page. A page with null Rows is simply skipped.
    do
    {
        var response = request.Execute();
        if (response.Rows != null)
            foreach (var row in response.Rows)
                yield return row;
        // Reuse the same request object, advancing it to the next page;
        // an empty/null PageToken terminates the loop after the last page.
        request.PageToken = response.PageToken;
    } while (!String.IsNullOrEmpty(request.PageToken));
}
/// <summary>
/// Validates the output configuration, loads the table schema, and creates the
/// credentialed BigQuery service used for streaming inserts.
/// </summary>
/// <param name="bqOutputConfiguration">Output settings; ProjectId, DatasetId and TableId are required.</param>
private void Initialize(BigQueryOutputConfiguration bqOutputConfiguration)
{
    Debug.Assert(bqOutputConfiguration != null);
    Debug.Assert(this.healthReporter != null);

    // Required settings. The setting name used to be passed as the exception
    // *message* on its own; give it a real message so failures are self-explanatory.
    if (String.IsNullOrEmpty(bqOutputConfiguration.ProjectId))
    {
        throw new ArgumentException("ProjectId must not be empty.", nameof(bqOutputConfiguration));
    }
    if (String.IsNullOrEmpty(bqOutputConfiguration.DatasetId))
    {
        throw new ArgumentException("DatasetId must not be empty.", nameof(bqOutputConfiguration));
    }
    if (String.IsNullOrEmpty(bqOutputConfiguration.TableId))
    {
        throw new ArgumentException("TableId must not be empty.", nameof(bqOutputConfiguration));
    }

    this.Config = bqOutputConfiguration;

    // Load table schema file
    if (Config.TableSchemaFile != null)
    {
        TableSchema = LoadTableSchema(Config.TableSchemaFile);
    }

    if (TableSchema == null)
    {
        // InvalidOperationException rather than bare Exception: a missing schema
        // is a state problem, not an argument problem.
        throw new InvalidOperationException("table schema not set");
    }

    // Expand table id 1st time within force mode
    ExpandTableIdIfNecessary(force: true);

    // configure finished
    healthReporter.ReportHealthy("TableId: " + TableIdExpanded);

    var scopes = new[]
    {
        BigqueryService.Scope.Bigquery,
        BigqueryService.Scope.BigqueryInsertdata,
        BigqueryService.Scope.CloudPlatform,
        BigqueryService.Scope.DevstorageFullControl
    };
    var credential = GoogleCredential.GetApplicationDefault().CreateScoped(scopes);
    _BQSvc = new BigqueryService(new BaseClientService.Initializer
    {
        HttpClientInitializer = credential,
    });
    _BackOff = new ExponentialBackOff();
}
/// <summary>
/// Lists up to 100 projects visible to the authenticated user.
/// Documentation: https://cloud.google.com/bigquery/docs/reference/v2/projects/list
/// </summary>
/// <param name="service">Authenticated Bigquery service.</param>
/// <returns>The processed project list.</returns>
public static ProjectList listProjects(BigqueryService service)
{
    var listRequest = service.Projects.List();
    listRequest.MaxResults = 100;
    return (ProcessResults(listRequest));
}
/// <summary>
/// Streams a single photo record (Id, Title, Url) into the BigQuery table
/// dsbigquery.Photo using a service-account credential loaded from disk.
/// </summary>
/// <param name="photo">The photo to insert.</param>
/// <returns>"true" on success; otherwise the exception message.</returns>
private string AddNewPhotoBigQuery(Photo photo)
{
    try
    {
        // Service-account credential loaded from the key file on disk.
        GoogleCredential credential;
        using (Stream stream = new FileStream(bigqueryFileKey, FileMode.Open, FileAccess.Read, FileShare.Read))
        {
            credential = GoogleCredential.FromStream(stream);
        }
        string[] scopes = new string[]
        {
            BigqueryService.Scope.Bigquery,
            BigqueryService.Scope.CloudPlatform,
        };
        credential = credential.CreateScoped(scopes);
        BaseClientService.Initializer initializer = new BaseClientService.Initializer()
        {
            HttpClientInitializer = (IConfigurableHttpClientInitializer)credential,
            ApplicationName = bigqueryApplicationName,
            GZipEnabled = true,
        };
        BigqueryService service = new BigqueryService(initializer);

        var rowList = new List<TableDataInsertAllRequest.RowsData>();
        // Check @ https://developers.google.com/bigquery/streaming-data-into-bigquery for InsertId usage
        var row = new TableDataInsertAllRequest.RowsData();
        row.Json = new Dictionary<string, object>();
        row.Json.Add("Id", photo.Id);
        row.Json.Add("Title", photo.Title);
        row.Json.Add("Url", photo.Url);
        rowList.Add(row);

        var content = new TableDataInsertAllRequest();
        content.Rows = rowList;
        content.Kind = "bigquery#tableDataInsertAllRequest";
        // Tolerate schema drift: unknown fields are ignored and invalid rows
        // skipped instead of failing the whole request.
        content.IgnoreUnknownValues = true;
        content.SkipInvalidRows = true;

        // NOTE(review): the response is never inspected; per-row insert errors are
        // reported in the response body, not via exceptions — confirm this is intended.
        var requestResponse = service.Tabledata.InsertAll(content, bigqueryProjectId, "dsbigquery", "Photo").Execute();
        return("true");
    }
    catch (Exception ex)
    {
        return(ex.Message);
    }
}
/// <summary>
/// Lists all the datasets in the specified project to which the caller has
/// read access; a project owner can list (but not necessarily get) all
/// datasets in the project.
///
/// Documentation: https://cloud.google.com/bigquery/docs/reference/v2/datasets/list
/// </summary>
/// <param name="service">Authenticated BigQuery service.</param>
/// <param name="projectId">Project whose datasets are listed.</param>
/// <param name="optionalValues">Optional paging/visibility settings; null selects a default page size of 100.</param>
/// <returns>The dataset list returned by the API.</returns>
public static DatasetList list(BigqueryService service, string projectId, OptionalValues optionalValues)
{
    var listRequest = service.Datasets.List(projectId);

    if (optionalValues != null)
    {
        listRequest.MaxResults = optionalValues.MaxResults;
        listRequest.All = optionalValues.ShowHidden;
    }
    else
    {
        // No options supplied: default to 100 results per page.
        listRequest.MaxResults = 100;
    }

    return ProcessResults(listRequest);
}
// BigQuery
/// <summary>
/// Builds an authenticated BigQuery service from app-config settings:
/// a service-account certificate (next to the executing assembly) plus
/// the account email and application name.
/// </summary>
/// <returns>An authenticated <see cref="BigqueryService"/>.</returns>
public static BigqueryService GetService()
{
    // All settings come from the application configuration file.
    var certificateFileName = ConfigurationManager.AppSettings["certificateFileName"];
    var certificatePassword = ConfigurationManager.AppSettings["certificatePassword"];
    var serviceAccountEmail = ConfigurationManager.AppSettings["serviceAccountEmail"];
    var applicationName = ConfigurationManager.AppSettings["applicationName"];

    // The certificate sits alongside the executing assembly.
    var assemblyDirectory = new FileInfo(Assembly.GetExecutingAssembly().Location).Directory.FullName;
    var certificatePath = Path.Combine(assemblyDirectory, certificateFileName);
    var certificate = new X509Certificate2(certificatePath, certificatePassword, X509KeyStorageFlags.Exportable);

    var initializer = new ServiceAccountCredential.Initializer(serviceAccountEmail)
    {
        Scopes = new[]
        {
            BigqueryService.Scope.Bigquery,
            BigqueryService.Scope.BigqueryInsertdata
        },
    }.FromCertificate(certificate);
    var credential = new ServiceAccountCredential(initializer);

    return new BigqueryService(new BaseClientService.Initializer
    {
        ApplicationName = applicationName,
        HttpClientInitializer = credential
    });
}
// [END paging]
/// <summary>Perform the given query using the synchronous api.</summary>
/// <param name="bigquery">Authenticated BigQuery service.</param>
/// <param name="projectId">project id from developer console</param>
/// <param name="queryString">query to run</param>
/// <param name="timeoutMs">Timeout in milliseconds before we abort</param>
/// <returns>The results of the query</returns>
/// <exception cref="TimeoutException">If the request times out.</exception>
// [START sync_query]
public static IEnumerable<TableRow> SyncQuery(BigqueryService bigquery, string projectId, string queryString, long timeoutMs)
{
    var queryBody = new QueryRequest()
    {
        Query = queryString,
        TimeoutMs = timeoutMs,
    };
    var response = new JobsResource.QueryRequest(bigquery, queryBody, projectId).Execute();

    // JobComplete false (or null) means the server gave up waiting before
    // the query finished.
    if (response.JobComplete != true)
    {
        throw new TimeoutException();
    }

    // Rows may be null for an empty result set; normalize to an empty array
    // so callers can enumerate without a null check.
    return response.Rows ?? new TableRow[] { };
}
// [END list_datasets]
/// <summary>
/// Lists all Projects.
/// </summary>
/// <param name="bigquery">The BigQuery object.</param>
// [START list_projects]
public static IEnumerable<ProjectList.ProjectsData> ListProjects(BigqueryService bigquery)
{
    var listRequest = new ProjectsResource.ListRequest(bigquery);
    var projects = listRequest.Execute().Projects;

    // The API sometimes returns null instead of an empty list, and it is
    // easy to forget that and dereference null — normalize it here.
    return projects ?? new ProjectList.ProjectsData[] { };
}
// [END poll_job]
/// <summary>
/// Lists all Datasets in a project specified by the projectId.
/// </summary>
/// <param name="bigquery">The BigQuery object.</param>
/// <param name="projectId">The projectId from which lists the existing Datasets.
/// </param>
// [START list_datasets]
public static IEnumerable<DatasetList.DatasetsData> ListDatasets(BigqueryService bigquery, string projectId)
{
    var listRequest = new DatasetsResource.ListRequest(bigquery, projectId);
    var datasets = listRequest.Execute().Datasets;

    // The API sometimes returns null instead of an empty list, and it is
    // easy to forget that and dereference null — normalize it here.
    return datasets ?? new DatasetList.DatasetsData[] { };
}
/// <summary>
/// Authenticating to Google using a Service account.
/// Documentation: https://developers.google.com/accounts/docs/OAuth2#serviceaccount
/// </summary>
/// <param name="serviceAccountEmail">From Google Developer console https://console.developers.google.com</param>
/// <param name="keyFilePath">Location of the Service account key file downloaded from Google Developer console https://console.developers.google.com</param>
/// <returns>An authenticated <see cref="BigqueryService"/>, or null if the key file is missing or authentication fails.</returns>
public static BigqueryService AuthenticateServiceAccount(string serviceAccountEmail, string keyFilePath)
{
    // Check the key file exists before attempting to load it.
    if (!File.Exists(keyFilePath))
    {
        Console.WriteLine("An Error occurred - Key file does not exist");
        return null;
    }

    string[] scopes = new string[]
    {
        BigqueryService.Scope.Bigquery,              // view and manage your BigQuery data
        BigqueryService.Scope.BigqueryInsertdata,    // insert data into BigQuery
        BigqueryService.Scope.CloudPlatform,         // view and manage your data across cloud platform services
        BigqueryService.Scope.DevstorageFullControl, // manage your data on cloud platform services
        BigqueryService.Scope.DevstorageReadOnly,    // view your data on cloud platform services
        BigqueryService.Scope.DevstorageReadWrite    // manage your data on cloud platform services
    };

    // "notasecret" is the fixed password Google uses for downloaded .p12 keys.
    var certificate = new X509Certificate2(keyFilePath, "notasecret", X509KeyStorageFlags.Exportable);

    try
    {
        ServiceAccountCredential credential = new ServiceAccountCredential(
            new ServiceAccountCredential.Initializer(serviceAccountEmail)
            {
                Scopes = scopes
            }.FromCertificate(certificate));

        // Create the service.
        BigqueryService service = new BigqueryService(new BaseClientService.Initializer()
        {
            HttpClientInitializer = credential,
            ApplicationName = "BigQuery API Sample",
        });
        return service;
    }
    catch (Exception ex)
    {
        // BUG FIX: the original printed ex.InnerException, which is usually
        // null and hides the actual failure. Print the exception itself so
        // the message and stack trace are visible.
        Console.WriteLine(ex);
        return null;
    }
}