// [END custom_attribute_job]

// [START custom_attribute_filter_string_value]
public static void FiltersOnStringValueCustomAttribute()
{
    RequestMetadata requestMetadata = new RequestMetadata()
    {
        UserId = "HashedUserId",
        SessionId = "HashedSessionId",
        Domain = "www.google.com"
    };

    string customAttributeFilter = "NOT EMPTY(custom_attribute)";
    JobQuery jobQuery = new JobQuery()
    {
        CustomAttributeFilter = customAttributeFilter
    };

    SearchJobsRequest searchJobsRequest = new SearchJobsRequest()
    {
        JobQuery = jobQuery,
        RequestMetadata = requestMetadata,
        JobView = "JOB_VIEW_FULL"
    };
    SearchJobsResponse response = jobServiceClient.Projects.Jobs.Search(searchJobsRequest, parent).Execute();
    Console.WriteLine("Searched on custom attribute: " + ToJsonString(response));
}
// [END custom_attribute_filter_long_value]

// [START custom_attribute_filter_multi_attributes]
public static void FiltersOnMultiCustomAttributes()
{
    RequestMetadata requestMetadata = new RequestMetadata()
    {
        UserId = "HashedUserId",
        SessionId = "HashedSessionId",
        Domain = "www.google.com"
    };

    string customAttributeFilter = "(someFieldName1 = \"value1\") "
        + "AND ((255 <= someFieldName2) OR (someFieldName2 <= 213))";
    JobQuery jobQuery = new JobQuery()
    {
        CustomAttributeFilter = customAttributeFilter
    };

    SearchJobsRequest searchJobsRequest = new SearchJobsRequest()
    {
        JobQuery = jobQuery,
        RequestMetadata = requestMetadata,
        JobView = "JOB_VIEW_FULL"
    };
    SearchJobsResponse response = jobServiceClient.Projects.Jobs.Search(searchJobsRequest, parent).Execute();
    Console.WriteLine("Searched on cross-field-filtering: " + ToJsonString(response));
}
// [END compensation_filter]

public static void BasicSearchJobs(string companyName, string query)
{
    RequestMetadata requestMetadata = new RequestMetadata()
    {
        // Make sure to hash your userID
        UserId = "HashedUserId",
        // Make sure to hash the sessionID
        SessionId = "HashedSessionId",
        // Domain of the website where the search is conducted
        Domain = "www.google.com"
    };

    JobQuery jobQuery = new JobQuery()
    {
        Query = query,
        CompanyNames = new List<string> { companyName }
    };

    SearchJobsRequest searchJobRequest = new SearchJobsRequest()
    {
        RequestMetadata = requestMetadata,
        JobQuery = jobQuery,
        SearchMode = "JOB_SEARCH"
    };
    SearchJobsResponse searchJobsResponse = jobServiceClient.Projects.Jobs.Search(searchJobRequest, parent).Execute();
    Console.WriteLine("Jobs searched: " + ToJsonString(searchJobsResponse));
}
internal virtual SearchJobsResponse SearchJobs(SearchJobsRequest request)
{
    var options = new InvokeOptions();
    options.RequestMarshaller = SearchJobsRequestMarshaller.Instance;
    options.ResponseUnmarshaller = SearchJobsResponseUnmarshaller.Instance;

    return Invoke<SearchJobsResponse>(request, options);
}
/// <summary>
/// Searches for Amazon Braket jobs that match the specified filter values.
/// </summary>
/// <param name="request">Container for the necessary parameters to execute the SearchJobs service method.</param>
/// <param name="cancellationToken">
/// A cancellation token that can be used by other objects or threads to receive notice of cancellation.
/// </param>
///
/// <returns>The response from the SearchJobs service method, as returned by Braket.</returns>
/// <exception cref="Amazon.Braket.Model.AccessDeniedException">
/// You do not have sufficient access to perform this action.
/// </exception>
/// <exception cref="Amazon.Braket.Model.InternalServiceException">
/// The request processing has failed because of an unknown error, exception, or failure.
/// </exception>
/// <exception cref="Amazon.Braket.Model.ThrottlingException">
/// The throttling rate limit is met.
/// </exception>
/// <exception cref="Amazon.Braket.Model.ValidationException">
/// The input fails to satisfy the constraints specified by an AWS service.
/// </exception>
/// <seealso href="http://docs.aws.amazon.com/goto/WebAPI/braket-2019-09-01/SearchJobs">REST API Reference for SearchJobs Operation</seealso>
public virtual Task<SearchJobsResponse> SearchJobsAsync(SearchJobsRequest request, System.Threading.CancellationToken cancellationToken = default(CancellationToken))
{
    var options = new InvokeOptions();
    options.RequestMarshaller = SearchJobsRequestMarshaller.Instance;
    options.ResponseUnmarshaller = SearchJobsResponseUnmarshaller.Instance;

    return InvokeAsync<SearchJobsResponse>(request, options, cancellationToken);
}
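// A minimal caller sketch for the async operation above (not part of the SDK source). It assumes
// a configured AmazonBraketClient instance is passed in and uses an empty SearchJobsRequest; the
// point is simply to show awaiting SearchJobsAsync with a cancellation token that expires after
// 30 seconds.
public static async Task SearchJobsExampleAsync(AmazonBraketClient braketClient)
{
    using (var cts = new System.Threading.CancellationTokenSource(TimeSpan.FromSeconds(30)))
    {
        var request = new SearchJobsRequest(); // filters omitted in this sketch
        SearchJobsResponse response = await braketClient.SearchJobsAsync(request, cts.Token);
        Console.WriteLine("SearchJobs HTTP status: " + response.HttpStatusCode);
    }
}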
// [END company_display_name_filter]

// [START compensation_filter]
public static void CompensationSearch(string companyName)
{
    RequestMetadata requestMetadata = new RequestMetadata()
    {
        // Make sure to hash your userID
        UserId = "HashedUserId",
        // Make sure to hash the sessionID
        SessionId = "HashedSessionId",
        // Domain of the website where the search is conducted
        Domain = "www.google.com"
    };

    JobQuery jobQuery = new JobQuery()
    {
        CompensationFilter = new CompensationFilter()
        {
            Type = "UNIT_AND_AMOUNT",
            Units = new List<string> { "HOURLY" },
            Range = new CompensationRange()
            {
                MaxCompensation = new Money()
                {
                    CurrencyCode = "USD",
                    Units = 15L
                },
                MinCompensation = new Money()
                {
                    CurrencyCode = "USD",
                    Units = 10L,
                    Nanos = 500000000
                }
            }
        }
    };
    if (companyName != null)
    {
        jobQuery.CompanyNames = new List<string> { companyName };
    }

    SearchJobsRequest searchJobRequest = new SearchJobsRequest()
    {
        RequestMetadata = requestMetadata,
        JobQuery = jobQuery,
        SearchMode = "JOB_SEARCH"
    };
    SearchJobsResponse searchJobsResponse = jobServiceClient.Projects.Jobs.Search(searchJobRequest, parent).Execute();
    Console.WriteLine("Jobs compensation searched: " + ToJsonString(searchJobsResponse));
}
// [END city_location_search]

// [START multi_location_search]
public static void MultiLocationSearch(string companyName, string location1, double distance1, string location2)
{
    RequestMetadata requestMetadata = new RequestMetadata()
    {
        // Make sure to hash your userID
        UserId = "HashedUserId",
        // Make sure to hash the sessionID
        SessionId = "HashedSessionId",
        // Domain of the website where the search is conducted
        Domain = "www.google.com"
    };

    LocationFilter locationFilter1 = new LocationFilter()
    {
        Address = location1,
        DistanceInMiles = distance1
    };
    LocationFilter locationFilter2 = new LocationFilter()
    {
        Address = location2
    };
    JobQuery jobQuery = new JobQuery()
    {
        LocationFilters = new List<LocationFilter>() { locationFilter1, locationFilter2 }
    };
    if (companyName != null)
    {
        jobQuery.CompanyNames = new List<string> { companyName };
    }

    SearchJobsRequest searchJobRequest = new SearchJobsRequest()
    {
        RequestMetadata = requestMetadata,
        JobQuery = jobQuery,
        SearchMode = "JOB_SEARCH"
    };
    SearchJobsResponse searchJobsResponse = jobServiceClient.Projects.Jobs.Search(searchJobRequest, parent).Execute();
    Console.WriteLine("Jobs multi location searched: " + ToJsonString(searchJobsResponse));
}
// [START histogram_search]
public static void HistogramSearch(String companyName)
{
    RequestMetadata requestMetadata = new RequestMetadata()
    {
        // Make sure to hash your userID
        UserId = "HashedUserId",
        // Make sure to hash the sessionID
        SessionId = "HashedSessionId",
        // Domain of the website where the search is conducted
        Domain = "www.google.com"
    };

    HistogramFacets histogramFacets = new HistogramFacets()
    {
        SimpleHistogramFacets = new List<String> { "COMPANY_ID" },
        CustomAttributeHistogramFacets = new List<CustomAttributeHistogramRequest>
        {
            new CustomAttributeHistogramRequest()
            {
                Key = "someFieldName1",
                StringValueHistogram = true
            }
        }
    };

    SearchJobsRequest searchJobsRequest = new SearchJobsRequest()
    {
        RequestMetadata = requestMetadata,
        SearchMode = "JOB_SEARCH",
        HistogramFacets = histogramFacets
    };
    if (companyName != null)
    {
        searchJobsRequest.JobQuery = new JobQuery()
        {
            CompanyNames = new List<string> { companyName }
        };
    }

    SearchJobsResponse searchJobsResponse = jobServiceClient.Projects.Jobs.Search(searchJobsRequest, parent).Execute();
    Console.WriteLine("Histogram search: " + ToJsonString(searchJobsResponse));
}
/// <summary>Snippet for SearchJobsForAlertAsync</summary>
public async Task SearchJobsForAlertAsync_RequestObject()
{
    // Snippet: SearchJobsForAlertAsync(SearchJobsRequest,CallSettings)
    // Create client
    JobServiceClient jobServiceClient = await JobServiceClient.CreateAsync();
    // Initialize request argument(s)
    SearchJobsRequest request = new SearchJobsRequest
    {
        ParentAsTenantOrProjectNameOneof = TenantOrProjectNameOneof.From(new TenantName("[PROJECT]", "[TENANT]")),
        RequestMetadata = new RequestMetadata(),
    };
    // Make the request
    PagedAsyncEnumerable<SearchJobsResponse, SearchJobsResponse.Types.MatchingJob> response =
        jobServiceClient.SearchJobsForAlertAsync(request);

    // Iterate over all response items, lazily performing RPCs as required
    await response.ForEachAsync((SearchJobsResponse.Types.MatchingJob item) =>
    {
        // Do something with each item
        Console.WriteLine(item);
    });

    // Or iterate over pages (of server-defined size), performing one RPC per page
    await response.AsRawResponses().ForEachAsync((SearchJobsResponse page) =>
    {
        // Do something with each page of items
        Console.WriteLine("A page of results:");
        foreach (SearchJobsResponse.Types.MatchingJob item in page)
        {
            Console.WriteLine(item);
        }
    });

    // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
    int pageSize = 10;
    Page<SearchJobsResponse.Types.MatchingJob> singlePage = await response.ReadPageAsync(pageSize);
    // Do something with the page of items
    Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
    foreach (SearchJobsResponse.Types.MatchingJob item in singlePage)
    {
        Console.WriteLine(item);
    }
    // Store the pageToken, for when the next page is required.
    string nextPageToken = singlePage.NextPageToken;
    // End snippet
}
// [START commute_search]
public static void CommuteSearch(string companyName)
{
    RequestMetadata requestMetadata = new RequestMetadata()
    {
        // Make sure to hash your userID
        UserId = "HashedUserId",
        // Make sure to hash the sessionID
        SessionId = "HashedSessionId",
        // Domain of the website where the search is conducted
        Domain = "www.google.com"
    };

    JobQuery jobQuery = new JobQuery()
    {
        CommuteFilter = new CommuteFilter()
        {
            RoadTraffic = "TRAFFIC_FREE",
            CommuteMethod = "TRANSIT",
            TravelDuration = "1000s",
            StartCoordinates = new LatLng()
            {
                Latitude = 37.42208,
                Longitude = -122.085609
            }
        }
    };
    if (companyName != null)
    {
        jobQuery.CompanyNames = new List<string> { companyName };
    }

    SearchJobsRequest searchJobRequest = new SearchJobsRequest()
    {
        RequestMetadata = requestMetadata,
        JobQuery = jobQuery,
        JobView = "JOB_VIEW_FULL",
        RequirePreciseResultSize = true
    };
    SearchJobsResponse searchJobsResponse = jobServiceClient.Projects.Jobs.Search(searchJobRequest, parent).Execute();
    Console.WriteLine("Jobs commute searched: " + ToJsonString(searchJobsResponse));
}
/// <summary>Snippet for SearchJobsForAlert</summary>
public void SearchJobsForAlert_RequestObject()
{
    // Snippet: SearchJobsForAlert(SearchJobsRequest,CallSettings)
    // Create client
    JobServiceClient jobServiceClient = JobServiceClient.Create();
    // Initialize request argument(s)
    SearchJobsRequest request = new SearchJobsRequest
    {
        ParentAsProjectName = new ProjectName("[PROJECT]"),
        RequestMetadata = new RequestMetadata(),
    };
    // Make the request
    PagedEnumerable<SearchJobsResponse, SearchJobsResponse.Types.MatchingJob> response =
        jobServiceClient.SearchJobsForAlert(request);

    // Iterate over all response items, lazily performing RPCs as required
    foreach (SearchJobsResponse.Types.MatchingJob item in response)
    {
        // Do something with each item
        Console.WriteLine(item);
    }

    // Or iterate over pages (of server-defined size), performing one RPC per page
    foreach (SearchJobsResponse page in response.AsRawResponses())
    {
        // Do something with each page of items
        Console.WriteLine("A page of results:");
        foreach (SearchJobsResponse.Types.MatchingJob item in page)
        {
            Console.WriteLine(item);
        }
    }

    // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
    int pageSize = 10;
    Page<SearchJobsResponse.Types.MatchingJob> singlePage = response.ReadPage(pageSize);
    // Do something with the page of items
    Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
    foreach (SearchJobsResponse.Types.MatchingJob item in singlePage)
    {
        Console.WriteLine(item);
    }
    // Store the pageToken, for when the next page is required.
    string nextPageToken = singlePage.NextPageToken;
    // End snippet
}
// [START job_search_custom_ranking_search]
public static object CustomRankingSearch(string projectId, string tenantId)
{
    JobServiceClient jobServiceClient = JobServiceClient.Create();
    TenantName name = new TenantName(projectId, tenantId);

    string domain = "www.example.com";
    string sessionId = "Hashed session identifier";
    string userId = "Hashed user identifier";
    RequestMetadata requestMetadata = new RequestMetadata
    {
        Domain = domain,
        SessionId = sessionId,
        UserId = userId
    };

    CustomRankingInfo customRankingInfo = new CustomRankingInfo
    {
        ImportanceLevel = ImportanceLevel.Extreme,
        // Custom ranking supports math operators, and the field name can be CPC or Freshness:
        // https://cloud.google.com/talent-solution/job-search/docs/custom-ranking#how_to_use
        RankingExpression = "(someFieldLong + 25) * 0.25"
    };
    string orderBy = "custom_ranking desc";

    SearchJobsRequest request = new SearchJobsRequest
    {
        ParentAsTenantOrProjectNameOneof = TenantOrProjectNameOneof.From(name),
        CustomRankingInfo = customRankingInfo,
        RequestMetadata = requestMetadata,
        OrderBy = orderBy
    };
    var response = jobServiceClient.SearchJobs(request);

    foreach (var result in response)
    {
        Console.WriteLine($"Job summary: {result.JobSummary}");
        Console.WriteLine($"Job title snippet: {result.JobTitleSnippet}");
        Job job = result.Job;
        Console.WriteLine($"Job name: {job.Name}");
        Console.WriteLine($"Job title: {job.Title}");
    }

    return 0;
}
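// The ranking expression in CustomRankingSearch references a numeric custom attribute key,
// "someFieldLong". As a hypothetical illustration (not part of the original sample), a job that
// such an expression could rank against might attach that attribute as below; property names
// follow the v4beta1 CustomAttribute message, and the values are made up.
public static Job BuildJobWithRankingAttribute()
{
    return new Job
    {
        Title = "Software Engineer",
        CustomAttributes =
        {
            ["someFieldLong"] = new CustomAttribute { LongValues = { 42L }, Filterable = true }
        }
    };
}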
// [START job_search_histogram_search]
public static object HistogramSearchJobs(string projectId, string tenantId, string query)
{
    JobServiceClient jobServiceClient = JobServiceClient.Create();
    TenantName name = TenantName.FromProjectTenant(projectId, tenantId);

    string domain = "www.example.com";
    string sessionId = "Hashed session identifier";
    string userId = "Hashed user identifier";
    RequestMetadata requestMetadata = new RequestMetadata
    {
        Domain = domain,
        SessionId = sessionId,
        UserId = userId
    };

    // Examples and formats are explained in the following link:
    // https://cloud.google.com/talent-solution/job-search/docs/reference/rest/v4beta1/projects.tenants.jobs/search#body.request_body.FIELDS.histogram_queries
    HistogramQuery histogramQuery = new HistogramQuery
    {
        HistogramQuery_ = query
    };

    SearchJobsRequest request = new SearchJobsRequest
    {
        ParentAsTenantName = name,
        RequestMetadata = requestMetadata,
    };
    request.HistogramQueries.Add(histogramQuery);
    var response = jobServiceClient.SearchJobs(request);

    foreach (var result in response)
    {
        Console.WriteLine($"Job summary: {result.JobSummary}");
        Console.WriteLine($"Job title snippet: {result.JobTitleSnippet}");
        Job job = result.Job;
        Console.WriteLine($"Job name: {job.Name}");
        Console.WriteLine($"Job title: {job.Title}");
    }

    return 0;
}
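// A hypothetical invocation of the snippet above (the project and tenant IDs are placeholders).
// The histogram expression uses the simple "count(field)" form described at the URL cited in the
// method; adjust the field to match your own data.
public static void RunHistogramSearchExample()
{
    HistogramSearchJobs("your-project-id", "your-tenant-id", "count(admin1)");
}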
// [END employment_types_filter]

// [START date_range_filter]
public static void DateRangeSearch(string companyName, string startTime, string endTime)
{
    RequestMetadata requestMetadata = new RequestMetadata()
    {
        // Make sure to hash your userID
        UserId = "HashedUserId",
        // Make sure to hash the sessionID
        SessionId = "HashedSessionId",
        // Domain of the website where the search is conducted
        Domain = "www.google.com"
    };

    TimestampRange timeStampRange = new TimestampRange()
    {
        StartTime = startTime,
        EndTime = endTime
    };
    JobQuery jobQuery = new JobQuery()
    {
        PublishTimeRange = timeStampRange
    };
    if (companyName != null)
    {
        jobQuery.CompanyNames = new List<string> { companyName };
    }

    SearchJobsRequest searchJobRequest = new SearchJobsRequest()
    {
        RequestMetadata = requestMetadata,
        JobQuery = jobQuery,
        SearchMode = "JOB_SEARCH"
    };
    SearchJobsResponse searchJobsResponse = jobServiceClient.Projects.Jobs.Search(searchJobRequest, parent).Execute();
    Console.WriteLine("Jobs date range searched: " + ToJsonString(searchJobsResponse));
}
// [END featured_job]

// [START search_featured_job]
public static void SearchFeaturedJobs(string companyName)
{
    RequestMetadata requestMetadata = new RequestMetadata()
    {
        // Make sure to hash your userID
        UserId = "HashedUserId",
        // Make sure to hash the sessionID
        SessionId = "HashedSessionId",
        // Domain of the website where the search is conducted
        Domain = "www.google.com"
    };

    JobQuery jobQuery = new JobQuery()
    {
        Query = "Software Engineer"
    };
    if (companyName != null)
    {
        jobQuery.CompanyNames = new List<string> { companyName };
    }

    SearchJobsRequest searchJobRequest = new SearchJobsRequest()
    {
        RequestMetadata = requestMetadata,
        JobQuery = jobQuery,
        SearchMode = "FEATURED_JOB_SEARCH"
    };
    SearchJobsResponse searchJobsResponse = jobServiceClient.Projects.Jobs.Search(searchJobRequest, parent).Execute();
    Console.WriteLine("Featured jobs searched: " + ToJsonString(searchJobsResponse));
}
// [START job_search_commute_search]
public static object CommuteSearchJobs(string projectId, string tenantId)
{
    JobServiceClient jobServiceClient = JobServiceClient.Create();
    TenantName name = TenantName.FromProjectTenant(projectId, tenantId);

    string domain = "www.example.com";
    string sessionId = "Hashed session identifier";
    string userId = "Hashed user identifier";
    RequestMetadata requestMetadata = new RequestMetadata
    {
        Domain = domain,
        SessionId = sessionId,
        UserId = userId
    };

    CommuteMethod commuteMethod = CommuteMethod.Driving;
    long seconds = 3600L;
    Duration travelDuration = new Duration
    {
        Seconds = seconds
    };
    double latitude = 37.422408;
    double longitude = -122.084068;
    LatLng startCoordinates = new LatLng
    {
        Latitude = latitude,
        Longitude = longitude
    };
    CommuteFilter commuteFilter = new CommuteFilter
    {
        CommuteMethod = commuteMethod,
        TravelDuration = travelDuration,
        StartCoordinates = startCoordinates
    };
    JobQuery jobQuery = new JobQuery
    {
        CommuteFilter = commuteFilter
    };

    SearchJobsRequest request = new SearchJobsRequest
    {
        ParentAsTenantName = name,
        RequestMetadata = requestMetadata,
        JobQuery = jobQuery
    };
    var response = jobServiceClient.SearchJobs(request);

    foreach (var result in response)
    {
        Console.WriteLine($"Job summary: {result.JobSummary}");
        Console.WriteLine($"Job title snippet: {result.JobTitleSnippet}");
        Job job = result.Job;
        Console.WriteLine($"Job name: {job.Name}");
        Console.WriteLine($"Job title: {job.Title}");
    }

    return 0;
}
public async Task<ActionResult<SearchJobsModel>> SearchJobs(SearchJobsRequest request)
{
    return Ok(await _mediator.Send(new SearchJobsQuery(request)));
}
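// A minimal sketch (assumed, not taken from the source project) of the MediatR pieces the
// controller action above relies on: SearchJobsQuery wraps the incoming request, and its handler
// forwards the call to a hypothetical ISearchJobsService abstraction over the search backend.
public record SearchJobsQuery(SearchJobsRequest Request) : IRequest<SearchJobsModel>;

public class SearchJobsQueryHandler : IRequestHandler<SearchJobsQuery, SearchJobsModel>
{
    private readonly ISearchJobsService _searchJobsService; // hypothetical dependency

    public SearchJobsQueryHandler(ISearchJobsService searchJobsService)
    {
        _searchJobsService = searchJobsService;
    }

    public Task<SearchJobsModel> Handle(SearchJobsQuery query, CancellationToken cancellationToken)
    {
        // Delegate the search to the service layer and return its model unchanged.
        return _searchJobsService.SearchAsync(query.Request, cancellationToken);
    }
}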