public SalaryPost GetSalaries(FilterBag filters)
{
    List<int> programIDs;
    if (String.IsNullOrEmpty(filters.FieldOfStudy))
    {
        programIDs = db.Programs.Select(program => program.id).ToList();
    }
    else
    {
        var query = db.Programs.Where(program => program.name.Contains(filters.FieldOfStudy));
        programIDs = query.Select(program => program.id).ToList();
    }

    List<int> salaries;
    var salaryQuery = db.GradPlacements.Where(grad => grad.placementSalary != null
        && grad.placementSalary > 1000
        && programIDs.Contains(grad.Program.id));
    salaryQuery = salaryQuery.OrderBy(grad => grad.placementSalary);
    salaries = salaryQuery.Select(grad => (int)grad.placementSalary).ToList();

    // Do not display any statistical information unless there are at least 5 data points.
    if (salaries.Count < 5)
    {
        return null;
    }

    int median = (int)salaries.Median();
    int mean = (int)salaries.Average();
    double stdDev = salaries.StandardDeviation();

    return new SalaryPost()
    {
        Source = "RIT",
        Average = mean,
        Median = median,
        StandardDeviation = stdDev
    };
}
public Dictionary<string, List<AlumniPost>> GetAlumni(FilterBag filters)
{
    Dictionary<string, List<AlumniPost>> alumni = new Dictionary<string, List<AlumniPost>>();

    alumni.Add("Microsoft", new List<AlumniPost>()
    {
        new AlumniPost() { Email = "*****@*****.**", FieldOfStudy = "Software Engineering", Location = new Location() { State = "WA", City = "Bellevue" }, Name = "Colton Presler" },
        new AlumniPost() { Email = "*****@*****.**", FieldOfStudy = "Software Engineering", Location = new Location() { State = "WA", City = "Bellevue" }, Name = "Chris Rosen" },
        new AlumniPost() { Email = "*****@*****.**", FieldOfStudy = "Software Engineering", Location = new Location() { State = "WA", City = "Bellevue" }, Name = "David Lamont" },
        new AlumniPost() { Email = "*****@*****.**", FieldOfStudy = "Computer Science", Location = new Location() { State = "WA", City = "Bellevue" }, Name = "Dan Corrigan" }
    });
    alumni.Add("Microsoft Game Studios", new List<AlumniPost>());

    return alumni;
}
// Please refer to http://www.dice.com/common//content/util/apidoc/jobsearch.html for more info.
// THEIR API IS EXTREMELY BASIC
/// <summary>
/// Builds a URI from the specified filters and parameters to query the Dice.com REST API.
/// </summary>
/// <param name="filters">The filters specific to a particular job search.</param>
/// <param name="Page">Page of results.</param>
/// <param name="ResultsPerPage">The number of results to return each time this method is called.</param>
/// <returns>A string representing the URI for a REST query.</returns>
public String BuildQuery(FilterBag filters, int Page, int ResultsPerPage)
{
    StringBuilder builder = new StringBuilder();

    // Add the job search API base.
    builder.Append("http://service.dice.com/api/rest/jobsearch/v1/simple.xml?country=US&sd=a&sort=1&pgcnt=" + ResultsPerPage + "&page=" + Page);

    String text = "&text=";
    // Since this is perhaps the only field available aside from location and skill,
    // everything else has to be added here.
    if (filters.CompanyName != "")
    {
        text += filters.CompanyName + " ";
    }
    if (filters.FieldOfStudy != "")
    {
        text += filters.FieldOfStudy + " ";
        builder.Append("&skill=" + filters.FieldOfStudy);
    }
    if (filters.JobTitle != "")
    {
        text += filters.JobTitle + " ";
        builder.Append("&skill=" + filters.JobTitle);
    }
    if (filters.Keyword != "")
    {
        text += filters.Keyword + " ";
    }
    if (filters.Location != null && (filters.Location.City != "" || filters.Location.State != "" || filters.Location.ZipCode != ""))
    {
        builder.Append("&city=" + filters.Location.City + ", " + filters.Location.State + " " + filters.Location.ZipCode);
    }
    if (!text.Equals("&text="))
    {
        builder.Append(text);
    }

    return builder.ToString();
}
protected void gridView_CustomCallback(object sender, ASPxGridViewCustomCallbackEventArgs e)
{
    try
    {
        using (SqlConnection conn = new SqlConnection(SPlanetUtil.GetConnectionString()))
        {
            //Create array of Parameters
            List<SqlParameter> arrParm = new List<SqlParameter>
            {
                new SqlParameter("@search_name", SqlDbType.VarChar, 200) { Value = "" },
                new SqlParameter("@id", SqlDbType.Int) { Value = 0 }
            };
            conn.Open();
            dsResult = SqlHelper.ExecuteDataset(conn, "sp_supplier_branch_list", arrParm.ToArray());
            ViewState["dsResult"] = dsResult;
        }

        //Bind data into GridView
        gridView.DataSource = dsResult;
        gridView.FilterExpression = FilterBag.GetExpression(false);
        gridView.DataBind();
    }
    catch (Exception ex)
    {
        string strErrorMsg = SPlanetUtil.LogErrorCollect(ex);
        ScriptManager.RegisterStartupScript(this, GetType(), "myalert", "alert('" + strErrorMsg + "');", true);
    }
}
public static String BuildQuery(FilterBag FilterDict, int Page, int ResultsPerPage)
{
    StringBuilder builder = new StringBuilder();
    builder.Append("http://localhost:57215/api/SymplicityJobs/GetPage/?PageNum=" + Page + "&ResultsPerPage=" + ResultsPerPage);

    String Location = "NONE";
    String Keyword = "NONE";
    String JobTitle = "NONE";
    String Employer = "NONE";

    if (FilterDict.CompanyName != "")
    {
        Employer = FilterDict.CompanyName;
    }
    if (FilterDict.FieldOfStudy != "")
    {
        Keyword = FilterDict.FieldOfStudy;
    }
    if (FilterDict.JobTitle != "")
    {
        JobTitle = FilterDict.JobTitle;
    }
    if (FilterDict.Keyword != "")
    {
        Keyword = FilterDict.Keyword;
    }
    if (FilterDict.Location != null && (FilterDict.Location.City != "" || FilterDict.Location.State != "" || FilterDict.Location.ZipCode != ""))
    {
        Location = FilterDict.Location.City + ", " + FilterDict.Location.State + " " + FilterDict.Location.ZipCode;
    }

    builder.Append("&Location=" + Location + "&Keyword=" + Keyword + "&JobTitle=" + JobTitle + "&Employer=" + Employer);
    return builder.ToString();
}
/// <summary>
/// Grab a set of job postings based on the given filters and page number.
/// </summary>
/// <param name="filters">The filters that the module should use to query.</param>
/// <param name="page">What page number of results to grab.</param>
/// <param name="resultsPerPage">The number of results per page.</param>
/// <returns>The job postings.</returns>
public List<JobPost> GetJobs(FilterBag filters, int page, int resultsPerPage)
{
    List<JobPost> jobsToReturn = new List<JobPost>();

    // Build the URI query.
    string request = BuildQuery(filters, page, resultsPerPage);

    // Get the raw JSON data.
    WebClient wc = new WebClient();
    JArray JobPosts = JArray.Parse(wc.DownloadString(request));

    // Process it into JobPost objects.
    foreach (var jobPost in JobPosts)
    {
        JobPost post = new JobPost();
        post.URL = "https://arizona-csm.symplicity.com/sso/students/index.php?mode=form&id=" + jobPost["SymplicityJobID"] + "&s=jobs&ss=jobs";
        post.SourceModule = source;
        post.DatePosted = DateTime.Parse(jobPost["PostingDate"].ToString());
        post.JobTitle = jobPost["JobTitle"].ToString();

        string[] location = jobPost["Location"].ToString().Split(new char[] { ',' });
        post.Location = new Location { State = location[1].Trim(), City = location[0].Trim(), ZipCode = null };

        post.Company = jobPost["Employer"].ToString();
        post.Description = jobPost["Description"].ToString();
        post.FieldOfStudy = null;
        post.Salary = jobPost["SalaryLevel"].ToString();
        jobsToReturn.Add(post);
    }

    return jobsToReturn;
}
static DemoUtils()
{
    RegisterFilterEnums();
    DashboardFilter = new FilterBag("Dashboard", CreateDashboardStandardFilters());
    CustomerFilter = new FilterBag("Customer", CreateCustomerStandardFilters(), "Name", "BillingAddress.Line", "BillingAddress.City");
    EmployeeFilter = new FilterBag("Employee", CreateEmployeeStandardFilters(), "FirstName", "LastName", "Title");
    TaskFilter = new FilterBag("Task", CreateTaskStandardFilters(), "Subject", "Description", "AssignedEmployee.FirstName", "AssignedEmployee.LastName");
    ProductFilter = new FilterBag("Product", CreateProductStandardFilters(), "Name");
}
private QueryBag(string queryString, UserData userData)
{
    UserData = userData;

    var filterParser = new FilterParser();
    FilterBag = filterParser.Parse(queryString);

    var sortParser = new SortParser();
    SortBag = sortParser.Parse(queryString);
}
public Dictionary<string, List<AlumniPost>> GetAlumni(FilterBag filters)
{
    Dictionary<string, List<AlumniPost>> alumni = new Dictionary<string, List<AlumniPost>>();
    if (filters.IsEmpty())
    {
        return alumni;
    }

    // Use a dictionary of module to bool so each module can mark when it is complete.
    // In case of a timeout, this is used to determine which module did not complete.
    Dictionary<IAlumniModule, bool> moduleCompleted = new Dictionary<IAlumniModule, bool>();
    foreach (IAlumniModule module in modules)
    {
        moduleCompleted.Add(module, false);
    }

    object lockObject = new Object();
    var timeout = 5000; // 5 seconds
    var cts = new CancellationTokenSource();
    var t = new Timer(_ => cts.Cancel(), null, timeout, -1);

    try
    {
        Parallel.ForEach(modules, new ParallelOptions { CancellationToken = cts.Token }, (module) =>
        {
            try
            {
                Dictionary<string, List<AlumniPost>> partialJobs = module.GetAlumni(filters);
                lock (lockObject)
                {
                    moduleCompleted[module] = true;
                    alumni = alumni.Concat(partialJobs).ToDictionary(e => e.Key, e => e.Value);
                }
            }
            catch (Exception e)
            {
                Debug.WriteLine(e.ToString());
                // The module failed. Not a system failure, but the user should be notified.
                // We need to create a mechanism to actually notify them and call it here.
            }
        });
    }
    catch (OperationCanceledException)
    {
        // This is where we should notify the user that a source timed out.
        // The source can be determined by looking at the moduleCompleted dictionary.
    }

    return PostProcessAlumni(alumni);
}
public List<HousingPost> GetHousing(FilterBag filters, int page, int resultsPerPage)
{
    var query = dbConext.HousingReviews.AsQueryable();
    if (!String.IsNullOrEmpty(filters.Keyword))
    {
        query = query.Where(review => review.Description.Contains(filters.Keyword) || review.Title.Contains(filters.Keyword));
    }

    List<HousingPost> reviews = query.Select(review => new HousingPost()
    {
        ID = review.Id,
        Title = review.Title,
        DatePosted = (DateTime)review.DateTime,
        Description = review.Description,
        Rating = (int)review.Rating,
        Location = new Data_Types.Location()
        {
            City = review.HousingLocation.City,
            State = review.HousingLocation.State,
            ZipCode = review.HousingLocation.ZipCode,
            Longitude = review.HousingLocation.Longitude ?? 0,
            Latitude = review.HousingLocation.Latitude ?? 0
        },
        PostedBy = review.SubmittedBy
    }).ToList();

    if (filters.Location != null)
    {
        // Keep only reviews within 50 miles of the filter location (haversine distance).
        reviews = reviews.Where(review =>
        {
            double lat1 = (Math.PI / 180) * filters.Location.Latitude;
            double lat2 = (Math.PI / 180) * review.Location.Latitude;
            double lon1 = (Math.PI / 180) * filters.Location.Longitude;
            double lon2 = (Math.PI / 180) * review.Location.Longitude;
            double dlat = lat2 - lat1;
            double dlon = lon2 - lon1;
            double a = Math.Pow(Math.Sin(dlat / 2), 2) + Math.Cos(lat1) * Math.Cos(lat2) * Math.Pow(Math.Sin(dlon / 2), 2);
            double c = 2 * Math.Atan2(Math.Sqrt(a), Math.Sqrt(1 - a));
            double dist = 3961 * c; // approximate Earth radius in miles
            return dist < 50;
        }).ToList();
    }

    reviews = reviews.OrderByDescending(review => review.Rating).ToList();
    reviews = reviews.Skip(page * resultsPerPage).Take(resultsPerPage).ToList();
    return reviews;
}
public List<SalaryPost> GetSalaries(FilterBag filters)
{
    List<SalaryPost> salaries = new List<SalaryPost>();

    // Use a dictionary of module to bool so each module can mark when it is complete.
    // In case of a timeout, this is used to determine which module did not complete.
    Dictionary<ISalaryModule, bool> moduleCompleted = new Dictionary<ISalaryModule, bool>();
    foreach (ISalaryModule module in modules)
    {
        moduleCompleted.Add(module, false);
    }

    object lockObject = new Object();
    var timeout = 5000; // 5 seconds
    var cts = new CancellationTokenSource();
    var t = new Timer(_ => cts.Cancel(), null, timeout, -1);

    try
    {
        Parallel.ForEach(modules, new ParallelOptions { CancellationToken = cts.Token }, (module) =>
        {
            try
            {
                SalaryPost salaryPartial = module.GetSalaries(filters);
                lock (lockObject)
                {
                    moduleCompleted[module] = true;
                    if (salaryPartial != null)
                        salaries.Add(salaryPartial);
                }
            }
            catch (Exception e)
            {
                Debug.WriteLine(e.ToString());
                // The module failed. Not a system failure, but the user should be notified.
                // We need to create a mechanism to actually notify them and call it here.
            }
        });
    }
    catch (OperationCanceledException)
    {
        // This is where we should notify the user that a source timed out.
        // The source can be determined by looking at the moduleCompleted dictionary.
    }

    return PostProcessSalaries(salaries);
}
public List<HousingPost> GetHousing(FilterBag filters, int page, int resultsPerPage)
{
    List<HousingPost> houses = new List<HousingPost>();

    // Use a dictionary of module to bool so each module can mark when it is complete.
    // In case of a timeout, this is used to determine which module did not complete.
    Dictionary<IHousingModule, bool> moduleCompleted = new Dictionary<IHousingModule, bool>();
    foreach (IHousingModule module in modules)
    {
        moduleCompleted.Add(module, false);
    }

    object lockObject = new Object();
    var timeout = 5000; // 5 seconds
    var cts = new CancellationTokenSource();
    var t = new System.Threading.Timer(_ => cts.Cancel(), null, timeout, -1);

    try
    {
        Parallel.ForEach(modules, new ParallelOptions { CancellationToken = cts.Token }, (module) =>
        {
            try
            {
                List<HousingPost> partialJobs = module.GetHousing(filters, page, resultsPerPage);
                lock (lockObject)
                {
                    moduleCompleted[module] = true;
                    houses.AddRange(partialJobs);
                }
            }
            catch (Exception)
            {
                // The module failed. Not a system failure, but the user should be notified.
                // We need to create a mechanism to actually notify them and call it here.
            }
        });
    }
    catch (OperationCanceledException)
    {
        // This is where we should notify the user that a source timed out.
        // The source can be determined by looking at the moduleCompleted dictionary.
    }

    return PostProcessHousing(houses);
}
public FilterBag Parse(string pattern)
{
    var bag = new FilterBag();
    var candidates = pattern.Split("&").ToList();

    candidates.ForEach(candidate =>
    {
        FilterParam filterParam;
        if (IsFilterParam(candidate) && !bag.IsPresent(filterParam = Convert(candidate)))
        {
            bag.Add(filterParam);
        }
    });

    return bag;
}
protected void BindGrid(bool isForceRefreshData = false)
{
    try
    {
        if (!Page.IsPostBack || isForceRefreshData)
        {
            using (SqlConnection conn = new SqlConnection(SPlanetUtil.GetConnectionString()))
            {
                //Create array of Parameters
                List<SqlParameter> arrParm = new List<SqlParameter>
                {
                    new SqlParameter("@user_id", SqlDbType.Int) { Value = ConstantClass.SESSION_USER_ID },
                    new SqlParameter("@id", SqlDbType.Int) { Value = 0 },
                    new SqlParameter("@search_name", SqlDbType.VarChar, 200) { Value = "" },
                    new SqlParameter("@service_type", SqlDbType.VarChar, 100) { Value = ConstantClass.SESSION_DEPARTMENT_SERVICE_TYPE },
                    // new SqlParameter("@customer_id", SqlDbType.Int) { Value = 0 },
                };
                conn.Open();
                dsResult = SqlHelper.ExecuteDataset(conn, "sp_quotation_header_list", arrParm.ToArray());
                conn.Close();
                Session["SESSION_QUOTATION_LIST"] = dsResult;
            }
        }

        //Bind data into GridView
        gridView.DataSource = dsResult;
        gridView.FilterExpression = FilterBag.GetExpression(false);
        gridView.DataBind();
    }
    catch (Exception ex)
    {
        string strErrorMsg = SPlanetUtil.LogErrorCollect(ex);
        ScriptManager.RegisterStartupScript(this, GetType(), "myalert", "alert('" + strErrorMsg + "');", true);
    }
}
public List<JobPost> GetJobs(FilterBag filters, int page, int resultsPerModule)
{
    if (filters.IsEmpty())
    {
        return new List<JobPost>();
    }

    List<Task> tasks = new List<Task>();
    List<List<JobPost>> jobs = new List<List<JobPost>>();
    object lockObject = new object();

    foreach (IJobModule module in modules)
    {
        Task task = new Task(() =>
        {
            try
            {
                List<JobPost> moduleJobs = module.GetJobs(filters, page, resultsPerModule);
                lock (lockObject)
                {
                    jobs.Add(moduleJobs);
                }
            }
            catch (Exception)
            {
                // The module failed. Not a system failure, but the user should be notified.
                // We need to create a mechanism to actually notify them and call it here.
            }
        });
        tasks.Add(task);
        task.Start();
    }

    Task.WaitAll(tasks.ToArray(), MaxWaitTime * 1000);

    // Create a copy of jobs in case a module finishes late and tries to modify the
    // collection while we're still using it.
    List<List<JobPost>> duplicatedJobs = new List<List<JobPost>>(jobs);
    if (ConfigLoader.JobConfig.RemoveDuplicatePosts)
    {
        RemoveDuplicateJobs(duplicatedJobs, page);
    }

    return PostProcessJobs(duplicatedJobs);
}
public List<JobPost> GetJobs(FilterBag filters, int page, int resultsPerPage)
{
    /*if (filters.Count == 0)
    {
        return new List<JobPost>();
    }
    using (var client = new WebClient())
    {
        string xml = client.DownloadString(new Uri(auth));
        Console.WriteLine(xml);
    }
    return new List<JobPost>();*/

    LinkedInTempData tempData = new LinkedInTempData();
    LinkedInJobPost.LinkedInResult results = tempData.getDummyData();
    return LinkedInResultToJobPosts(results);
}
protected void gridView_CustomCallback(object sender, ASPxGridViewCustomCallbackEventArgs e)
{
    var dsResult = new DataSet();
    try
    {
        using (SqlConnection conn = new SqlConnection(SPlanetUtil.GetConnectionString()))
        {
            //Create array of Parameters
            List<SqlParameter> arrParm = new List<SqlParameter>
            {
                new SqlParameter("@user_id", SqlDbType.Int) { Value = ConstantClass.SESSION_USER_ID },
                new SqlParameter("@id", SqlDbType.Int) { Value = 0 },
                new SqlParameter("@search_name", SqlDbType.VarChar, 200) { Value = e.Parameters.ToString() }
            };
            conn.Open();
            dsResult = SqlHelper.ExecuteDataset(conn, "sp_sale_order_header_list", arrParm.ToArray());
            conn.Close();
            Session["SESSION_SALE_ORDER_LIST"] = dsResult;
        }

        //Bind data into GridView
        gridView.DataSource = dsResult;
        gridView.FilterExpression = FilterBag.GetExpression(false);
        gridView.DataBind();
    }
    catch (Exception ex)
    {
        string strErrorMsg = SPlanetUtil.LogErrorCollect(ex);
        ScriptManager.RegisterStartupScript(this, GetType(), "myalert", "alert('" + strErrorMsg + "');", true);
    }
}
public List<JobPost> GetJobs(FilterBag filters, int page, int resultsPerPage)
{
    List<JobPost> jobsToReturn = new List<JobPost>();
    if (filters.IsEmpty())
    {
        return jobsToReturn;
    }

    string request = builder.BuildQuery(filters, page, resultsPerPage);
    XDocument doc = XDocument.Load(request);
    IEnumerable<XElement> results = doc.Descendants("ResponseJobSearch").Single()
        .Descendants("Results").Single()
        .Descendants("JobSearchResult");

    foreach (var jobPost in results)
    {
        JobPost post = new JobPost();
        post.URL = jobPost.Element("JobDetailsURL").Value;
        post.SourceModule = source;
        post.DatePosted = DateTime.Parse(jobPost.Element("PostedDate").Value);
        post.JobTitle = jobPost.Element("JobTitle").Value;

        // This field is returned as "MN - Plymouth", so split on the dash.
        string[] location = jobPost.Element("Location").Value.Split(new char[] { '-' });
        post.Location = new Location { State = location[0].Trim(), City = location[1].Trim(), ZipCode = null };

        // If the company name is empty, fill it with city and state (some posts have an empty company name).
        post.Company = jobPost.Element("Company").Value == "" ? location[1] + " , " + location[0] : jobPost.Element("Company").Value;

        post.Description = jobPost.Element("DescriptionTeaser").Value;
        post.FieldOfStudy = null;
        post.Salary = jobPost.Element("Pay").Value;
        jobsToReturn.Add(post);
    }

    return jobsToReturn;
}
// Please refer to http://api.careerbuilder.com/Search/jobsearch/jobsearchinfo.aspx for more info.
/// <summary>
/// Builds a URI from the specified filters and parameters to query the CareerBuilder job search API.
/// </summary>
/// <param name="filters">The filters specific to a particular job search.</param>
/// <param name="Page">Page of results.</param>
/// <param name="ResultsPerPage">The number of results to return per page.</param>
/// <returns>A string representing the URI for a REST query.</returns>
public String BuildQuery(FilterBag filters, int Page, int ResultsPerPage)
{
    StringBuilder builder = new StringBuilder();

    // Add the job search API base and required dev key.
    builder.Append("http://api.careerbuilder.com/v1/jobsearch?DeveloperKey=WDHV0RV6Q60BJ3WD2H15&PerPage=" + ResultsPerPage + "&PageNumber=" + (Page + 1).ToString());
    builder.Append(keywordConverter(filters.Keyword));
    builder.Append(locationConverter(filters.Location));
    builder.Append(jobTitleConverter(filters.JobTitle));
    builder.Append(companyNameConverter(filters.CompanyName));
    builder.Append(JobTypeConverter(filters.JobType.ToString()));
    builder = FieldOfStudyConverter(filters.FieldOfStudy.ToString(), builder);

    /* case Field.EducationCode:
        switch (FilterDict[key])
        {
            case "Not Specified": builder.Append("&EducationCode=DRNS"); break;
            case "None": builder.Append("&EducationCode=DR3210"); break;
            case "High School": builder.Append("&EducationCode=DR3211"); break;
            case "2 Year Degree": builder.Append("&EducationCode=DR321"); break;
            case "4 Year Degree": builder.Append("&EducationCode=DR32"); break;
            case "Graduate Degree": builder.Append("&EducationCode=DR3"); break;
            case "Doctorate": builder.Append("&EducationCode=DR"); break;
            default: break;
    */

    return builder.ToString();
}
public List<JobPost> GetJobs(FilterBag filters, int page, int resultsPerPage)
{
    List<JobPost> jobsToReturn = new List<JobPost>();

    string request = builder.BuildQuery(filters, page, resultsPerPage);
    XDocument doc = XDocument.Load(request);
    IEnumerable<XElement> results = doc.Descendants("result").Single()
        .Descendants("resultItemList").Single()
        .Descendants("resultItem");

    foreach (var jobPost in results)
    {
        JobPost post = new JobPost();
        post.Company = jobPost.Element("company").Value;
        post.URL = jobPost.Element("detailUrl").Value;
        post.SourceModule = source;
        post.DatePosted = DateTime.Parse(jobPost.Element("date").Value);
        post.JobTitle = jobPost.Element("jobTitle").Value;

        // This field is returned as "San Ramon, CA", so split on the comma.
        string[] location = jobPost.Element("location").Value.Split(new char[] { ',' });
        post.Location = new Location { State = location[1].Trim(), City = location[0].Trim(), ZipCode = null };

        // Dice doesn't provide a description teaser, so fake one from the company and title.
        post.Description = jobPost.Element("company").Value + " is looking for a " + jobPost.Element("jobTitle").Value;
        post.FieldOfStudy = null;
        post.Salary = null;
        jobsToReturn.Add(post);
    }

    return jobsToReturn;
}
public static String BuildQuery(FilterBag filters, int Page, int ResultsPerPage)
{
    StringBuilder builder = new StringBuilder();
    ResultsPerPage = ResultsPerPage <= 50 ? ResultsPerPage : 50;
    Page = (Int32)(Page * ((Double)ResultsPerPage / 50.0)); // the API returns 50 results per page
    builder.Append("http://jobs.github.com/positions.json?markdown=false&page=" + Page);

    String description = "";
    if (filters.CompanyName != "")
        description += filters.CompanyName + ", ";
    if (filters.FieldOfStudy != "")
        description += filters.FieldOfStudy + ", ";
    if (filters.JobTitle != "")
        description += filters.JobTitle + ", ";
    if (filters.JobType.ToString() == "FullTime")
        builder.Append("&full_time=true");
    else if (filters.JobType.ToString() != "")
        description += filters.JobType.ToString() + ",";
    if (filters.Keyword != "")
        if (filters.Keyword == "full time")
            builder.Append("&full_time=true");
        else
            description += filters.Keyword + ",";
    if (filters.Location != null && (filters.Location.City != "" || filters.Location.State != "" || filters.Location.ZipCode != ""))
        builder.Append("&location=" + (filters.Location.City ?? "") + ", " + (filters.Location.State ?? "") + " " + (filters.Location.ZipCode ?? ""));
    if (description != "")
        builder.Append("&description=" + description);

    return builder.ToString();
}
public string buildQuery(FilterBag filterbag, int page, int resultsPerPage)
{
    // String builder, (arguably) more efficient than concatenating strings.
    StringBuilder builder = new StringBuilder();

    // The required base for all requests.
    builder.Append(Constants.INDEED_REQUEST_BASE);

    //TODO: This should come from configuration
    builder.Append("&start=" + page * resultsPerPage);

    builder.Append(keywordConverter(filterbag.Keyword, filterbag.JobTitle, filterbag.CompanyName, filterbag.FieldOfStudy));
    builder.Append(locationConverter(filterbag.Location));

    // string ip = HttpContext.Current.Request.UserHostAddress;
    // string agent = HttpContext.Current.Request.Browser.Browser;

    // Required tags
    builder.Append(FORMAT_TAG);                             // The result comes back in JSON format
    builder.Append(limitConverter(Constants.RESULT_LIMIT)); // The limit on the number of results returned
    builder.Append(useripConverter(USER_IP));               // The IP of the current user, for Indeed metrics
    builder.Append(useragentConverter(USER_AGENT));         // The browser of the current user, for Indeed metrics
    builder.Append(VERSION_TAG);                            // Version of the API, currently v2
    builder.Append(JobTypeConverter(filterbag.JobType.ToString()));

    return builder.ToString();
}
public List<JobPost> GetJobs(FilterBag filterbag, int page, int resultsPerPage)
{
    // Short circuit if there are no filters specified.
    if (filterbag.IsEmpty())
    {
        return new List<JobPost>();
    }

    // Try to build an Indeed API request from the given set of filters; catch and log any problems.
    string request = "";
    try
    {
        // Build the request based on filters.
        request = builder.buildQuery(filterbag, page, resultsPerPage);

        // If the request comes back empty, something bad happened.
        if (String.IsNullOrEmpty(request))
        {
            throw new ArgumentException();
        }
    }
    // The built request is, for some reason, empty or null. Return an empty results list.
    catch (ArgumentException argex)
    {
        Logging.JobSaltLogger.Instance.log("(Indeed) Error in IndeedQueryBuilder caused API request string to be empty or null.");
        Logging.JobSaltLogger.Instance.log(filterbag.ToString() + "\n Page=" + page + "\n resultsPerPage=" + resultsPerPage);
        return new List<JobPost>();
    }
    // An unknown exception was thrown while building the request. The request string is
    // empty now, so there's no point continuing (it would return an empty list anyway).
    catch (Exception e)
    {
        Logging.JobSaltLogger.Instance.log("(Indeed) Exception caught while building Indeed Query: " + e.Message);
        Logging.JobSaltLogger.Instance.log(filterbag.ToString() + "\n Page=" + page + "\n resultsPerPage=" + resultsPerPage);
        return new List<JobPost>();
    }

    IndeedResult iResult; // Raw Indeed results
    using (var client = new WebClient())
    {
        string json = client.DownloadString(request); // Issues a GET to the Indeed API with the request string
        try
        {
            var serializer = new JavaScriptSerializer();
            iResult = serializer.Deserialize<IndeedResult>(json); // Parses the JSON result into a C# Indeed data object

            // The raw Indeed results are null... something very bad happened!
            if (null == iResult)
            {
                throw new ArgumentException();
            }

            // This is used to make sure that if a higher page is requested than Indeed has,
            // then the last several posts don't keep getting returned (fixed bug).
            int startpost = resultsPerPage * page;
            return IndeedResultToJobPosts(iResult, startpost); // Parses the C# Indeed data object into a list of JobPosts
        }
        // iResult is null, which means something went very wrong. Return an empty job list.
        catch (ArgumentException argex)
        {
            Logging.JobSaltLogger.Instance.log("(Indeed) An error occurred when parsing Indeed JSON into iResult, resulting in iResult being null: " + argex.Message);
            Logging.JobSaltLogger.Instance.log("(Indeed) JSON: \n" + json);
            return new List<JobPost>();
        }
        // An unknown exception occurred.
        catch (Exception e)
        {
            Logging.JobSaltLogger.Instance.log("(Indeed) An exception occurred when parsing Indeed JSON into iResult: " + e.Message);
            Logging.JobSaltLogger.Instance.log("(Indeed) JSON: \n" + json);
            return new List<JobPost>();
        }
    }
}
public Task<List<JobPost>> GetJobsAsync(FilterBag filters, int page, int resultsPerModule)
{
    HttpContext context = HttpContext.Current;
    Task<List<JobPost>> task = new Task<List<JobPost>>(() =>
    {
        HttpContext.Current = context;
        return GetJobs(filters, page, resultsPerModule);
    });
    task.Start();
    return task;
}
/// <summary>
/// Grab a set of job postings based on the given filters and page number.
/// https://jobs.github.com/api
/// </summary>
/// <param name="filters">The filters that the module should use to query.</param>
/// <param name="page">What page number of results to grab.</param>
/// <returns>The job postings.</returns>
public List<JobPost> GetJobs(FilterBag filters, int page, int resultsPerPage)
{
    // Clamp resultsPerPage to [1, 50] and compute the start index within the API's 50-result page.
    resultsPerPage = ((resultsPerPage <= 50 ? resultsPerPage : 50) >= 1 ? (resultsPerPage <= 50 ? resultsPerPage : 50) : 1);
    int startIndex = (resultsPerPage * page) < 50 ? (resultsPerPage * page) : 49;
    //int stopIndex = startIndex + resultsPerPage - 1 <= 50 ? startIndex + resultsPerPage - 1 : 50;

    List<JobPost> jobsToReturn = new List<JobPost>();

    // Build the URI query.
    string request = BuildQuery(filters, page, resultsPerPage);

    // Get the raw JSON data.
    WebClient wc = new WebClient();
    JArray JobPosts = JArray.Parse(wc.DownloadString(request));

    // Process it into JobPost objects.
    foreach (JToken jobPost in JobPosts)
    {
        try
        {
            JobPost post = new JobPost();
            post.SourceModule = source;

            // URL
            post.URL = jobPost["url"].ToString();

            // Parse the date into the proper format.
            String date = jobPost["created_at"].ToString().Replace("UTC ", "");
            post.DatePosted = DateTime.ParseExact(date, "ddd MMM dd HH:mm:ss yyyy", null);

            // Job title
            post.JobTitle = jobPost["title"].ToString();

            // Location
            String[] location = jobPost["location"].ToString().Split(new char[] { ',' });
            if (location.Length >= 2)
                post.Location = new Location { State = location[1].Trim(), City = location[0].Trim(), ZipCode = null };
            else if (location.Length == 1)
                post.Location = new Location { City = location[0], State = null, ZipCode = null };
            else
                post.Location = new Location { City = null, State = null, ZipCode = null };

            // Company
            post.Company = jobPost["company"].ToString();

            // Description, truncated to 350 characters
            string temp = jobPost["description"].ToString();
            post.Description = temp.Substring(0, temp.Length < 350 ? temp.Length : 350) + "...";

            post.FieldOfStudy = null;
            post.Salary = null;
            jobsToReturn.Add(post);
        }
        catch (Exception ex)
        {
            System.Diagnostics.Debug.WriteLine("GitHub Module:\t" + ex.Message);
        }
    }

    return jobsToReturn.Skip(startIndex).Take(resultsPerPage).ToList<JobPost>();
}
static SPlanetUtil()
{
    //ObjectFilter = new FilterBag("DataFilter", CreateStandardFilters(), "is_enable");
    ObjectFilter = new FilterBag("DataFilter");
}
protected void BindGrid(bool isForceRefreshData = false)
{
    try
    {
        if (!Page.IsPostBack || isForceRefreshData)
        {
            string search = "";
            if (Session["SEARCH"] != null)
            {
                search = Session["SEARCH"].ToString();
                txtSearchBoxData.Value = search;
            }

            using (SqlConnection conn = new SqlConnection(SPlanetUtil.GetConnectionString()))
            {
                //Create array of Parameters
                List<SqlParameter> arrParm = new List<SqlParameter>
                {
                    new SqlParameter("@search_name", SqlDbType.VarChar, 200) { Value = search },
                    new SqlParameter("@id", SqlDbType.Int) { Value = 0 }
                };
                conn.Open();
                dsResult = SqlHelper.ExecuteDataset(conn, "sp_supplier_list", arrParm.ToArray());
                conn.Close();

                /*int i = 0;
                int rowId = 0;
                if (Session["ROW_ID"] != null)
                {
                    rowId = Convert.ToInt32(Session["ROW_ID"].ToString());
                    // If -1, find the highest id value
                    if (rowId == -1)
                    {
                        foreach (var row in dsResult.Tables[0].AsEnumerable())
                        {
                            int id = Convert.ToInt32(row["id"]);
                            if (rowId < id)
                            {
                                rowId = id;
                            }
                        }
                    }
                }
                foreach (var row in dsResult.Tables[0].AsEnumerable())
                {
                    if (rowId == Convert.ToInt32(row["id"]))
                    {
                        int selectedRow = i;
                        int prevRow = Convert.ToInt32(Session["ROW"]);
                        int pageSize = gridView.SettingsPager.PageSize;
                        int pageIndex = (int)(selectedRow / pageSize);
                        int prevPageIndex = Convert.ToInt32(Session["PAGE"]);
                        if (prevRow == selectedRow)
                        {
                            Session["PAGE"] = prevPageIndex;
                            Session["ROW"] = prevPageIndex * pageSize;
                        }
                        else
                        {
                            Session["PAGE"] = pageIndex;
                            Session["ROW"] = selectedRow;
                        }
                    }
                    i++;
                }*/

                Session["SESSION_SUPPLIER_MASTER"] = dsResult;
            }
        }

        //Bind data into GridView
        gridView.DataSource = dsResult;
        gridView.FilterExpression = FilterBag.GetExpression(false);
        gridView.DataBind();

        // Check page from session
        if (Session["ROW_ID"] != null)
        {
            int row = Convert.ToInt32(Session["ROW"]);
            gridView.FocusedRowIndex = row;
        }
        if (Session["PAGE"] != null)
        {
            int page = Convert.ToInt32(Session["PAGE"]);
            gridView.PageIndex = page;
        }
        if (!Page.IsPostBack && Session["COLUMN"] != null && Session["ORDER"] != null)
        {
            int order = Convert.ToInt32(Session["ORDER"]);
            if (order == 1)
            {
                ((GridViewDataColumn)gridView.Columns[Session["COLUMN"].ToString()]).SortAscending();
            }
            else
            {
                ((GridViewDataColumn)gridView.Columns[Session["COLUMN"].ToString()]).SortDescending();
            }
        }
    }
    catch (Exception ex)
    {
        string strErrorMsg = SPlanetUtil.LogErrorCollect(ex);
        ScriptManager.RegisterStartupScript(this, GetType(), "myalert", "alert('" + strErrorMsg + "');", true);
    }
}
Dictionary<string, List<AlumniPost>> IAlumniModule.GetAlumni(FilterBag filters)
{
    Dictionary<string, List<AlumniPost>> posts = new Dictionary<string, List<AlumniPost>>();

    var AlumSearchQuery = db.GradPlacements.Select(grad => new AlumniPost
    {
        Company = grad.employerName,
        Location = new Location { State = grad.employerStateId, City = grad.employerCity, ZipCode = "" },
        FieldOfStudy = grad.Program.name,
        Name = grad.Student.FirstName + " " + "Smith",
        PhoneNumber = "None found yet",
        Email = "*****@*****.**",
        GraduatingYear = grad.Student.CurrentExpectedGradTerm ?? 0
    });

    AlumSearchQuery = AlumSearchQuery.Where(alum => alum.Company != null);
    if (!String.IsNullOrEmpty(filters.CompanyName))
    {
        AlumSearchQuery = AlumSearchQuery.Where(alum => alum.Company.Contains(filters.CompanyName));
    }
    if (!String.IsNullOrEmpty(filters.Keyword))
    {
        AlumSearchQuery = AlumSearchQuery.Where(alum => alum.Name.Contains(filters.Keyword)
            || alum.Company.Contains(filters.Keyword)
            || alum.FieldOfStudy.Contains(filters.Keyword));
    }
    if (!String.IsNullOrEmpty(filters.FieldOfStudy))
    {
        AlumSearchQuery = AlumSearchQuery.Where(alum => alum.FieldOfStudy.Contains(filters.FieldOfStudy));
    }
    AlumSearchQuery = AlumSearchQuery.OrderBy(item => item.Company);

    foreach (var alum in AlumSearchQuery.ToList())
    {
        // Normalize the company name so near-duplicate spellings group under one key.
        string company = alum.Company.Trim().ToLower();
        Regex rgx = new Regex("[^a-zA-Z0-9 &-]");
        company = rgx.Replace(company, "");
        company = Thread.CurrentThread.CurrentCulture.TextInfo.ToTitleCase(company);

        if (posts.ContainsKey(company))
        {
            posts[company].Add(alum);
        }
        else
        {
            posts.Add(company, new List<AlumniPost>() { alum });
        }
        alum.GraduatingYear = ConvertTermToYear(alum.GraduatingYear);
    }

    return posts;
}
public List<JobPost> GetJobs(FilterBag filters, int page, int resultsperpage)
{
    List<JobPost> jobs = new List<JobPost>();

    string query = "select * from Jobs left join Employers on Jobs.employerId = Employers.id Where";
    List<SqlParameter> parameters = new List<SqlParameter>();
    List<string> whereClauses = new List<string>();

    // Only get jobs that were modified within the last six months.
    whereClauses.Add("[Jobs].modifiedDate > @RecentOrders");
    parameters.Add(new SqlParameter("RecentOrders", DateTime.Now.AddMonths(-6)));

    // Build up WHERE clauses to match the filters.
    if (!String.IsNullOrWhiteSpace(filters.CompanyName))
    {
        whereClauses.Add("[Employers].name like '%' + @CompanyName + '%'");
        parameters.Add(new SqlParameter("CompanyName", filters.CompanyName));
    }
    if (filters.Location != null)
    {
        whereClauses.Add("([Employers].state = @StateLong OR [Employers].state = @StateShort) And [Employers].city = @City");
        parameters.Add(new SqlParameter("StateLong", filters.Location.StateLong));
        parameters.Add(new SqlParameter("StateShort", filters.Location.State));
        parameters.Add(new SqlParameter("City", filters.Location.City));
    }
    if (!String.IsNullOrWhiteSpace(filters.JobTitle))
    {
        whereClauses.Add("[Jobs].title like '%' + @JobTitle + '%'");
        parameters.Add(new SqlParameter("JobTitle", filters.JobTitle));
    }
    if (!String.IsNullOrWhiteSpace(filters.Keyword))
    {
        whereClauses.Add("[Jobs].description like '%' + @Keyword + '%' OR [Jobs].title like '%' + @Keyword + '%' OR [Employers].name like '%' + @Keyword + '%' OR [Jobs].qualifications like '%' + @Keyword + '%'");
        parameters.Add(new SqlParameter("Keyword", filters.Keyword));
    }
    if (!String.IsNullOrWhiteSpace(filters.FieldOfStudy))
    {
        whereClauses.Add("[Jobs].description like '%' + @FieldOfStudy + '%'");
        parameters.Add(new SqlParameter("FieldOfStudy", filters.FieldOfStudy));
    }
    if (filters.JobType != JobType.All)
    {
        if (filters.JobType == JobType.FullTime)
        {
            whereClauses.Add("[Jobs].title like '%' + @JobType + '%'");
            parameters.Add(new SqlParameter("JobType", "Fulltime"));
        }
        else if (filters.JobType == JobType.Internship)
        {
            whereClauses.Add("[Jobs].title like '%' + @JobType + '%'");
            parameters.Add(new SqlParameter("JobType", "Co-op"));
        }
    }

    for (int i = 0; i < whereClauses.Count; ++i)
        whereClauses[i] = "(" + whereClauses[i] + ")";
    query += String.Join(" And ", whereClauses);
    query += " Order By [Jobs].modifiedDate Desc ";

    var jobsQuery = dbContext.Jobs.SqlQuery(query, parameters.ToArray()).AsNoTracking();
    var jobsSearch = jobsQuery.Skip(page * resultsperpage);
    jobsSearch = jobsSearch.Take(resultsperpage);

    jobs = jobsSearch.Select(job => new JobPost()
    {
        Company = job.Employer.name,
        DatePosted = (DateTime)job.modifiedDate,
        Description = job.description,
        JobTitle = job.title,
        Location = new Location() { State = job.Employer.state, City = job.Employer.city, ZipCode = "" },
        URL = @"https://rit-csm.symplicity.com/students/index.php?mode=form&s=jobs&ss=jobs&id=" + job.id
    }).ToList();

    foreach (var job in jobs)
        job.SourceModule = source;

    return jobs;
}