/// <summary>
/// Renders the selected view to an in-memory string, optionally post-processes the
/// markup through <see cref="Scraping"/>, and writes the final content to the response.
/// </summary>
/// <param name="context">The controller context for the current request.</param>
public override void ExecuteResult(ControllerContext context)
{
    // Default the view name to the current action when the caller did not set one.
    if (string.IsNullOrWhiteSpace(this.ViewName))
    {
        this.ViewName = context.RouteData.Values["action"].ToString();
    }

    ViewEngineResult result = this.FindView(context);
    context.Controller.ViewData.Model = model;
    ViewDataDictionary viewData = context.Controller.ViewData;
    TempDataDictionary tempData = context.Controller.TempData;

    string content;
    // FIX: dispose the writer (original leaked the IDisposable StringWriter).
    using (var writer = new StringWriter())
    {
        var viewContext = new ViewContext(context, result.View, viewData, tempData, writer);
        result.View.Render(viewContext, writer);
        content = writer.ToString();
    }

    // FIX: release the view back to its engine once rendering is complete,
    // as the MVC view-engine contract expects after a successful FindView.
    if (result.ViewEngine != null)
    {
        result.ViewEngine.ReleaseView(context, result.View);
    }

    Scraping scraping = new Scraping();
    if (AppSettings.UseScraping)
    {
        content = scraping.Render(content);
    }
    else
    {
        // NOTE(review): fallback wraps the raw fragment in a minimal page with a
        // hard-coded jQuery 1.7.1 reference — confirm the script path is still valid.
        content = "<html><head><script src='/Scripts/jquery-1.7.1.min.js' type='text/javascript'></script></head><body>" + content + "</body></html>";
    }

    context.HttpContext.Response.Write(content);
}
/// <summary>
/// Entry point: logs in to the target site and dispatches the requested scraping action.
/// Expected arguments: domain, username, password, action, [action parameters...].
/// </summary>
static async Task Main(string[] args)
{
    if (args.Length < 4)
    {
        // BUG FIX: the original printed usage but fell through and crashed with
        // IndexOutOfRangeException on args[0]; bail out after showing usage.
        PrintUsage();
        return;
    }

    var domain = args[0];
    var username = args[1];
    var password = args[2];
    var action = args[3];
    var actionParameters = args.Skip(4).ToArray();

    _scraping = new Scraping(new Uri(domain));

    var login = await _scraping.Login(username, password);
    if (!login)
    {
        throw new Exception("Failed to login");
    }

    switch (action)
    {
        case "blockdomain":
            await BlockDomain(actionParameters);
            break;
        default:
            Console.WriteLine($"did not recognize action '{action}'");
            break;
    }
}
/// <summary>
/// Validates the scraping configuration for a metric: a schedule must be present
/// either on the metric itself or in the configured metric defaults.
/// </summary>
/// <param name="metricsScraping">The metric-level scraping configuration (may be null).</param>
/// <returns>A sequence of validation error messages; empty when valid.</returns>
public IEnumerable <string> Validate(Scraping metricsScraping)
{
    var hasMetricSchedule = !string.IsNullOrWhiteSpace(metricsScraping?.Schedule);
    var hasDefaultSchedule = !string.IsNullOrWhiteSpace(_metricDefaults?.Scraping?.Schedule);

    // A schedule from either source satisfies the requirement.
    if (hasMetricSchedule || hasDefaultSchedule)
    {
        yield break;
    }

    yield return "No metrics scraping schedule is configured";
}
/// <summary>
/// Converts the raw JSON result into a flat list of <see cref="Scraping"/> label/value
/// entries. Entries containing "Error" are mapped to in-progress placeholders; the
/// "data" entry is flattened into one item per key/value pair.
/// </summary>
/// <param name="_resultado">The JSON payload returned by the scraping backend.</param>
/// <returns>The parsed list of scraping entries.</returns>
public List <Scraping> parseDataToScrapping(JObject _resultado)
{
    List<Scraping> scraping = new List<Scraping>();

    foreach (KeyValuePair<string, JToken> keyValuePair in _resultado)
    {
        if (keyValuePair.Value.ToString().Contains("Error"))
        {
            JArray errorArray = JsonConvert.DeserializeObject<JArray>(keyValuePair.Value.ToString());
            foreach (var errorItem in errorArray)
            {
                JObject errorObject = JsonConvert.DeserializeObject<JObject>(errorItem.ToString());
                foreach (KeyValuePair<string, JToken> keyVal in errorObject)
                {
                    // BUG FIX: allocate a fresh instance per entry. The original reused
                    // one Scraping object across the whole loop, so every list element
                    // aliased the same instance and ended up with the last value written.
                    scraping.Add(new Scraping
                    {
                        label = "",
                        value = keyVal.Value.ToString(),
                        Error = "Busca online em progresso"
                    });
                }
            }
        }
        else if (keyValuePair.Key.Equals("data"))
        {
            JObject dataObject = JsonConvert.DeserializeObject<JObject>(keyValuePair.Value.ToString());
            foreach (KeyValuePair<string, JToken> keyValue in dataObject)
            {
                JArray rows = JsonConvert.DeserializeObject<JArray>(keyValue.Value.ToString());
                foreach (var row in rows)
                {
                    JObject rowObject = JsonConvert.DeserializeObject<JObject>(row.ToString());
                    foreach (KeyValuePair<string, JToken> keyVal in rowObject)
                    {
                        scraping.Add(new Scraping
                        {
                            label = keyVal.Key,
                            value = keyVal.Value.ToString()
                        });
                    }
                }
            }
        }
    }

    return scraping;
}
/// <summary>
/// Runs the scraping data-insertion step and redirects back to the index page.
/// </summary>
public ActionResult Scrape()
{
    var scrapingRunner = new Scraping();
    scrapingRunner.DataInsertion();

    // Alternative pipeline steps the author has toggled off:
    // scrapingRunner.TransitionToPortfolio();
    // scrapingRunner.InsertingData();

    return RedirectToAction("Index");
}
/// <summary>
/// Creates a metric definition tying a Prometheus metric to its Azure Monitor
/// configuration, resource type, and the resources it is scraped from.
/// </summary>
/// <param name="prometheusMetricDefinition">Details of the Prometheus metric to expose.</param>
/// <param name="scraping">The scraping model (e.g. schedule) for this metric.</param>
/// <param name="azureMetricConfiguration">Configuration of the Azure Monitor metric.</param>
/// <param name="resourceType">The type of Azure resource being scraped.</param>
/// <param name="resources">The resources to scrape for this metric.</param>
public MetricDefinition(PrometheusMetricDefinition prometheusMetricDefinition, Scraping scraping, AzureMetricConfiguration azureMetricConfiguration, ResourceType resourceType, List <IAzureResourceDefinition> resources)
{
    PrometheusMetricDefinition = prometheusMetricDefinition;
    AzureMetricConfiguration = azureMetricConfiguration;
    ResourceType = resourceType;
    Resources = resources;
    Scraping = scraping;
}
/// <summary>
/// Maps each entry of the given JSON object to a <see cref="Scraping"/> item,
/// reading the entry's "label" and "value" sub-fields.
/// </summary>
/// <param name="JArray">The JSON object whose entries are converted.
/// (NOTE: the parameter name shadows the Newtonsoft JArray type.)</param>
/// <returns>The converted list of scraping items.</returns>
private List <Scraping> GetJArrayValue(JObject JArray)
{
    var result = new List<Scraping>();

    foreach (KeyValuePair<string, JToken> entry in JArray)
    {
        JToken payload = entry.Value;
        result.Add(new Scraping
        {
            label = payload["label"].ToString(),
            value = payload["value"].ToString()
        });
    }

    return result;
}
/// <summary>
/// Creates a new instance of the <see cref="ScrapeDefinition{TResourceDefinition}"/> class.
/// </summary>
/// <param name="azureMetricConfiguration">Configuration about the Azure Monitor metric to scrape</param>
/// <param name="prometheusMetricDefinition">The details of the prometheus metric that will be created.</param>
/// <param name="scraping">The scraping model.</param>
/// <param name="resource">The resource to scrape.</param>
/// <param name="resourceGroupName">
/// The name of the resource group containing the resource to scrape. This should contain the global
/// resource group name if none is overridden at the resource level.
/// </param>
public ScrapeDefinition(
    AzureMetricConfiguration azureMetricConfiguration,
    PrometheusMetricDefinition prometheusMetricDefinition,
    Scraping scraping,
    TResourceDefinition resource,
    string resourceGroupName)
{
    // Guard order preserved so the same argument wins when several are null.
    Guard.NotNull(azureMetricConfiguration, nameof(azureMetricConfiguration));
    Guard.NotNull(prometheusMetricDefinition, nameof(prometheusMetricDefinition));
    Guard.NotNull(scraping, nameof(scraping));
    Guard.NotNull(resource, nameof(resource));
    Guard.NotNull(resourceGroupName, nameof(resourceGroupName));

    Resource = resource;
    ResourceGroupName = resourceGroupName;
    Scraping = scraping;
    AzureMetricConfiguration = azureMetricConfiguration;
    PrometheusMetricDefinition = prometheusMetricDefinition;
}
/// <summary>
/// Parses the raw JSON result into the given <see cref="PesquisaStatus"/>: error entries
/// become in-progress placeholders (status "Em Progresso"), while the "data" entry is
/// flattened into label/value items (status "200"). ID_/PDF_ keys are skipped and
/// NOME_COMPLETO is promoted to the status object instead of the item list.
/// </summary>
/// <param name="_resultado">The JSON payload returned by the scraping backend.</param>
/// <param name="_pesquisa">The status object to populate; also returned.</param>
/// <returns>The populated status object.</returns>
public PesquisaStatus parseDataToScrapping(JObject _resultado, PesquisaStatus _pesquisa)
{
    List<Scraping> scraping = new List<Scraping>();

    foreach (KeyValuePair<string, JToken> keyValuePair in _resultado)
    {
        if (keyValuePair.Value.ToString().Contains("Error"))
        {
            JArray errorArray = JsonConvert.DeserializeObject<JArray>(keyValuePair.Value.ToString());
            foreach (var errorItem in errorArray)
            {
                JObject errorObject = JsonConvert.DeserializeObject<JObject>(errorItem.ToString());
                foreach (KeyValuePair<string, JToken> keyVal in errorObject)
                {
                    // BUG FIX: allocate a fresh instance per entry. The original reused
                    // one Scraping object across the loop, so every list element aliased
                    // the same instance and ended up with the last value written.
                    scraping.Add(new Scraping
                    {
                        label = "",
                        value = keyVal.Value.ToString(),
                        Error = "Busca online em progresso"
                    });
                }
            }
            _pesquisa.PESQUISA_STATUS = "Em Progresso";
        }
        else if (keyValuePair.Key.Equals("data"))
        {
            JObject dataObject = JsonConvert.DeserializeObject<JObject>(keyValuePair.Value.ToString());
            foreach (KeyValuePair<string, JToken> keyValue in dataObject)
            {
                JArray rows = JsonConvert.DeserializeObject<JArray>(keyValue.Value.ToString());
                foreach (var row in rows)
                {
                    JObject rowObject = JsonConvert.DeserializeObject<JObject>(row.ToString());
                    foreach (KeyValuePair<string, JToken> keyVal in rowObject)
                    {
                        string upperKey = keyVal.Key.ToUpper();
                        if (upperKey.StartsWith("ID_") || upperKey.StartsWith("PDF_"))
                        {
                            // Internal identifiers and PDF links are not surfaced.
                            continue;
                        }

                        if (upperKey.StartsWith("NOME_COMPLETO"))
                        {
                            _pesquisa.NOME_COMPLETO = keyVal.Value.ToString();
                        }
                        else
                        {
                            scraping.Add(new Scraping
                            {
                                label = keyVal.Key,
                                value = keyVal.Value.ToString()
                            });
                        }
                    }
                }
            }
            _pesquisa.PESQUISA_STATUS = "200";
        }
    }

    _pesquisa.Scraping = scraping;
    return _pesquisa;
}
// NOTE(review): scraping bootstrap for RIT dining data. Code below is left
// byte-identical — the flow is order-sensitive (parallel index counters tie
// restaurants, menus and foods together), it contains large commented-out
// sections the author intends to revisit, and the Description string literal
// appears to span a raw line break (likely an artifact of how this file was
// captured), so only documentation is added here.
//
// What the visible code does:
//  * Scrapes restaurant names, menu categories and food items from the RIT
//    dining pages via Scraping.Get_List using marker strings (CSS fragments)
//    and an XPath-like container path.
//  * Fills opening/closing hours and food prices with random placeholder
//    values (the author's own comments say real values could not be scraped).
//  * Builds keysRest/keysMenu lists recording positions where restaurant or
//    menu names occur in the combined scrape lists, then walks them with
//    restOrMenuCount to attach menus (and meal-type hours for Breakfast/
//    Lunch/Dinner) to each restaurant.
//  * Serializes the resulting restaurant list to ../../json1.json and then
//    blocks on Console.ReadLine().
//
// Known risks to confirm with the author (deliberately NOT fixed here):
//  * menu.Add_Food(Rit_Food[menuOrFoodCount]) assumes at least as many foods
//    as menus — IndexOutOfRangeException otherwise.
//  * keysRest[a + 1] assumes every restaurant name was matched exactly once;
//    a missing or duplicate match shifts or shortens the index list.
//  * Console.ReadLine() in a constructor blocks any caller that instantiates
//    ScrapeWeb non-interactively.
public ScrapeWeb() { string FullHtmlList = "/html/body/div[3]/div[2]/div[1]/section[1]/div[1]/div[2]/section/div/div[2]/div[2]"; //List of Hours string hourTest = "/html/body/div[3]/div[2]/div[1]/section[1]/div[1]/div/section/div/div"; //The total html of the page string ae = ""; // Parse tool for resturant names string names = "class=\"ds-loc\">"; // Parse tool for Menu categories string titlem = "<div class=\"menu-category\">"; //Parse tool for food items string foodie = "menu-items\">"; // Parse tool for Hours string wed = "<div class=\"row panel-body\">"; //Hour website string hourWeb = "https://www.rit.edu/fa/diningservices/places-to-eat/hours?date=2020-02-19&format=fullweek"; //Menu website string menuWeb = "https://www.rit.edu/fa/diningservices/general-menus"; Scraping scrape = new Scraping(); Random ran = new Random(); ListOfResturants = scrape.Get_List(names, FullHtmlList, menuWeb); List <Resturants> RIT_Rest = new List <Resturants>(); int Rest_Count = 0; foreach (string rest in ListOfResturants) { RIT_Rest.Add(new Resturants()); RIT_Rest[Rest_Count].Name = rest; RIT_Rest[Rest_Count].HourOpen = (ran.Next(6, 12) * 100) + (ran.Next(0, 59)); //random hour open generator if i couldnt get hours scraped RIT_Rest[Rest_Count].HourClose = (ran.Next(6, 12) * 100) + (ran.Next(0, 59)); //random hour close generator if i couldnt get hours scraped //This description is a basis RIT_Rest[Rest_Count].Description = "Located in Monroe Hall, Artesano is the on-campus patisserie offering a wide selection of upscale delicacies, including breakfast pastries and sandwiches, cookies, tarts, and a wide variety of gluten-free, vegan baked goods and specialty created treats. 
Artesano proudly serves blended frozen drinks, espressos, lattes and cappuccinos from Peet’s Coffee."; Rest_Count++; } ListOfMenus = scrape.Get_List(titlem, FullHtmlList, menuWeb); int Menu_Count = 0; List <Menus> Rit_Menus = new List <Menus>(); foreach (string menu in ListOfMenus) { Rit_Menus.Add(new Menus()); Rit_Menus[Menu_Count].Name = menu; Menu_Count++; } ListOfFood = scrape.Get_List(foodie, FullHtmlList, menuWeb); int Food_Count = 0; Random rand = new Random(); List <Foods> Rit_Food = new List <Foods>(); foreach (string food in ListOfFood) { Rit_Food.Add(new Foods()); Rit_Food[Food_Count].Name = food; Rit_Food[Food_Count].Price = rand.Next(1, 14);//Since we cant actually get the price from the website we will use this for now Food_Count++; } List <string> ResturantsAndMenus = scrape.Get_List(titlem, FullHtmlList, menuWeb, names); List <string> MenusAndFood = scrape.Get_List(titlem, FullHtmlList, menuWeb, foodie); int aCount = 0; List <int> keysRest = new List <int>(); List <int> keysMenu = new List <int>(); foreach (string RestOrMenu in ResturantsAndMenus) { foreach (Resturants rest in RIT_Rest) { if (rest.Name == RestOrMenu) { keysRest.Add(aCount); } } aCount++; } int bCount = 0; foreach (string MenuOrFood in MenusAndFood) { foreach (Menus Menu in Rit_Menus) { if (Menu.Name == MenuOrFood) { keysMenu.Add(bCount); } } bCount++; } List <string> typess = new List <string>(); typess.Add("Breakfast"); int menuOrFoodCount = 0; //for(int i = 0;i<Rit_Menus) Console.WriteLine(Rit_Menus.Count + "\n" + Rit_Food.Count); foreach (Menus menu in Rit_Menus) { menu.Add_Food(Rit_Food[menuOrFoodCount]); menuOrFoodCount++; } /* * Waiting til i fix the food <br> problem before i implement more than one food item * * * for (int a = 0; a < Rit_Menus.Count; a++) * { * if (a != Rit_Menus.Count - 1) * { * * for (int b = keysMenu[a]; b < (keysMenu[a + 1] - 1); b++) * { * Rit_Menus[a].Add_Food(Rit_Food[menuOrFoodCount]); * * menuOrFoodCount++; * } * } * else * { * for (int b = 
menuOrFoodCount; b < Rit_Menus.Count; b++) * { * * Rit_Menus[a].Add_Food(Rit_Food[menuOrFoodCount]); * menuOrFoodCount++; * } * } * } * */ int restOrMenuCount = 0; for (int a = 0; a < RIT_Rest.Count; a++) { if (a != RIT_Rest.Count - 1) { for (int b = keysRest[a]; b < (keysRest[a + 1] - 1); b++) { RIT_Rest[a].Add_Menu(Rit_Menus[restOrMenuCount]); if (typess.Contains(Rit_Menus[restOrMenuCount].Name)) { RIT_Rest[a].Type.Add(Rit_Menus[restOrMenuCount].Name); if (Rit_Menus[restOrMenuCount].Name == "Breakfast") { RIT_Rest[a].Breakfast = new MealHours(rand.Next(6, 9) * 100, 900); } else if (Rit_Menus[restOrMenuCount].Name == "Lunch") { RIT_Rest[a].Lunch = new MealHours(1130, 1200); } else if (Rit_Menus[restOrMenuCount].Name == "Dinner") { RIT_Rest[a].Dinner = new MealHours(1230, 1300); } } restOrMenuCount++; } } else { for (int b = restOrMenuCount; b < Rit_Menus.Count; b++) { RIT_Rest[a].menus.Add(Rit_Menus[restOrMenuCount]); if (typess.Contains(Rit_Menus[restOrMenuCount].Name)) { RIT_Rest[a].Type.Add(Rit_Menus[restOrMenuCount].Name); } restOrMenuCount++; } } } /* * Add food now */ System.IO.File.WriteAllText(@"../../json1.json", string.Empty); JsonSerializer serializer = new JsonSerializer(); using (StreamWriter sw = new StreamWriter("../../json1.json")) using (JsonWriter writer = new JsonTextWriter(sw)) { serializer.Serialize(writer, RIT_Rest); } Console.ReadLine(); //Loop through all theresturant and menus scrape //Add the menu to the list in each resturant once the name of a resturant is called //loop through all the foods and menus and resturants //Once a resutrant name is called increase count then once a mneu is called increase the menu count //from there keep adding food to the menu once a resturant or menu is called increase the appropriate count // }