/// <summary>
/// Appends <paramref name="unityNumber"/> entries to the URL list, formatting
/// the 1-based counter into <paramref name="text"/> for each entry.
/// </summary>
/// <param name="text">Composite format string with a {0} placeholder for the counter.</param>
/// <param name="unityNumber">Number of entries to generate (counter runs 1..unityNumber).</param>
private void LinearAddList(string text, int unityNumber)
{
    int counter = 1;
    while (counter <= unityNumber)
    {
        UrlList.Add(string.Format(text, counter));
        counter++;
    }
}
// Methods

/// <summary>
/// Adds a site to the URL list, enforcing the maximum list size by evicting
/// the oldest entry (index 0) when the list is already full.
/// </summary>
/// <param name="url">Site to be added.</param>
public void AddSiteToList(string url)
{
    bool atCapacity = UrlList.Count >= ListSize;
    if (atCapacity)
    {
        // Drop the oldest entry to make room for the new one.
        UrlList.RemoveAt(0);
    }
    UrlList.Add(url);
}
/// <summary>
/// Pushes the network client's current credentials into the persistent store,
/// and records the client's URL in the URL list (with a change notification)
/// if it is not already present.
/// </summary>
private void UpdateCredentialsInPersistentStore()
{
    string currentUrl = _netClient.Url;
    _ps.UpdateCredentials(currentUrl, _netClient.TenancyName, _netClient.UserName, _netClient.Password, _netClient.AuthToken);
    if (UrlList.Contains(currentUrl))
    {
        return;
    }
    UrlList.Add(currentUrl);
    NotifyPropertyChanged(nameof(UrlList));
}
/// <summary>
/// Registers a feed: stores its link in the URL list and the feed itself in
/// the feed list. Null feeds and feeds already in the feed list are ignored.
/// </summary>
/// <param name="feed">Feed to register; may be null.</param>
public void AddFeed(Feed feed)
{
    // Reject null input and duplicates in a single guard.
    if (feed == null || FeedList.Contains(feed))
    {
        return;
    }
    UrlList.Add(feed.Link);
    FeedList.Add(feed);
}
/// <summary>
/// Routes a sitemap &lt;loc&gt; value: locations containing "xml" (nested
/// sitemap references) go to the inner-sitemap list, all others to the URL list.
/// </summary>
/// <param name="loc">Location string extracted from a sitemap.</param>
private void parseUrlAddress(string loc)
{
    // Choose the destination list, then add exactly once.
    var destination = loc.Contains("xml") ? innerSiteMapUrlList : UrlList;
    destination.Add(loc);
}
/// <summary>
/// Parses an abuse.ch SSL blacklist CSV file and merges each listed IP into
/// <c>BlackListDB</c> with category/description/url/date metadata lists.
/// Existing IP entries are never overwritten.
/// </summary>
/// <param name="mfile">Downloaded blacklist file; resolved against Common.GetPath().</param>
internal void SslBlackList(FileInfo mfile)
{
    string path = Common.GetPath();
    //FileUrl = Common.GetURL();// Creates a dictionary of Filename and URL from which it got downloaded
    using (var rd = new StreamReader(path + mfile.ToString()))
    {
        List<KeyValuePair<string, List<string>>> InfoList = new List<KeyValuePair<string, List<string>>>();
        string fieldC = "category";
        string fieldD = "description";
        string fieldU = "url";
        string fieldT = "date";
        string IP = "";
        string IpDescription = "";
        string category = "";
        string IpDate = "";
        string URL = "";
        // The first three lines are header metadata; line 3 carries the file
        // date after a ':' separator.
        string line1 = rd.ReadLine();
        string line2 = rd.ReadLine();
        string line3 = rd.ReadLine();
        while (!rd.EndOfStream)
        {
            string line = rd.ReadLine();
            string[] info = line.Split(',');
            IpDate = line3.Split(':')[1];
            // FIX: use Length on arrays instead of LINQ Count().
            if (info.Length == 3)
            {
                IP = info[0];
                IpDescription = info[2];
                category = "C & C";
                URL = "https://sslbl.abuse.ch";
                DescriptionList.Add(IpDescription);
                CategoryList.Add(category);
                UrlList.Add(URL);
                IpDateList.Add(IpDate);
                InfoList.Add(new KeyValuePair<string, List<string>>(fieldC, CategoryList));
                InfoList.Add(new KeyValuePair<string, List<string>>(fieldD, DescriptionList));
                InfoList.Add(new KeyValuePair<string, List<string>>(fieldU, UrlList));
                InfoList.Add(new KeyValuePair<string, List<string>>(fieldT, IpDateList));
                if (!BlackListDB.ContainsKey(IP))
                {
                    BlackListDB.Add(IP, InfoList);
                    totalip.Add(IP);
                }
                // Start fresh lists for the next IP; the previous instances are
                // now owned by BlackListDB.
                CategoryList = new List<string>();
                DescriptionList = new List<string>();
                UrlList = new List<string>();
                InfoList = new List<KeyValuePair<string, List<string>>>();
                IpDateList = new List<string>();
            }
        }
    }
}
/// <summary>
/// Downloads every badips.com category list and merges the IPs into
/// <c>BlackListDB</c> with category/description/url/date metadata lists.
/// Existing IP entries are never overwritten.
/// </summary>
/// <returns>The populated blacklist dictionary keyed by IP address.</returns>
public Dictionary<string, List<KeyValuePair<string, List<string>>>> BadIpMain()
{
    List<string> AllIP = new List<string>();
    List<KeyValuePair<string, List<string>>> InfoList = new List<KeyValuePair<string, List<string>>>();
    string fieldC = "category";
    string fieldD = "description";
    string fieldU = "url";
    string fieldT = "date";
    string IP = "";
    string IpDescription = "";
    string category = "";
    string IpDate = "";
    string URL = "";
    // FIX: the original allocated a dictionary and immediately overwrote it
    // with GetCatDesc(); the dead allocation is removed.
    Dictionary<string, string> CatDesc = GetCatDesc();
    foreach (string cat in CatDesc.Keys)
    {
        // FIX: build the per-category list URL once and reuse it for both the
        // fetch and the stored metadata (it was rebuilt for every single IP).
        string listUrl = "https://www.badips.com/get/list/" + cat + "/0";
        AllIP = Get(listUrl);
        foreach (string ip in AllIP)
        {
            IP = ip;
            category = cat;
            IpDescription = CatDesc[cat];
            URL = listUrl;
            IpDate = "No Date Provided";
            DescriptionList.Add(IpDescription);
            CategoryList.Add(category);
            UrlList.Add(URL);
            IpDateList.Add(IpDate);
            InfoList.Add(new KeyValuePair<string, List<string>>(fieldC, CategoryList));
            InfoList.Add(new KeyValuePair<string, List<string>>(fieldD, DescriptionList));
            InfoList.Add(new KeyValuePair<string, List<string>>(fieldU, UrlList));
            InfoList.Add(new KeyValuePair<string, List<string>>(fieldT, IpDateList));
            // Only store non-empty IPs, and never overwrite an existing entry.
            if (IP != "")
            {
                if (!BlackListDB.ContainsKey(IP))
                {
                    totalip.Add(IP);
                    BlackListDB.Add(IP, InfoList);
                }
            }
            // Start fresh lists for the next IP; the previous instances are
            // now owned by BlackListDB.
            CategoryList = new List<string>();
            DescriptionList = new List<string>();
            UrlList = new List<string>();
            InfoList = new List<KeyValuePair<string, List<string>>>();
            IpDateList = new List<string>();
        }
    }
    return BlackListDB;
}
/// <summary>
/// Handles an incoming open-URL request: broadcasts an "OpenUrl" notification
/// and records the URL's path in the URL list if it is not already tracked.
/// Always reports the URL as handled.
/// </summary>
public override bool OpenUrl(UIApplication app, NSUrl url, NSDictionary options)
{
    NSNotificationCenter.DefaultCenter.PostNotificationName("OpenUrl", url);
    string importedPath = url.Path;
    if (!UrlList.Contains(importedPath))
    {
        UrlList.Add(importedPath);
    }
    Debug.WriteLine("imported url = " + url.ToString());
    return true;
}
/// <summary>
/// Scans the fetched page source for URL matches and enqueues each cleaned
/// match as a new <c>Url</c> at depth + 1. Does nothing when no source is set.
/// </summary>
/// <param name="depth">Depth of the page the source was fetched from.</param>
public void FetchAllUrls(int depth)
{
    if (Source == null)
    {
        return;
    }
    using StreamReader sr = new StreamReader(Source);
    MatchCollection matches = UrlPattern.Matches(sr.ReadToEnd());
    foreach (Match match in matches)
    {
        string cleanUrl = CleanUrl(match.Value);
        if (string.IsNullOrEmpty(cleanUrl))
        {
            continue;
        }
        Url url = new Url(new Uri(cleanUrl), depth + 1, UrlStatus.Queue);
        UrlList.Add(url);
        // FIX: the message template had no placeholder, so the URL argument
        // was silently dropped by the structured logger.
        _logger.LogInformation("Grabbed URL from page {Url}", url.uri.AbsoluteUri);
    }
}
/// <summary>
/// Extracts Bilibili item UrlIds (and uploader names) from raw search-result
/// HTML via regex and queues one DownLoadData entry per matched id.
/// </summary>
/// <param name="html">Raw HTML of a search-result page.</param>
/// <returns>The status value 成功 ("success") — returned unconditionally.</returns>
public string GetUrlIDByZz(string html)
{
    string result = 成功;
    // Capture group 2 of the title anchor is the item's UrlId.
    string zz = "<a title=\"([\\s\\S]*?)\" href=\"//h.bilibili.com/([\\s\\S]*?)\\?from=search\" target=\"_blank\" class=\"title\">";
    string[] strings = reptile.Analysis(zz, "$2", html);// extract the UrlIds
    // Capture group 2 of the up-name anchor is the uploader's display name.
    string upZz = "<a href=\"([\\s\\S]*?)\" target=\"_blank\" class=\"up-name\">([\\s\\S]*?)</a>";
    string[] ups = reptile.Analysis(upZz, "$2", html);// extract the uploader (UP) names
    // NOTE(review): assumes ups has at least as many entries as strings — a
    // title match without a matching up-name anchor would throw
    // IndexOutOfRangeException here; confirm against the page structure.
    for (var i = 0; i < strings.Length; i++)
    {
        UrlList.Add(
            new DownLoadData
            {
                // Strip the keyword-highlight markup the search page injects.
                UpName = ups[i]
                         .Replace("<em class=\"keyword\">", "")
                         .Replace("</em>", ""),
                UrlId = strings[i],
                State = "未下载" // initial state: "not downloaded"
            });
    }
    return(result);
}
/// <summary>
/// Click handler: sends the configured request with the cached access token
/// appended to the query string, stores the response body, and records the raw
/// request URL in the URL history. Shows a prompt when no token is cached.
/// </summary>
public void UrlButton_OnClick(object sender, RoutedEventArgs e)
{
    if (TokenCache.CurrentToken.IsNullOrEmpty())
    {
        MessageBox.Show("请获取token后查询");
        return;
    }
    // Append the token with '&' when the URL already has a query string, '?' otherwise.
    var realurl = string.Format(
        RequestUrl.Contains("?") ? @"{0}&access_token={1}" : @"{0}?access_token={1}",
        RequestUrl,
        TokenCache.CurrentToken);
    // FIX: compare the verb with an ordinal case-insensitive comparison instead
    // of ToUpper() (CA1862); the verbs are mutually exclusive, hence else-if.
    if (string.Equals(RequestType, "POST", StringComparison.OrdinalIgnoreCase))
    {
        ResponseBody = HttpHelper.HttpRequestPost(realurl, RequestBody);
    }
    else if (string.Equals(RequestType, "GET", StringComparison.OrdinalIgnoreCase))
    {
        ResponseBody = HttpHelper.HttpRequestGet(realurl);
    }
    UrlList.Add(RequestUrl);
}
/// <summary>
/// Parses a blocklist.de export file (one IP address per line) and merges each
/// IP into <c>BlackListDB</c>. Category and description are derived from the
/// file name; existing IP entries are never overwritten.
/// </summary>
/// <param name="mfile">Downloaded blocklist file; resolved against Common.GetPath().</param>
internal void blocklistDe(FileInfo mfile)
{
    string path = Common.GetPath();
    //FileUrl = Common.GetURL();// Creates a dictionary of Filename and URL from which it got downloaded
    string fileName = mfile.ToString();
    // Category and description depend only on the file name, so resolve them
    // once instead of re-testing mfile.ToString() for every line (FIX: the
    // original repeated five if-blocks inside the read loop).
    string IpDescription = "";
    string category = "";
    switch (fileName)
    {
        case "all.txt":
            IpDescription = "All IP addresses that have attacked one of our customers/servers in the last 48 hours. ";
            category = "attacks";
            break;
        case "ssh.txt":
            IpDescription = "All IP addresses which have been reported within the last 48 hours as having run attacks on the service";
            category = "ssh attacks";
            break;
        case "bots.txt":
            IpDescription = "All IP addresses which have been reported within the last 48 hours as having run attacks attacks on the RFI-Attacks, REG-Bots, IRC-Bots or BadBots (BadBots = he has posted a Spam-Comment on a open Forum or Wiki). ";
            category = "bots";
            break;
        case "ircbot.txt":
            IpDescription = "Irc bots found in last 48 hours. ";
            category = "ircbot";
            break;
        case "bruteforcelogin.txt":
            // FIX: this literal was broken across a physical line break in the
            // original source (invalid in a non-verbatim string); re-joined.
            IpDescription = "All IPs which attacks Joomlas, Wordpress and other Web-Logins with Brute-Force Logins. ";
            category = "bruteforcelogin";
            break;
    }
    // Provider URL and update cadence are constant for every entry in the file.
    const string URL = "http://www.blocklist.de/en/export.html";
    const string IpDate = "Provider updates every 48hrs";
    using (var rd = new StreamReader(path + fileName))
    {
        List<KeyValuePair<string, List<string>>> InfoList = new List<KeyValuePair<string, List<string>>>();
        string fieldC = "category";
        string fieldD = "description";
        string fieldU = "url";
        string fieldT = "date";
        while (!rd.EndOfStream)
        {
            string line = rd.ReadLine();
            string IP = line.Trim();
            DescriptionList.Add(IpDescription);
            CategoryList.Add(category);
            UrlList.Add(URL);
            IpDateList.Add(IpDate);
            InfoList.Add(new KeyValuePair<string, List<string>>(fieldC, CategoryList));
            InfoList.Add(new KeyValuePair<string, List<string>>(fieldD, DescriptionList));
            InfoList.Add(new KeyValuePair<string, List<string>>(fieldU, UrlList));
            InfoList.Add(new KeyValuePair<string, List<string>>(fieldT, IpDateList));
            if (!BlackListDB.ContainsKey(IP))
            {
                totalip.Add(IP);
                BlackListDB.Add(IP, InfoList);
            }
            // Start fresh lists for the next IP; the previous instances are
            // now owned by BlackListDB.
            CategoryList = new List<string>();
            DescriptionList = new List<string>();
            UrlList = new List<string>();
            InfoList = new List<KeyValuePair<string, List<string>>>();
            IpDateList = new List<string>();
        }
    }
}
/// <summary>
/// Parses a FireHOL/ipset-style list file: the commented header yields the
/// description, category, date and source URL; every line containing an
/// IPv4-looking token becomes (or updates) an entry in BlackListDB.
/// </summary>
/// <param name="mfile">Downloaded ipset file; resolved against Common.GetPath().</param>
internal void IPsetDB(FileInfo mfile)
{
    string path = Common.GetPath();
    FileUrl = Common.GetURL();// Creates a dictionary of Filename and URL from which it got downloaded
    using (var rd = new StreamReader(path + mfile.ToString()))
    {
        List<KeyValuePair<string, List<string>>> InfoList = new List<KeyValuePair<string, List<string>>>();
        string fieldC = "category";
        string fieldD = "description";
        string fieldU = "url";
        string fieldT = "date";
        string IP = "";
        string IpDescription = "";
        string category = "";
        string IpDate = "";
        string url = "";
        // Accumulates header lines that make up the free-text description.
        List<string> tempDesc = new List<string>();
        int i = 1;   // 1-based line counter
        string t = ""; // NOTE(review): never used in this method — candidate for removal
        string l = ""; // joined/cleaned description text
        int k = 0;   // 0 while still collecting description; set past the "Maintainer" line once found
        while (!rd.EndOfStream)
        {
            string x = rd.ReadLine();
            // Header phase (k == 0): from line 6 onward, collect lines into the
            // description until the "Maintainer" line is reached.
            if (i != k && k == 0)
            {
                if (i >= 6)
                {
                    if (!x.Contains("Maintainer"))
                    {
                        tempDesc.Add(x);
                    }
                    else
                    {
                        // Description is complete: join it, strip '#' markers.
                        k = i;
                        k++;
                        l = string.Join(" ", tempDesc);
                        l = l.Replace("#", " ");
                        IpDescription = l;
                    }
                }
            }
            i++;
            if (i == k)
            {
                k = 1;
            }
            // Header key/value lines overwrite the metadata; otherwise keep the
            // previously seen value.
            category = x.Contains("Category") ? x.Split(':')[1].ToString().Trim() : category;
            IpDate = x.Contains("Source File Date") ? x.Split(':')[1].ToString().Trim() : IpDate;
            // Split on the FIRST ':' only, so "http://..." URLs survive intact.
            url = x.Contains("List source URL") ? x.Split(new char[] { ':' }, 2, StringSplitOptions.RemoveEmptyEntries)[1].ToString().Trim() : url;
            // First dotted-quad token on the line, if any (no octet-range check).
            var match = Regex.Match(x, @"\b(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})\b");
            if (match.ToString() != "")
            {
                IP = match.ToString().Trim();
                DescriptionList.Add(IpDescription);
                CategoryList.Add(category);
                UrlList.Add(url);
                IpDateList.Add(IpDate);
                InfoList.Add(new KeyValuePair<string, List<string>>(fieldC, CategoryList));
                InfoList.Add(new KeyValuePair<string, List<string>>(fieldD, DescriptionList));
                InfoList.Add(new KeyValuePair<string, List<string>>(fieldU, UrlList));
                InfoList.Add(new KeyValuePair<string, List<string>>(fieldT, IpDateList));
                if (!BlackListDB.ContainsKey(IP))
                {
                    totalip.Add(IP);
                    BlackListDB.Add(IP, InfoList);
                }
                else
                {
                    // Known IP: merge this file's category and download URL
                    // into the existing entry, avoiding duplicates.
                    InfoList = BlackListDB[IP];
                    foreach (KeyValuePair<string, List<string>> item in InfoList)
                    {
                        if (item.Key == "category")
                        {
                            if (!item.Value.Contains(category))
                            {
                                item.Value.Add(category);
                            }
                        }
                        if (item.Key == "url")
                        {
                            if (!item.Value.Contains(FileUrl[mfile.ToString()]))
                            {
                                item.Value.Add(FileUrl[mfile.ToString()]);
                            }
                        }
                    }
                }
                // Start fresh lists for the next IP; the previous instances are
                // now owned by BlackListDB.
                CategoryList = new List<string>();
                DescriptionList = new List<string>();
                UrlList = new List<string>();
                InfoList = new List<KeyValuePair<string, List<string>>>();
                IpDateList = new List<string>();
            }
        }
    }
}
// GetNext on a single-element list must return that element.
public void GIVEN_list_with_one_url_WHEN_getNext_called_THEN_returns_correct_url()
{
    // Arrange: exactly one known URL in the list.
    const string expectedUrl = "http://www.slb.com";
    list.Add(expectedUrl);

    // Act & Assert: GetNext yields the only URL present.
    Assert.AreEqual(expectedUrl, list.GetNext());
}