/// <summary>
/// Rewrites <paramref name="varList"/> in place: every var is passed through
/// MapVars and the list's contents are replaced with the mapped results.
/// </summary>
/// <param name="varList">List whose vars are replaced by their mapped equivalents.</param>
private void Map(VarList varList)
{
    // Materialize the mapped list first so Clear() cannot disturb the mapping input.
    VarList mapped = Command.CreateVarList(MapVars(varList));
    varList.Clear();
    varList.AddRange(mapped);
}
/// <summary>
/// Replaces the contents of <paramref name="varList"/> with the mapped
/// equivalents of its current vars.
/// </summary>
/// <param name="varList">List that is cleared and refilled with mapped vars.</param>
private void Map(VarList varList)
{
    // Casts to IEnumerable<Var> are kept deliberately: they may steer overload
    // resolution for MapVars/AddRange, so removing them could change behavior.
    VarList remapped = Command.CreateVarList(this.MapVars((IEnumerable<Var>)varList));
    varList.Clear();
    varList.AddRange((IEnumerable<Var>)remapped);
}
/// <summary>
/// Maps each var in <paramref name="varList"/> via MapVars and swaps the
/// mapped collection back into the same list instance.
/// </summary>
/// <param name="varList">Mutated in place; callers keep the same list reference.</param>
private void Map(VarList varList)
{
    var replacement = Command.CreateVarList(MapVars(varList));
    varList.Clear();
    varList.AddRange(replacement);
}
/// <summary>
/// Runs the rewrite script in <c>ScriptFile</c> against <paramref name="url"/>.
/// A matching "^..." or "http..." header line arms execution (<c>bExec</c>); the
/// following "name = expr" lines are evaluated into <c>vars</c> until a terminal
/// assignment ("result", a verified "test", an http-less "rs") or a blank line
/// ends the run. On success the downloader's Referer/FileName/SavePath are
/// updated and <paramref name="url"/> is replaced by vars["result"].
/// </summary>
/// <param name="downloader">Receives progress/status reports and result metadata.</param>
/// <param name="url">In: URL to match. Out: rewritten URL when the script produced a result.</param>
/// <returns>Always true; failures surface as exceptions.</returns>
/// <exception cref="WebException">Propagated unchanged from script evaluation.</exception>
/// <exception cref="IOException">Propagated unchanged (e.g. ScriptFile unreadable).</exception>
/// <exception cref="Exception">Any other script error, after 3 retries are exhausted.</exception>
public static bool Parse(Downloader downloader, ref string url)
{
    var vars = new VarList();
// Re-entered via goto when the script rewrites "url": retry budget is reset.
reset:
    int retrys = 3;
// Re-entered from the catch block for transient, non-Web/IO errors.
retry:
    bool bExec = false;
    string name = null;
    string script = null;
    string s;
    int line = 0;
    int i;
    vars["url"] = url;
    try
    {
        foreach (string ss in File.ReadAllLines(ScriptFile))
        {
            s = script = ss.TrimStart();
            line++; // line number
            if (bExec)
            {
                if (string.IsNullOrEmpty(s))
                {
                    break; // Blank line, End
                }
                if (s.StartsWith("^", StringComparison.Ordinal))
                {
                    continue; // fall through
                }
                // Each active line is "name = expression".
                // NOTE(review): a line without '=' makes IndexOf return -1 and
                // Substring throw — that lands in the retry/catch path below.
                i = s.IndexOf('=');
                name = s.Substring(0, i).TrimEnd();
                s = s.Substring(i + 1);
                vars[name] = Evaluate(ref s, vars);
                if (Trace)
                {
                    downloader.ReportStatus(string.Format("[{0}] {1} = {2}\r\n", line, name, vars[name]));
                }
                if (name == "result")
                {
                    break;
                }
                if (name == "test")
                {
                    // check result: HEAD request verifies the candidate URL is live.
                    WebHeaderCollection headers = Web.HttpHead(vars["test"]);
                    if (headers != null)
                    {
                        vars["result"] = vars["test"];
                        break; // Got a valid result, End
                    }
                }
                else if (name == "url")
                {
                    // url changed: restart the whole script against the new URL.
                    url = vars["url"];
                    downloader.ReportProgress(Downloader.CHANGEURL, url);
                    vars.Clear();
                    goto reset;
                }
                else if (name == "regexp")
                {
                    // find match links: scan the page for quoted URLs and report each
                    // one (absolutized against the current url) as a new download.
                    MatchCollection m = Regex.Matches(Web.HttpGet(url), vars[name]);
                    if (m.Count > 0)
                    {
                        i = 0;
                        while (i < m.Count)
                        {
                            s = Util.GetSubStr(m[i++].Value, "\"", "\"");
                            if (s.StartsWith("//", StringComparison.Ordinal))
                            {
#if false
                                s = Regex.Match(url, "https?:").Value + s;
#else
                                continue; // skip protocol-relative links entirely
#endif
                            }
                            if (s.StartsWith("/", StringComparison.Ordinal))
                            {
                                // Root-relative: prepend scheme://host/.
                                s = Regex.Match(url, "https?://[^/]+/").Value + s.Remove(0, 1);
                            }
                            else
                            {
                                // Document-relative: resolve against url's directory.
                                s = url.Remove(url.LastIndexOf("/", StringComparison.Ordinal) + 1) + s;
                            }
                            downloader.ReportProgress(Downloader.NEWURL, s);
                        }
                    }
                    break;
                }
                else if (name == "rs" && !vars["rs"].Contains("http"))
                {
                    // no result
                    break; // End
                }
            }
            else if (s.StartsWith("^", StringComparison.Ordinal) || s.StartsWith("http", StringComparison.Ordinal))
            {
                // Header line: arm execution when the pattern matches the current URL.
                bExec = Regex.IsMatch(url, s, RegexOptions.IgnoreCase);
            }
        }
        if (bExec)
        {
            // Script matched: publish the results onto the downloader.
            downloader.Referer = string.IsNullOrEmpty(vars["referer"]) ? url : vars["referer"];
            url = vars["result"];
            downloader.FileName = HttpUtility.HtmlDecode(vars["filename"]);
            if (!string.IsNullOrEmpty(vars["dir"]))
            {
                downloader.SavePath += "\\" + vars["dir"];
            }
        }
    }
    catch (Exception ex)
    {
        // Web/IO failures are the caller's problem; rethrow without wrapping.
        if (ex is WebException || ex is IOException)
        {
            throw; //ex;
        }
        // Anything else is treated as transient: retry up to 3 times, then
        // report the offending script line.
        if (retrys-- > 0) goto retry;
        throw new Exception(string.Format("Script got error: [{0}] {1}", line, script));
    }
    finally
    {
        vars.Clear();
    }
    return true;
}
/// <summary>
/// Returns a VarList to the pool: empties it, then hands it to free_VarList.
/// </summary>
/// <param name="var">List to recycle; must not be used by the caller afterwards.</param>
public void SetVarList(VarList var)
{
    // Clear before pooling so the next borrower starts with an empty list.
    // NOTE(review): free_VarList.SetT presumably caches the instance for reuse
    // by a matching GetVarList — confirm against the free-list implementation.
    var.Clear();
    free_VarList.SetT(var);
}
/// <summary>
/// Runs the rewrite script in <c>ScriptFile</c> against <paramref name="url"/>.
/// A matching "^..." or "http..." header line arms execution (<c>bExec</c>); the
/// "name = expr" lines that follow are evaluated into <c>vars</c> until a
/// terminal assignment ("result", "error", a verified "test", an http-less "rs")
/// or a blank line ends the run. On success the downloader's
/// Referer/FileName/SavePath are updated and <paramref name="url"/> becomes
/// vars["result"].
/// </summary>
/// <param name="downloader">Receives progress/status reports and result metadata.</param>
/// <param name="url">In: URL to match. Out: rewritten URL when the script produced a result.</param>
/// <returns>Always true; failures surface as exceptions.</returns>
/// <exception cref="WebException">Thrown for a script-declared "error", or propagated from evaluation.</exception>
/// <exception cref="IOException">Propagated unchanged (e.g. ScriptFile unreadable).</exception>
/// <exception cref="Exception">Any other script error, after 3 retries are exhausted.</exception>
public static bool Parse(Downloader downloader, ref string url)
{
    VarList vars = new VarList();
// Re-entered via goto when the script rewrites "url": retry budget is reset.
reset:
    int retrys = 3;
// Re-entered from the catch block for transient, non-Web/IO errors.
retry:
    bool bExec = false;
    string name = null;
    string script = null;
    string s;
    int line = 0;
    int i;
    vars["url"] = url;
    try
    {
        foreach (string ss in File.ReadAllLines(ScriptFile))
        {
            s = script = ss.TrimStart();
            line++; // line number
            if (bExec)
            {
                if (string.IsNullOrEmpty(s))
                {
                    break; // Blank line, End
                }
                if (s.StartsWith("^"))
                {
                    continue; // fall through
                }
                // Each active line is "name = expression".
                // NOTE(review): a line without '=' makes IndexOf return -1 and
                // Substring throw — that lands in the retry/catch path below.
                i = s.IndexOf('=');
                name = s.Substring(0, i).TrimEnd();
                s = s.Substring(i + 1);
                vars[name] = Evaluate(ref s, vars);
                if (Trace)
                {
                    downloader.ReportStatus(string.Format("[{0}] {1} = {2}\r\n", line, name, vars[name]));
                }
                if (name == "result")
                {
                    break;
                }
                else if (name == "error")
                {
                    // Script-declared failure: surface it as a WebException.
                    s = vars[name];
                    if (!string.IsNullOrEmpty(s))
                    {
                        throw new WebException(s);
                    }
                }
                else if (name == "test") // check result
                {
                    // HEAD request verifies the candidate URL is live.
                    WebHeaderCollection headers = Web.HttpHead(vars["test"]);
                    if (headers != null)
                    {
                        vars["result"] = vars["test"];
                        break; // Got a valid result, End
                    }
                }
                else if (name == "url") // url changed
                {
                    // Restart the whole script against the new URL.
                    url = vars["url"];
                    downloader.ReportProgress(Downloader.CHANGEURL, url);
                    vars.Clear();
                    goto reset;
                }
                else if (name == "regexp") // find match links
                {
                    // Scan the page for quoted URLs and report each one
                    // (absolutized against the current url) as a new download.
                    MatchCollection m = Regex.Matches(Web.HttpGet(url), vars[name]);
                    if (m.Count > 0)
                    {
                        i = 0;
                        while (i < m.Count)
                        {
                            s = Util.GetSubStr(m[i++].Value, "\"", "\"");
                            if (s.StartsWith("//"))
                            {
#if false
                                s = Regex.Match(url, "https?:").Value + s;
#else
                                continue; // skip protocol-relative links entirely
#endif
                            }
                            else if (s.StartsWith("/"))
                            {
                                // Root-relative: prepend scheme://host/.
                                s = Regex.Match(url, "https?://[^/]+/").Value + s.Remove(0, 1);
                            }
                            else
                            {
                                // Document-relative: resolve against url's directory.
                                s = url.Remove(url.LastIndexOf("/") + 1) + s;
                            }
                            downloader.ReportProgress(Downloader.NEWURL, s);
                        }
                    }
                    break;
                }
                else if (name == "rs" && !vars["rs"].Contains("http")) // no result
                {
                    break; // End
                }
            }
            else if (s.StartsWith("^") || s.StartsWith("http"))
            {
                // Header line: arm execution when the pattern matches the current URL.
                bExec = Regex.IsMatch(url, s, RegexOptions.IgnoreCase);
            }
        }
        if (bExec)
        {
            // Script matched: publish the results onto the downloader.
            downloader.Referer = string.IsNullOrEmpty(vars["referer"]) ? url : vars["referer"];
            url = vars["result"];
            downloader.FileName = HttpUtility.HtmlDecode(vars["filename"]);
            if (!string.IsNullOrEmpty(vars["dir"]))
            {
                downloader.SavePath += "\\" + vars["dir"];
            }
        }
    }
    catch (Exception ex)
    {
        // Web/IO failures are the caller's problem; rethrow without wrapping.
        if (ex is WebException || ex is IOException)
        {
            throw; //ex;
        }
        // Anything else is treated as transient: retry up to 3 times, then
        // report the offending script line.
        if (retrys-- > 0)
        {
            goto retry;
        }
        throw new Exception(string.Format("Script got error: [{0}] {1}", line, script));
    }
    finally
    {
        vars.Clear();
    }
    return(true);
}