/// <summary>
/// Uses ParseFolderIndex() to generate an array of url locations for files and subdirectories,
/// then recursively indexes every subdirectory discovered during the parse.
/// </summary>
/// <param name="directory">Directory to be affected.</param>
void ConstructIndex(WebDirectory directory)
{
    string[] entries = ParseFolderIndex(directory.URL, directory);

    if (entries != null)
    {
        // Record both the fully-qualified address and the bare name of each entry.
        foreach (string entry in entries)
        {
            directory.AddressIndex.Add(directory.URL + "/" + entry);
            directory.NameIndex.Add(entry);
            UI.UpdateProgressBar();
        }
    }

    // ParseFolderIndex populated SubDirectories above; walk each child in turn.
    foreach (WebDirectory subDirectory in directory.SubDirectories)
    {
        UI.UpdatePatchNotes("Parsing Subdirectory: \n" + subDirectory.URL);
        ConstructIndex(subDirectory);
    }
}
/// <summary>
/// Builds one WebDirectory per patch file listed in PatchData.Versions and appends it to
/// PatchData.PatchDirectories. Any failure is logged via LogHandler and swallowed
/// (the method never throws).
/// </summary>
/// <param name="handler">Patch handler passed through to every WebDirectory created.</param>
internal static void PopulatePatchDirectories(PatchHandler handler)
{
    try
    {
        for (int i = 0; i < PatchData.Versions.Count; i++)
        {
            List<PatchFile> files = PatchData.Versions[PatchData.Versions.Keys.ElementAt(i)];
            for (int n = 0; n < files.Count; n++)
            {
                // Parent path including the trailing '/': everything up to and including the
                // last separator (empty string when the path has no '/'). Replaces the old
                // manual segment-length arithmetic, which computed the same value.
                string master = files[n].filePath.Substring(0, files[n].filePath.LastIndexOf('/') + 1);

                WebDirectory tempDir = new WebDirectory(master, handler);
                tempDir.AddressIndex.Add(files[n].filePath);
                tempDir.NameIndex.Add(files[n].fileName);
                PatchData.PatchDirectories.Add(tempDir);
            }
        }
    }
    catch (Exception e)
    {
        LogHandler.LogErrors(e.ToString());
    }
}
/// <summary>
/// Parses an Apache-style HTML folder listing (lowercase "&lt;a href=" anchors).
/// Files are collected into the returned array; child folders (other than "../")
/// are added to <paramref name="directory"/>.SubDirectories.
/// </summary>
/// <param name="url">Initial parse location.</param>
/// <param name="directory">Directory object that receives discovered subdirectories.</param>
/// <returns>
/// File names parsed from the listing; an empty array when the server does not answer
/// with 200 OK; null when the request or parse throws (the error is logged).
/// </returns>
internal string[] ParseFolderIndex_Alpha(string url, WebDirectory directory)
{
    try
    {
        HttpWebRequest request = (HttpWebRequest)WebRequest.Create(url);
        request.Timeout = 3 * 60 * 1000;
        request.KeepAlive = true;

        // using guarantees the response is disposed even if parsing throws;
        // the old code leaked the response on any exception before Dispose().
        using (HttpWebResponse response = (HttpWebResponse)request.GetResponse())
        {
            if (response.StatusCode != HttpStatusCode.OK)
            {
                return new string[0];
            }

            List<string> fileLocations = new List<string>();
            string line;
            using (StreamReader reader = new StreamReader(response.GetResponseStream()))
            {
                while ((line = reader.ReadLine()) != null)
                {
                    int index = line.IndexOf("<a href=");
                    if (index >= 0)
                    {
                        // The href target is the second token when splitting on quotes.
                        string[] segments = line.Substring(index).Split('\"');
                        // TODO: file size could also be parsed from the remainder of this line.
                        if (!segments[1].Contains("/"))
                        {
                            fileLocations.Add(segments[1]);
                            UI.UpdatePatchNotes("Web File Found: " + segments[1]);
                            UI.UpdateProgressBar();
                        }
                        else if (segments[1] != @"../")
                        {
                            // Trailing '/' marks a folder; skip the parent-directory link.
                            directory.SubDirectories.Add(new WebDirectory(url + segments[1], this));
                            UI.UpdatePatchNotes("Web Directory Found: " + segments[1].Replace("/", string.Empty));
                        }
                    }
                    else if (line.Contains("</pre"))
                    {
                        break; // end of the listing block
                    }
                }
            }
            return fileLocations.ToArray<string>();
        }
    }
    catch (Exception e)
    {
        LogHandler.LogErrors(e.ToString(), this);
        LogHandler.LogErrors(url, this);
        return null;
    }
}
/// <summary>
/// HTTP Web request gathers data based on url parameter to build the web directory object.
/// Parses IIS-style listings (uppercase "&lt;/A&gt;" anchors): quoted href targets that look
/// like files (contain '.' or "Verinfo") are returned; others are added as subdirectories.
/// </summary>
/// <param name="url">Initial Parse Location</param>
/// <param name="directory">Directory object containing properties and methods for indexing and parsing contained folders.</param>
/// <returns>
/// Child file names parsed from html (href); an empty array when the server does not
/// answer 200 OK; null when the request or parse throws (the error is logged).
/// </returns>
internal string[] ParseFolderIndex(string url, WebDirectory directory)
{
    try
    {
        HttpWebRequest request = (HttpWebRequest)WebRequest.Create(url);
        request.Timeout = 3 * 60 * 1000;
        request.KeepAlive = true;

        // using guarantees the response is disposed even if parsing throws;
        // the old code leaked the response on any exception before Dispose().
        using (HttpWebResponse response = (HttpWebResponse)request.GetResponse())
        {
            if (response.StatusCode != HttpStatusCode.OK)
            {
                return new string[0];
            }

            List<string> fileLocations = new List<string>();
            using (StreamReader reader = new StreamReader(response.GetResponseStream()))
            {
                bool endMet = false;
                while (!endMet)
                {
                    string line = reader.ReadLine();
                    if (line == null)
                    {
                        // FIX: the old loop only stopped on "</html>" / "</pre"; if the
                        // stream ended first, ReadLine() returned null forever and the
                        // loop spun indefinitely. Treat end-of-stream as end-of-listing.
                        break;
                    }
                    if (line == "" || !line.Contains("</A>"))
                    {
                        continue; // only anchor lines carry entries in this listing format
                    }
                    if (line.Contains("</html>"))
                    {
                        endMet = true; // finish after processing this final line
                    }

                    // Quoted tokens alternate with markup when splitting on '"';
                    // keep only the tokens that contain no markup.
                    string[] segments = line.Replace("\\", "").Split('\"');
                    List<string> paths = new List<string>();
                    List<string> files = new List<string>();
                    for (int i = 0; i < segments.Length; i++)
                    {
                        if (!segments[i].Contains('<'))
                        {
                            paths.Add(segments[i]);
                        }
                    }
                    // First collected token is not a child entry; drop it. Guarded so a
                    // malformed line is skipped instead of aborting the whole parse.
                    if (paths.Count > 0)
                    {
                        paths.RemoveAt(0);
                    }

                    foreach (String s in paths)
                    {
                        string[] secondarySegments = s.Split('/');
                        if (s.Contains(".") || s.Contains("Verinfo"))
                        {
                            // Looks like a file: keep only the last path component.
                            files.Add(secondarySegments[secondarySegments.Length - 1]);
                        }
                        else
                        {
                            // Folder hrefs end with '/', so the name is the second-to-last component.
                            directory.SubDirectories.Add(new WebDirectory
                                (url + "/" + secondarySegments[secondarySegments.Length - 2], this));
                            UI.UpdatePatchNotes("Web Directory Found: " +
                                secondarySegments[secondarySegments.Length - 2]);
                        }
                    }

                    foreach (String s in files)
                    {
                        // '%' filters out url-encoded junk entries.
                        if (!String.IsNullOrEmpty(s) && !s.Contains('%'))
                        {
                            fileLocations.Add(s);
                            UI.UpdatePatchNotes("Web File Found: " + s);
                            UI.UpdateProgressBar();
                        }
                    }

                    if (line.Contains("</pre"))
                    {
                        break; // end of the listing block
                    }
                }
            }
            return fileLocations.ToArray<string>();
        }
    }
    catch (Exception e)
    {
        LogHandler.LogErrors(e.ToString(), this);
        LogHandler.LogErrors(url, this);
        return null;
    }
}