/// <summary>
/// Writes a CSV index ("content.csv") of the course's class segments into
/// <paramref name="courseDir"/>, one row per segment using its first resource link.
/// </summary>
/// <param name="courseContent">Parsed course structure (weeks and class segments).</param>
/// <param name="courseDir">Directory the CSV is written to; created if it does not exist.</param>
internal static void MakeCourseList(Course courseContent, string courseDir)
{
    StringBuilder csv = new StringBuilder();
    csv.Append("Week Number, Class Number, Class Name, Link, Name");
    csv.Append(Environment.NewLine);

    foreach (Week week in courseContent.Weeks)
    {
        foreach (ClassSegment classSegment in week.ClassSegments)
        {
            // BUG FIX: First() throws InvalidOperationException when a segment
            // has no resource links — skip such segments instead of crashing.
            if (!classSegment.ResourceLinks.Any())
            {
                continue;
            }

            string key = classSegment.ResourceLinks.Keys.First();
            string val = classSegment.ResourceLinks.Values.First();
            string newLine = string.Format("{0},{1},{2},{3},{4}{5}",
                week.WeekNum, classSegment.ClassNum, classSegment.ClassName,
                key, val, Environment.NewLine);
            csv.Append(newLine);
        }
    }

    Directory.CreateDirectory(courseDir);
    File.WriteAllText(Path.Combine(courseDir, "content.csv"), csv.ToString());
}
/// <summary>
/// Downloads the given course's content to <paramref name="destDir"/>; implemented per provider.
/// </summary>
/// <param name="courseName">Short name/slug of the course.</param>
/// <param name="destDir">Root destination directory.</param>
/// <param name="b">Reverse-week-order flag — the overrides name this parameter <c>reverse</c>.</param>
/// <param name="gzipCourses">If true, compress the downloaded course into an archive.</param>
/// <param name="courseContent">Parsed course structure to download.</param>
public abstract void Download(string courseName, string destDir, bool b, bool gzipCourses, Course courseContent);
/// <summary>
/// Download all the contents (quizzes, videos, lecture notes, ...) of the course
/// to the given destination directory.
/// </summary>
/// <param name="courseName">Short name/slug of the course.</param>
/// <param name="destDir">Root destination directory.</param>
/// <param name="reverse">If true, download the weeks in reverse order.</param>
/// <param name="gzipCourses">If true, compress the downloaded course into a .7z archive.</param>
/// <param name="weeklyTopics">Parsed course structure (weeks and class segments).</param>
public virtual void DownloadCourse(string courseName, string destDir, bool reverse, bool gzipCourses, Course weeklyTopics)
{
    if (!weeklyTopics.Weeks.Any())
    {
        Console.WriteLine(" Warning: no downloadable content found for {0}, did you accept the honour code?", courseName);
    }
    else
    {
        Console.WriteLine(" * Got all downloadable content for {0} ", courseName);
    }

    if (reverse)
    {
        weeklyTopics.Weeks.Reverse();
    }

    // where the course will be downloaded to; directories are created on demand below
    string courseDir = Path.Combine(destDir, courseName);
    Console.WriteLine("* " + courseName + " will be downloaded to " + courseDir);

    // download the standard pages
    Console.WriteLine(" - Downloading lecture/syllabus pages");
    Download(string.Format(_courseraCourse.HOME_URL, courseName), courseDir, "index.html");
    // BUG FIX: LectureUrlFromName already returns the final URL; the previous
    // string.Format(...) wrapper had no arguments and would throw FormatException
    // on any literal brace in the URL.
    Download(_courseraCourse.LectureUrlFromName(courseName), courseDir, "lectures.html");

    try
    {
        DownloadAbout(courseName, courseDir, _courseraCourse.ABOUT_URL);
    }
    catch (Exception e)
    {
        // best effort: the about page is optional
        Console.WriteLine("Warning: failed to download about file: {0}", e.Message);
    }

    // now download the actual content (videos, lecture notes, ...)
    foreach (Week week in weeklyTopics.Weeks)
    {
        // add a numeric prefix to the week directory name to ensure
        // chronological ordering
        string wkdirname = week.WeekNum.ToString().PadLeft(2, '0') + " - " + week.WeekName;

        // ensure the week dir exists
        Console.WriteLine(" - " + week.WeekName);
        string wkdir = Path.Combine(courseDir, wkdirname);
        Directory.CreateDirectory(wkdir);

        foreach (ClassSegment classSegment in week.ClassSegments)
        {
            // numeric prefix again ensures chronological ordering of classes
            string clsdirname = classSegment.ClassNum.ToString().PadLeft(2, '0') + " - " + classSegment.ClassName;

            // ensure the class dir exists (trimmed so resource filenames still fit)
            string clsdir = Path.Combine(wkdir, clsdirname);
            clsdir = Utilities.TrimPathPart(clsdir, _courseraCourse.Max_path_part_len - 15);
            Directory.CreateDirectory(clsdir);

            Console.WriteLine(" - Downloading resources for " + classSegment.ClassName);

            // download each resource; a single failure must not abort the course
            foreach (KeyValuePair<string, string> resourceLink in classSegment.ResourceLinks)
            {
                try
                {
                    Download(resourceLink.Key, clsdir, resourceLink.Value);
                }
                catch (Exception e)
                {
                    Console.WriteLine(" - failed: {0}, {1}", resourceLink.Key, e.Message);
                }
            }
        }
    }

    if (gzipCourses)
    {
        SevenZipCompressor zipCompressor = new SevenZipCompressor();
        zipCompressor.CompressDirectory(destDir, courseName + ".7z");
    }
}
/// <summary>
/// Download all the contents (videos, articles, ...) of the course to the given
/// destination directory. Resources are stored per week directory (class segments
/// do not get their own subdirectory here).
/// </summary>
/// <param name="courseName">Short name/slug of the course.</param>
/// <param name="destDir">Root destination directory.</param>
/// <param name="reverse">If true, download the weeks in reverse order.</param>
/// <param name="gzipCourses">If true, compress the downloaded course into a .7z archive.</param>
/// <param name="courseContent">Parsed course structure (weeks and class segments).</param>
public void DownloadCourse(string courseName, string destDir, bool reverse, bool gzipCourses, Course courseContent)
{
    if (!courseContent.Weeks.Any())
    {
        Console.WriteLine(" Warning: no downloadable content found for {0}, did you accept the honour code?", courseName);
    }
    else
    {
        Console.WriteLine(" * Got all downloadable content for {0} ", courseName);
    }

    if (reverse)
    {
        courseContent.Weeks.Reverse();
    }

    // where the course will be downloaded to; directories are created on demand below
    string courseDir = Path.Combine(destDir, courseName);
    Console.WriteLine("* " + courseName + " will be downloaded to " + courseDir);

    // download the standard pages
    Console.WriteLine(" - Downloading lecture/syllabus pages");
    Download(string.Format(_futureleanCourse.HOME_URL, courseName), courseDir, "index.html");
    // BUG FIX: LectureUrlFromName already returns the final URL; the previous
    // string.Format(...) wrapper had no arguments and would throw FormatException
    // on any literal brace in the URL.
    Download(_futureleanCourse.LectureUrlFromName(courseName), courseDir, "lectures.html");

    // now download the actual content (videos, lecture notes, ...)
    foreach (Week week in courseContent.Weeks)
    {
        // add a numeric prefix to the week directory name to ensure
        // chronological ordering
        string wkdirname = week.WeekNum.ToString().PadLeft(2, '0') + " - " + week.WeekName;

        // ensure the week dir exists
        Console.WriteLine(" - " + week.WeekName);
        string wkdir = Path.Combine(courseDir, wkdirname);
        Directory.CreateDirectory(wkdir);

        foreach (ClassSegment classSegment in week.ClassSegments)
        {
            string clsdirname = classSegment.ClassName;
            Console.WriteLine(" - Downloading resources for " + clsdirname);

            // download each resource into the week directory;
            // a single failure must not abort the course
            foreach (KeyValuePair<string, string> resourceLink in classSegment.ResourceLinks)
            {
                try
                {
                    Download(resourceLink.Key, wkdir, resourceLink.Value);
                }
                catch (Exception e)
                {
                    Console.WriteLine(" - failed: {0}, {1}", resourceLink.Key, e.Message);
                }
            }
        }
    }

    if (gzipCourses)
    {
        SevenZipCompressor zipCompressor = new SevenZipCompressor();
        zipCompressor.CompressDirectory(destDir, courseName + ".7z");
    }
}
/// <summary>
/// Scrapes the course's weekly pages and returns every downloadable resource
/// (video files and article pages) as a populated <see cref="Course"/>.
/// </summary>
/// <param name="courseName">Short name/slug of the course.</param>
/// <returns>The populated course, or null when the page could not be parsed.</returns>
public override Course GetDownloadableContent(string courseName)
{
    // get the lecture url
    string course_url = LectureUrlFromName(courseName);

    Course courseContent = new Course(courseName);
    Console.WriteLine("* Collecting downloadable content from " + course_url);

    // get the course name, and redirect to the course lecture page
    string vidpage = _client.DownloadString(course_url);

    HtmlDocument htmlDoc = new HtmlDocument();
    htmlDoc.LoadHtml(vidpage);

    // ParseErrors is an ArrayList containing any errors from the Load statement
    if (htmlDoc.ParseErrors != null && htmlDoc.ParseErrors.Any())
    {
        // Handle any parse errors as required
    }
    else if (htmlDoc.DocumentNode != null)
    {
        // extract the weekly classes
        HtmlNodeCollection weeks = htmlDoc.DocumentNode.SelectNodes("//li[contains(concat(' ', @class, ' '), ' todonav_item week ')]");
        if (weeks != null)
        {
            // for each weekly class, go to the page and find the actual content there
            int i = 1;
            foreach (HtmlNode week in weeks)
            {
                Console.WriteLine();
                Console.WriteLine("* Week " + i + " of " + weeks.Count);
                HtmlNode a = week.SelectSingleNode("a");
                string weekLink = a.Attributes["href"].Value;
                string weekPage = _client.DownloadString(BASE_URL + weekLink);
                HtmlDocument weekDoc = new HtmlDocument();
                weekDoc.LoadHtml(weekPage);

                HtmlNode h3txt = weekDoc.DocumentNode.SelectSingleNode("//h3[contains(concat(' ', @class, ' '), ' headline ')]");
                string weekTopic = Utilities.sanitise_filename(h3txt.InnerText.Trim());
                weekTopic = Utilities.TrimPathPart(weekTopic, Max_path_part_len);
                Week weeklyContent = new Week(weekTopic);
                weeklyContent.WeekNum = i++;

                HtmlNodeCollection weekSteps = weekDoc.DocumentNode.SelectNodes("//li[contains(concat(' ', @class, ' '), ' step ')]");
                int j = 1;
                foreach (HtmlNode weekStep in weekSteps)
                {
                    Utilities.DrawProgressBar(j, weekSteps.Count, 20, '=');
                    Dictionary<string, string> resourceLinks = new Dictionary<string, string>();
                    HtmlNode weekStepAnchor = weekStep.SelectSingleNode("a");
                    string stepNumber = weekStepAnchor.SelectSingleNode("span/div").InnerText;
                    string stepName = weekStepAnchor.SelectSingleNode("div/div/h5").InnerText;
                    string stepType = weekStepAnchor.SelectSingleNode("div/div/span").InnerText;
                    // step numbers look like "<week>.<item>" per the Split below
                    string weekNumber = stepNumber.Trim().Split('.')[0].PadLeft(2, '0');
                    string videoNumber = stepNumber.Trim().Split('.')[1].PadLeft(2, '0');
                    // BUG FIX: the RemoveColon() result was previously discarded —
                    // strings are immutable, so the call was a no-op.
                    stepName = stepName.RemoveColon();
                    stepName = Utilities.sanitise_filename(stepName);
                    stepName = Utilities.TrimPathPart(stepName, Max_path_part_len);
                    string classname = string.Join("-", weekNumber, videoNumber, stepName);
                    string weekStepAnchorHref = weekStepAnchor.Attributes["href"].Value;

                    if (stepType == "video")
                    {
                        // fetch the step page and pull the <source> element's URL
                        string weekStepVideoPage = _client.DownloadString(BASE_URL + weekStepAnchorHref);
                        HtmlDocument weekStepVideoDoc = new HtmlDocument();
                        weekStepVideoDoc.LoadHtml(weekStepVideoPage);
                        HtmlNode videoObject = weekStepVideoDoc.DocumentNode.SelectSingleNode("//source");
                        string vidUrl = videoObject.Attributes["src"].Value;
                        string fn = Path.ChangeExtension(classname, "mp4");
                        resourceLinks.Add("http:" + vidUrl, fn);
                    }
                    else
                    {
                        // non-video steps are saved as html pages
                        resourceLinks.Add(BASE_URL + weekStepAnchorHref, Path.ChangeExtension(classname, "html"));
                    }

                    ClassSegment weekClasses = new ClassSegment(classname);
                    weekClasses.ClassNum = j++;
                    weekClasses.ResourceLinks = resourceLinks;
                    weeklyContent.ClassSegments.Add(weekClasses);
                }
                courseContent.Weeks.Add(weeklyContent);
            }
            return courseContent;
        }
    }
    return null;
}
/// <summary>
/// Writes the course CSV index, then delegates the actual content download to a
/// <see cref="FutureLearnDownloader"/>.
/// </summary>
/// <param name="courseName">Short name/slug of the course.</param>
/// <param name="destDir">Root destination directory.</param>
/// <param name="reverse">If true, download the weeks in reverse order.</param>
/// <param name="gzipCourses">If true, compress the downloaded course into an archive.</param>
/// <param name="courseContent">Parsed course structure to download.</param>
public override void Download(string courseName, string destDir, bool reverse, bool gzipCourses, Course courseContent)
{
    string courseDir = Path.Combine(destDir, courseName);
    MakeCourseList(courseContent, courseDir);

    FutureLearnDownloader downloader = new FutureLearnDownloader(this);
    downloader.DownloadCourse(courseName, destDir, reverse, gzipCourses, courseContent);
}
/// <summary>
/// Scrapes the course's weekly pages and returns every downloadable resource
/// (video files and article pages) as a populated <see cref="Course"/>.
/// </summary>
/// <param name="courseName">Short name/slug of the course.</param>
/// <returns>The populated course, or null when the page could not be parsed.</returns>
public override Course GetDownloadableContent(string courseName)
{
    // get the lecture url
    string course_url = LectureUrlFromName(courseName);

    Course courseContent = new Course(courseName);
    Console.WriteLine("* Collecting downloadable content from " + course_url);

    // get the course name, and redirect to the course lecture page
    string vidpage = _client.DownloadString(course_url);

    HtmlDocument htmlDoc = new HtmlDocument();
    htmlDoc.LoadHtml(vidpage);

    // ParseErrors is an ArrayList containing any errors from the Load statement
    if (htmlDoc.ParseErrors != null && htmlDoc.ParseErrors.Any())
    {
        // Handle any parse errors as required
    }
    else if (htmlDoc.DocumentNode != null)
    {
        // extract the weekly classes
        HtmlNodeCollection weeks = htmlDoc.DocumentNode.SelectNodes("//li[contains(concat(' ', @class, ' '), ' todonav_item week ')]");
        if (weeks != null)
        {
            // for each weekly class, go to the page and find the actual content there
            int i = 1;
            foreach (HtmlNode week in weeks)
            {
                Console.WriteLine();
                Console.WriteLine("* Week " + i + " of " + weeks.Count);
                HtmlNode a = week.SelectSingleNode("a");
                string weekLink = a.Attributes["href"].Value;
                string weekPage = _client.DownloadString(BASE_URL + weekLink);
                HtmlDocument weekDoc = new HtmlDocument();
                weekDoc.LoadHtml(weekPage);

                HtmlNode h3txt = weekDoc.DocumentNode.SelectSingleNode("//h3[contains(concat(' ', @class, ' '), ' headline ')]");
                string weekTopic = Utilities.sanitise_filename(h3txt.InnerText.Trim());
                weekTopic = Utilities.TrimPathPart(weekTopic, Max_path_part_len);
                Week weeklyContent = new Week(weekTopic);
                weeklyContent.WeekNum = i++;

                HtmlNodeCollection weekSteps = weekDoc.DocumentNode.SelectNodes("//li[contains(concat(' ', @class, ' '), ' step ')]");
                int j = 1;
                foreach (HtmlNode weekStep in weekSteps)
                {
                    Utilities.DrawProgressBar(j, weekSteps.Count, 20, '=');
                    Dictionary<string, string> resourceLinks = new Dictionary<string, string>();
                    HtmlNode weekStepAnchor = weekStep.SelectSingleNode("a");
                    string stepNumber = weekStepAnchor.SelectSingleNode("span/div").InnerText;
                    string stepName = weekStepAnchor.SelectSingleNode("div/div/h5").InnerText;
                    string stepType = weekStepAnchor.SelectSingleNode("div/div/span").InnerText;
                    // step numbers look like "<week>.<item>" per the Split below
                    string weekNumber = stepNumber.Trim().Split('.')[0].PadLeft(2, '0');
                    string videoNumber = stepNumber.Trim().Split('.')[1].PadLeft(2, '0');
                    // BUG FIX: the RemoveColon() result was previously discarded —
                    // strings are immutable, so the call was a no-op.
                    stepName = stepName.RemoveColon();
                    stepName = Utilities.sanitise_filename(stepName);
                    stepName = Utilities.TrimPathPart(stepName, Max_path_part_len);
                    string classname = string.Join("-", weekNumber, videoNumber, stepName);
                    string weekStepAnchorHref = weekStepAnchor.Attributes["href"].Value;

                    if (stepType == "video")
                    {
                        // fetch the step page and pull the <source> element's URL
                        string weekStepVideoPage = _client.DownloadString(BASE_URL + weekStepAnchorHref);
                        HtmlDocument weekStepVideoDoc = new HtmlDocument();
                        weekStepVideoDoc.LoadHtml(weekStepVideoPage);
                        HtmlNode videoObject = weekStepVideoDoc.DocumentNode.SelectSingleNode("//source");
                        string vidUrl = videoObject.Attributes["src"].Value;
                        string fn = Path.ChangeExtension(classname, "mp4");
                        resourceLinks.Add("http:" + vidUrl, fn);
                    }
                    else
                    {
                        // non-video steps are saved as html pages
                        resourceLinks.Add(BASE_URL + weekStepAnchorHref, Path.ChangeExtension(classname, "html"));
                    }

                    ClassSegment weekClasses = new ClassSegment(classname);
                    weekClasses.ClassNum = j++;
                    weekClasses.ResourceLinks = resourceLinks;
                    weeklyContent.ClassSegments.Add(weekClasses);
                }
                courseContent.Weeks.Add(weeklyContent);
            }
            return courseContent;
        }
    }
    return null;
}
/// <summary>
/// Given the video lecture URL of the course, return a list of all downloadable resources.
/// </summary>
/// <param name="courseName">Short name/slug of the course.</param>
/// <returns>The populated <see cref="Course"/>, or null when the page could not be parsed.</returns>
public override Course GetDownloadableContent(string courseName)
{
    // get the lecture url
    string course_url = LectureUrlFromName(courseName);

    Course courseContent = new Course(courseName);
    Console.WriteLine("* Collecting downloadable content from " + course_url);

    // get the course name, and redirect to the course lecture page
    string vidpage = get_page(course_url);

    HtmlDocument htmlDoc = new HtmlDocument();
    htmlDoc.LoadHtml(vidpage);

    // ParseErrors is an ArrayList containing any errors from the Load statement
    if (htmlDoc.ParseErrors != null && htmlDoc.ParseErrors.Any())
    {
        // Handle any parse errors as required
    }
    else if (htmlDoc.DocumentNode != null)
    {
        // extract the weekly classes
        HtmlNodeCollection weeks = htmlDoc.DocumentNode.SelectNodes("//div[contains(concat(' ', @class, ' '), ' course-item-list-header ')]");
        if (weeks != null)
        {
            // for each weekly class
            int i = 0;
            foreach (HtmlNode week in weeks)
            {
                Console.WriteLine();
                Console.WriteLine("* Week " + i + " of " + weeks.Count);
                HtmlNode h3 = week.SelectSingleNode("./h3");

                // sometimes the first week are the hidden sample lectures, catch this
                string h3txt;
                if (h3.InnerText.Trim().StartsWith("window.onload"))
                {
                    h3txt = "Sample Lectures";
                }
                else
                {
                    h3txt = h3.InnerText.Trim();
                }

                string weekTopic = Utilities.sanitise_filename(h3txt);
                weekTopic = Utilities.TrimPathPart(weekTopic, Max_path_part_len);
                Week weeklyContent = new Week(weekTopic);
                weeklyContent.WeekNum = i++;

                // get all the classes for the week
                HtmlNode ul = week.NextSibling;
                HtmlNodeCollection lis = ul.SelectNodes("li");

                // for each class (= lecture)
                int j = 0;
                foreach (HtmlNode li in lis)
                {
                    Utilities.DrawProgressBar(j, lis.Count, 20, '=');
                    Dictionary<string, string> resourceLinks = new Dictionary<string, string>();

                    // the name of this class
                    string className = li.SelectSingleNode("a").InnerText.Trim();
                    // BUG FIX: the RemoveColon() result was previously discarded —
                    // strings are immutable, so the call was a no-op.
                    className = className.RemoveColon();
                    className = Utilities.sanitise_filename(className);
                    className = Utilities.TrimPathPart(className, Max_path_part_len);

                    // collect all the resources for this class (ppt, pdf, mov, ..)
                    HtmlNodeCollection classResources = li.SelectNodes("./div[contains(concat(' ', @class, ' '), ' course-lecture-item-resource ')]/a");
                    foreach (HtmlNode classResource in classResources)
                    {
                        // get the hyperlink itself
                        string h = Utilities.clean_url(classResource.GetAttributeValue("href", ""));
                        if (string.IsNullOrEmpty(h))
                        {
                            continue;
                        }
                        // Sometimes the raw, uncompressed source videos are available as
                        // well. Don't download them as they are huge and available in
                        // compressed form anyway.
                        if (h.Contains("source_videos"))
                        {
                            Console.WriteLine(" - will skip raw source video " + h);
                        }
                        else if (!resourceLinks.ContainsKey(h))
                        {
                            // Dont set a filename here, that will be inferred from the week titles
                            resourceLinks.Add(h, className);
                        }
                    }

                    // check if the video is included in the resources, if not, try to download it directly
                    bool containsMp4 = resourceLinks.Any(s => s.Key.Contains(".mp4"));
                    if (!containsMp4)
                    {
                        HtmlNode ll = li.SelectSingleNode("./a[contains(concat(' ', @class, ' '), ' lecture-link ')]");
                        string lurl = Utilities.clean_url(ll.GetAttributeValue("data-modal-iframe", ""));
                        try
                        {
                            // NOTE(review): this async fetch + sleep + cancel appears to be a
                            // hack to warm up the lecture page before get_page — confirm it is
                            // still needed.
                            WebClient wc = new WebClient();
                            wc.DownloadStringCompleted += WcOnDownloadStringCompleted;
                            wc.DownloadStringAsync(new Uri(lurl));
                            System.Threading.Thread.Sleep(3000);
                            wc.CancelAsync();

                            string page = get_page(lurl);
                            HtmlDocument bb = new HtmlDocument();
                            // BUG FIX: previously this parsed the URL string itself
                            // (LoadHtml(lurl)) while the downloaded page went unused.
                            bb.LoadHtml(page);
                            HtmlNode selectSingleNode = bb.DocumentNode.SelectSingleNode("div");
                            // BUG FIX: guard against a missing <div> before touching OuterHtml
                            if (selectSingleNode == null || selectSingleNode.OuterHtml.Length < 1)
                            {
                                Console.WriteLine(string.Format(" Warning: Failed to find video for {0}", className));
                            }
                            else
                            {
                                string vurl = Utilities.clean_url(selectSingleNode.SelectSingleNode("src").OuterHtml);
                                // build the matching filename
                                string fn = Path.ChangeExtension(className, "mp4");
                                resourceLinks.Add(vurl, fn);
                            }
                        }
                        catch (Exception e)
                        {
                            // sometimes there is a lecture without a video (e.g.,
                            // genes-001) so this can happen.
                            Console.WriteLine(string.Format(" Warning: failed to open the direct video link {0}: {1}", lurl, e));
                        }
                    }

                    ClassSegment weekClasses = new ClassSegment(className);
                    weekClasses.ClassNum = j++;
                    weekClasses.ResourceLinks = resourceLinks;
                    weeklyContent.ClassSegments.Add(weekClasses);
                }
                courseContent.Weeks.Add(weeklyContent);
            }
            return courseContent;
        }
    }
    return null;
}
/// <summary>
/// Writes the course CSV index for the given course into the course directory.
/// The actual content download is currently disabled.
/// </summary>
/// <param name="courseName">Short name/slug of the course.</param>
/// <param name="destDir">Root destination directory.</param>
/// <param name="b">Reverse-week-order flag (unused here).</param>
/// <param name="gzipCourses">Compression flag (unused here).</param>
/// <param name="courseContent">Parsed course structure to index.</param>
public override void Download(string courseName, string destDir, bool b, bool gzipCourses, Course courseContent)
{
    string courseDir = Path.Combine(destDir, courseName);
    MakeCourseList(courseContent, courseDir);
}
/// <summary>
/// Download all the contents (quizzes, videos, lecture notes, ...) of the course
/// to the given destination directory.
/// </summary>
/// <param name="courseName">Short name/slug of the course.</param>
/// <param name="destDir">Root destination directory.</param>
/// <param name="reverse">If true, download the weeks in reverse order.</param>
/// <param name="gzipCourses">If true, compress the downloaded course into a .7z archive.</param>
/// <param name="weeklyTopics">Parsed course structure (weeks and class segments).</param>
public virtual void DownloadCourse(string courseName, string destDir, bool reverse, bool gzipCourses, Course weeklyTopics)
{
    if (!weeklyTopics.Weeks.Any())
    {
        Console.WriteLine(" Warning: no downloadable content found for {0}, did you accept the honour code?", courseName);
    }
    else
    {
        Console.WriteLine(" * Got all downloadable content for {0} ", courseName);
    }

    if (reverse)
    {
        weeklyTopics.Weeks.Reverse();
    }

    // where the course will be downloaded to; directories are created on demand below
    string courseDir = Path.Combine(destDir, courseName);
    Console.WriteLine("* " + courseName + " will be downloaded to " + courseDir);

    // download the standard pages
    Console.WriteLine(" - Downloading lecture/syllabus pages");
    Download(string.Format(_courseraCourse.HOME_URL, courseName), courseDir, "index.html");
    // BUG FIX: LectureUrlFromName already returns the final URL; the previous
    // string.Format(...) wrapper had no arguments and would throw FormatException
    // on any literal brace in the URL.
    Download(_courseraCourse.LectureUrlFromName(courseName), courseDir, "lectures.html");

    try
    {
        DownloadAbout(courseName, courseDir, _courseraCourse.ABOUT_URL);
    }
    catch (Exception e)
    {
        // best effort: the about page is optional
        Console.WriteLine("Warning: failed to download about file: {0}", e.Message);
    }

    // now download the actual content (videos, lecture notes, ...)
    foreach (Week week in weeklyTopics.Weeks)
    {
        // add a numeric prefix to the week directory name to ensure
        // chronological ordering
        string wkdirname = week.WeekNum.ToString().PadLeft(2, '0') + " - " + week.WeekName;

        // ensure the week dir exists
        Console.WriteLine(" - " + week.WeekName);
        string wkdir = Path.Combine(courseDir, wkdirname);
        Directory.CreateDirectory(wkdir);

        foreach (ClassSegment classSegment in week.ClassSegments)
        {
            // numeric prefix again ensures chronological ordering of classes
            string clsdirname = classSegment.ClassNum.ToString().PadLeft(2, '0') + " - " + classSegment.ClassName;

            // ensure the class dir exists (trimmed so resource filenames still fit)
            string clsdir = Path.Combine(wkdir, clsdirname);
            clsdir = Utilities.TrimPathPart(clsdir, _courseraCourse.Max_path_part_len - 15);
            Directory.CreateDirectory(clsdir);

            Console.WriteLine(" - Downloading resources for " + classSegment.ClassName);

            // download each resource; a single failure must not abort the course
            foreach (KeyValuePair<string, string> resourceLink in classSegment.ResourceLinks)
            {
                try
                {
                    Download(resourceLink.Key, clsdir, resourceLink.Value);
                }
                catch (Exception e)
                {
                    Console.WriteLine(" - failed: {0}, {1}", resourceLink.Key, e.Message);
                }
            }
        }
    }

    if (gzipCourses)
    {
        SevenZipCompressor zipCompressor = new SevenZipCompressor();
        zipCompressor.CompressDirectory(destDir, courseName + ".7z");
    }
}