/// <summary>
/// Adds <paramref name="comment"/> to the post identified by <paramref name="postid"/>.
/// </summary>
/// <param name="postid">Id of the entry the comment targets.</param>
/// <param name="comment">The comment to persist.</param>
/// <returns>
/// <see cref="CommentSaveState.SiteCommentsDisabled"/> when comments are off site-wide,
/// <see cref="CommentSaveState.NotFound"/> when the post does not exist,
/// <see cref="CommentSaveState.PostCommentsDisabled"/> when the comment window has closed,
/// otherwise <see cref="CommentSaveState.Added"/>.
/// </returns>
public CommentSaveState AddComment(string postid, Comment comment)
{
	// Site-wide switch: nothing else matters if comments are disabled.
	if (!dasBlogSettings.SiteConfiguration.EnableComments)
	{
		return CommentSaveState.SiteCommentsDisabled;
	}

	var entry = dataService.GetEntry(postid);
	if (entry == null)
	{
		return CommentSaveState.NotFound;
	}

	// Comments close DaysCommentsAllowed days after the post was created.
	// (The previous nested EnableComments re-check was redundant: we already
	// returned early above when comments are disabled.)
	var commentCutoff = DateTime.UtcNow.AddDays(-1 * dasBlogSettings.SiteConfiguration.DaysCommentsAllowed);
	if (commentCutoff > entry.CreatedUtc)
	{
		return CommentSaveState.PostCommentsDisabled;
	}

	dataService.AddComment(comment);
	return CommentSaveState.Added;
}
/// <summary>
/// Adds <paramref name="comment"/> to the post identified by <paramref name="postid"/>,
/// filtering its HTML and optionally notifying users by mail.
/// </summary>
/// <param name="postid">Id of the entry the comment targets.</param>
/// <param name="comment">The comment to persist; its Content is HTML-filtered in place.</param>
/// <returns>
/// <see cref="CommentSaveState.SiteCommentsDisabled"/> when comments are off (site-wide or per-post),
/// <see cref="CommentSaveState.NotFound"/> when the post does not exist,
/// <see cref="CommentSaveState.PostCommentsDisabled"/> when the comment window has closed,
/// otherwise <see cref="CommentSaveState.Added"/>.
/// </returns>
public CommentSaveState AddComment(string postid, Comment comment)
{
	// Site-wide switch first: no need to look anything up if comments are off.
	if (!dasBlogSettings.SiteConfiguration.EnableComments)
	{
		return CommentSaveState.SiteCommentsDisabled;
	}

	var entry = dataService.GetEntry(postid);

	// BUG FIX: entry.AllowComments was previously dereferenced BEFORE the
	// null check, so a comment against a missing post threw
	// NullReferenceException instead of returning NotFound.
	if (entry == null)
	{
		return CommentSaveState.NotFound;
	}

	// Per-post switch.
	if (!entry.AllowComments)
	{
		return CommentSaveState.SiteCommentsDisabled;
	}

	// Comments close DaysCommentsAllowed days after the post was created.
	var commentCutoff = DateTime.UtcNow.AddDays(-1 * dasBlogSettings.SiteConfiguration.DaysCommentsAllowed);
	if (commentCutoff > entry.CreatedUtc)
	{
		return CommentSaveState.PostCommentsDisabled;
	}

	// FilterHtml html encodes anything we don't like
	comment.Content = dasBlogSettings.FilterHtml(comment.Content);

	if (dasBlogSettings.SiteConfiguration.SendCommentsByEmail)
	{
		// Queue notification mails alongside the comment save.
		var actions = ComposeMailForUsers(entry, comment);
		dataService.AddComment(comment, actions);
	}
	else
	{
		dataService.AddComment(comment);
	}

	return CommentSaveState.Added;
}
/// <summary>
/// Adds <paramref name="comment"/> to the post identified by <paramref name="postid"/>.
/// </summary>
/// <param name="postid">Id of the entry the comment targets.</param>
/// <param name="comment">The comment to persist.</param>
/// <returns>
/// <see cref="CommentSaveState.NotFound"/> when the post does not exist,
/// otherwise <see cref="CommentSaveState.Added"/>.
/// </returns>
public CommentSaveState AddComment(string postid, Comment comment)
{
	// Look up the target post; the comment is only attached when it exists.
	Entry entry = _dataService.GetEntry(postid);
	if (entry == null)
	{
		return CommentSaveState.NotFound;
	}

	// Are comments allowed
	_dataService.AddComment(comment);
	return CommentSaveState.Added;
}
// Comment API HTTP handler: accepts an RSS <item> POSTed as XML and stores it
// as a comment on the entry identified by the "guid" query-string parameter.
// Responds 200 on success, 403 when the post disallows comments, 404 when the
// post does not exist; non-matching requests fall through untouched.
public void ProcessRequest(HttpContext context) { 
	// Only handle XML POSTs that identify a target entry.
	if (context.Request.ContentType == "text/xml" && context.Request.RequestType == "POST" && context.Request.QueryString["guid"] != null) { 
		try { 
			ILoggingDataService logService = LoggingDataServiceFactory.GetService(SiteConfig.GetLogPathFromCurrentContext()); 
			IBlogDataService dataService = BlogDataServiceFactory.GetService(SiteConfig.GetContentPathFromCurrentContext(), logService); 
			DataCache cache = CacheFactory.GetCache(); 
			Entry entry = dataService.GetEntry(context.Request.QueryString["guid"]); 
			if (entry != null && DasBlog.Web.Core.SiteUtilities.AreCommentsAllowed(entry, SiteConfig.GetSiteConfig())) { 
				// The request body is expected to be a serialized RSS item.
				XmlSerializer ser = new XmlSerializer(typeof(RssItem)); 
				RssItem item = (RssItem)ser.Deserialize(context.Request.InputStream); 
				if (item != null) { 
					Comment c = new Comment(); 
					c.Initialize(); 
					// Pull the author name from the Dublin Core <dc:creator>
					// extension element, if the item carries one.
					foreach (XmlElement el in item.anyElements) { 
						if (el.NamespaceURI == "http://purl.org/dc/elements/1.1/" && el.LocalName == "creator") { 
							c.Author = el.InnerText; 
							break; 
						} 
					} 
					c.AuthorEmail = item.Author; 
					c.AuthorHomepage = item.Link; 
					c.AuthorIPAddress = context.Request.UserHostAddress; 
					// HTML-encode the body so posted markup is rendered inert.
					c.Content = context.Server.HtmlEncode(item.Description); 
					c.TargetEntryId = entry.EntryId; 
					c.TargetTitle = ""; 
					dataService.AddComment(c); 
					// TODO: no comment mail? 
					// break the caching 
					cache.Remove("BlogCoreData"); 
					context.Response.StatusCode = 200; 
					context.Response.SuppressContent = true; 
					context.Response.End(); 
				} 
			} 
			// NOTE(review): when AreCommentsAllowed is false for a reason other
			// than entry.AllowComments (e.g. an expired comment window), none of
			// the branches below match and the default response goes out with
			// status 200 — confirm that is intentional.
			else if (entry != null && !entry.AllowComments) { 
				context.Response.StatusCode = 403; // Forbidden 
				context.Response.SuppressContent = true; 
				context.Response.End(); 
			} 
			else if (entry == null) { 
				context.Response.StatusCode = 404; // Not Found 
				context.Response.SuppressContent = true; 
				context.Response.End(); 
			} 
		} 
		// NOTE(review): in classic ASP.NET, Response.End aborts the thread with
		// an exception, which this broad catch will trace as an error along
		// with genuine failures — verify the log noise is acceptable.
		catch (Exception exc) { 
			ErrorTrace.Trace(System.Diagnostics.TraceLevel.Error, exc); 
		} 
	} 
}
// Entry point for the BlogWorksXML importer: reads BlogWorks archive and
// comment XML files from /from:, converts them, and writes dasBlog entries
// and comments to /to:. Returns 0 on success, -1 on bad usage.
static int Main(string[] args)
{
	Console.WriteLine("BlogWorksXML Importer");
	Console.WriteLine("(import supports BlogWorks version 1.1 and above)");

	// ---- command-line parsing ------------------------------------------
	foreach (string arg in args)
	{
		if (arg.Length > 6 && arg.ToLower().StartsWith("/from:"))
		{
			from = arg.Substring(6).Trim();
			// BUG FIX: the trailing-quote test previously indexed
			// from[from.Length], which is always out of range.
			if (from.Length >= 2 && from[0] == '\"' && from[from.Length - 1] == '\"')
			{
				from = from.Substring(1, from.Length - 2);
			}
		}
		// BUG FIX: the /id: and /to: prefixes are 4 characters, but the old
		// guards required arg.Length > 6, silently dropping short values
		// like "/id:1" or "/to:c:".
		else if (arg.Length > 4 && arg.ToLower().StartsWith("/id:"))
		{
			id = arg.Substring(4).Trim();
		}
		else if (arg.Length > 4 && arg.ToLower().StartsWith("/to:"))
		{
			to = arg.Substring(4).Trim();
			// BUG FIX: quote stripping previously indexed to[from.Length] —
			// wrong variable and off by one.
			if (to.Length >= 2 && to[0] == '\"' && to[to.Length - 1] == '\"')
			{
				to = to.Substring(1, to.Length - 2);
			}
		}
		else
		{
			break; // stop at the first unrecognized argument
		}
	}

	if (from == null || to == null || id == null || from.Length == 0 || to.Length == 0 || id.Length == 0)
	{
		Console.WriteLine("Usage: impbwxml /from:<blogworks data directory> [/id:<blogworks blog id, e.g. 001>] /to:<output directory>");
		Console.WriteLine("");
		return (-1);
	}

	IBlogDataService dataService = BlogDataServiceFactory.GetService(to, null);

	// ---- read entries ---------------------------------------------------
	Console.WriteLine("Importing entries from...");
	ArrayList tables = new ArrayList();
	ArrayList comments = new ArrayList();
	Hashtable commentRefs = new Hashtable();
	XmlDocument masterDoc = new XmlDocument();
	StringBuilder sb = new StringBuilder();

	// Merge every matching archive file into one synthetic <tables> document.
	sb.Append("<tables>");
	foreach (FileInfo file in new DirectoryInfo(from).GetFiles("*archive" + id + ".xml"))
	{
		Console.Write(" * " + file.Name);
		XmlDocument doc = new XmlDocument();
		doc.Load(file.FullName);
		foreach (XmlNode n in doc.SelectNodes("/baef/blog"))
		{
			sb.Append(n.OuterXml);
		}
		Console.WriteLine(" ... done.");
	}
	sb.Append("</tables>");
	masterDoc.Load(new StringReader(sb.ToString()));

	foreach (XmlNode node in masterDoc.FirstChild)
	{
		BlogWorksTable table = new BlogWorksTable();
		table.Name = node.Attributes["id"].Value;
		foreach (XmlNode child in node) // author with authorname, authormail childs
		{
			switch (child.Name)
			{
				case "author":
					// ignore: dasBlog is not yet multiuser enabled
					break;
				case "information":
					// commentthread; timestamp; language; categories
					foreach (XmlNode infoNode in child)
					{
						switch (infoNode.Name)
						{
							case "commentthread":
								// remember thread id -> entry name for the comment pass
								commentRefs.Add(infoNode.InnerText, table.Name);
								break;
							case "timestamp":
								table.Data[infoNode.Name] = UnixToHuman(infoNode.InnerText);
								break;
							case "language":
								// "en" is the default; only record other languages
								if (infoNode.InnerText != "en")
								{
									table.Data[infoNode.Name] = infoNode.InnerText;
								}
								break;
							case "categories":
								// flatten the category list into a ';'-separated string
								foreach (XmlNode catNode in infoNode)
								{
									if (catNode.InnerText.Length > 0)
									{
										if (table.Data.Contains("categories"))
										{
											table.Data["categories"] = (string)table.Data["categories"] + ";" + catNode.InnerText;
										}
										else
										{
											table.Data["categories"] = catNode.InnerText;
										}
									}
								}
								break;
						}
					}
					if (!table.Data.Contains("categories"))
					{
						table.Data["categories"] = "General";
					}
					break;
				case "text":
					// blogtitle (entry title); blogbody (entry body)
					foreach (XmlNode textNode in child)
					{
						switch (textNode.Name)
						{
							case "blogtitle":
							case "blogbody":
								table.Data[textNode.Name] = textNode.InnerText;
								break;
						}
					}
					break;
			}
		}
		tables.Add(table);
	}

	// ---- write entries --------------------------------------------------
	Console.WriteLine("Now writing entries....");
	foreach (BlogWorksTable table in tables)
	{
		Entry entry = new Entry();
		entry.CreatedUtc = table.When;
		entry.Title = table.Title;
		entry.Content = table.Text;
		entry.Categories = table.Categories;
		entry.EntryId = table.UniqueId;
		entry.Language = table.Language;
		dataService.SaveEntry(entry);
	}

	// ---- read comments --------------------------------------------------
	Console.WriteLine("Finished. Start reading comments...");
	masterDoc = new XmlDocument();
	sb = new StringBuilder();
	sb.Append("<comments>");
	foreach (FileInfo file in new DirectoryInfo(from).GetFiles("*comment" + id + ".xml"))
	{
		Console.Write(" * " + file.Name);
		XmlDocument doc = new XmlDocument();
		doc.Load(file.FullName);
		foreach (XmlNode n in doc.SelectNodes("/comments/thread"))
		{
			sb.Append(n.OuterXml);
		}
		Console.WriteLine(" ... done.");
	}
	sb.Append("</comments>");
	masterDoc.Load(new StringReader(sb.ToString()));

	foreach (XmlNode node in masterDoc.FirstChild)
	{
		string threadId = node.Attributes["id"].Value;
		// Skip threads that no imported entry referenced.
		if (!commentRefs.Contains(threadId))
		{
			continue;
		}
		foreach (XmlNode cmtNode in node) // comment's per thread
		{
			BlogWorksComment comment = new BlogWorksComment();
			comment.Name = (string)commentRefs[threadId]; // get corresponding entry Id
			foreach (XmlNode child in cmtNode) // comment elements
			{
				switch (child.Name)
				{
					case "name":
						comment.Data[child.Name] = child.InnerText; // Author
						break;
					case "datetime":
						comment.Data[child.Name] = DateTime.Parse(child.InnerText);
						break;
					case "email":
						comment.Data[child.Name] = child.InnerText;
						break;
					case "uri":
						// only keep URIs longer than the bare "http://" prefix
						if (child.InnerText.Length > 7 /* "http://".Length */)
						{
							comment.Data[child.Name] = child.InnerText;
						}
						break;
					case "text":
						comment.Data[child.Name] = child.InnerText;
						break;
					case "ip":
						comment.Data[child.Name] = child.Clone(); // anyElement
						break;
				}
			}
			comments.Add(comment);
		}
	}

	// ---- write comments -------------------------------------------------
	Console.WriteLine("Now writing comment entries....");
	foreach (BlogWorksComment cmt in comments)
	{
		Comment comment = new Comment();
		comment.Content = cmt.Text;
		comment.Author = cmt.Author;
		comment.TargetEntryId = cmt.UniqueId;
		comment.AuthorHomepage = cmt.AuthorHomepage;
		comment.AuthorEmail = cmt.AuthorEmail;
		comment.CreatedLocalTime = cmt.When;
		comment.CreatedUtc = cmt.When.ToUniversalTime();
		comment.anyElements = new XmlElement[] { cmt.Ip };
		dataService.AddComment(comment);
	}

	// BUG FIX: removed a duplicated "Finished. Start reading comments..."
	// message that was printed again after the comments were already written.
	Console.WriteLine("Finished successfully.");
	return (0);
}
// Entry point for the BlogX -> dasBlog porter: copies all posts and comments
// from -inputdir into a dasBlog content store at -outputdir.
// Returns SUCCESS (0) or a specific nonzero error code so automated tools
// can detect failure.
static int Main(string[] args)
{
	try
	{
		#region Command Line Parsing
		string inputdir = null;
		string outputdir = null;
		for (int i = 0; i < args.Length; ++i)
		{
			// ROBUSTNESS FIX: a flag given as the very last argument used to
			// make args[++i] throw IndexOutOfRangeException (reported as a
			// generic exception); it now falls through to the usage message.
			if (args[i] == "-inputdir" && i + 1 < args.Length)
			{
				inputdir = args[++i];
			}
			else if (args[i] == "-outputdir" && i + 1 < args.Length)
			{
				outputdir = args[++i];
			}
		}
		if (inputdir == null || outputdir == null)
		{
			PrintUsage();
			return (ERRORWRONGUSAGE);
		}

		// Canonicalize and expand path to full path
		inputdir = Path.GetFullPath(inputdir);
		outputdir = Path.GetFullPath(outputdir);
		if (!Directory.Exists(inputdir))
		{
			Console.WriteLine(inputdir + " does not exist or is not a directory");
			return (ERRORINPUTDIRNOTFOUND);
		}
		if (!Directory.Exists(outputdir))
		{
			Console.WriteLine(outputdir + " does not exist or is not a directory");
			return (ERROROUTPUTDIRNOTFOUND);
		}
		#endregion Command Line Parsing

		IBlogDataService dsInput = BlogDataServiceFactory.GetService(inputdir, null);
		IBlogDataService dsOutput = BlogDataServiceFactory.GetService(outputdir, null);

		Console.WriteLine("Porting posts");
		// Copy all dayentry files to the output store. No conversion is
		// required: the format and naming convention match between BlogX
		// and dasBlog.
		EntryCollection entries = dsInput.GetEntriesForDay(
			DateTime.MaxValue.AddDays(-2), TimeZone.CurrentTimeZone,
			String.Empty, int.MaxValue, int.MaxValue, String.Empty);
		foreach (Entry e in entries)
		{
			dsOutput.SaveEntry(e);
			Console.Write(".");
		}
		Console.WriteLine();
		Console.WriteLine("Posts successfully ported");

		Console.WriteLine("Porting comments");
		int commentCount = 0;
		// CLEANUP: Directory.GetFiles already returns paths rooted at
		// inputdir, so the previous Path.Combine(inputdir, commentFile)
		// was redundant.
		string[] commentFiles = Directory.GetFiles(inputdir, "*.dayextra.xml");
		foreach (string commentFile in commentFiles)
		{
			XPathDocument doc = new XPathDocument(commentFile);
			XPathNavigator nav = doc.CreateNavigator();
			XPathNodeIterator commentNodes = nav.Select("//Comment");
			while (commentNodes.MoveNext())
			{
				Comment comment = new Comment();
				XPathNavigator commentNode = commentNodes.Current;
				comment.Content = (string)commentNode.Evaluate("string(Content)");
				comment.CreatedUtc = DateTime.Parse((string)commentNode.Evaluate("string(Created)"));
				comment.ModifiedUtc = DateTime.Parse((string)commentNode.Evaluate("string(Modified)"));
				comment.EntryId = (string)commentNode.Evaluate("string(EntryId)");
				comment.TargetEntryId = (string)commentNode.Evaluate("string(TargetEntryId)");
				comment.Author = (string)commentNode.Evaluate("string(Author)");
				comment.AuthorEmail = (string)commentNode.Evaluate("string(AuthorEmail)");
				comment.AuthorHomepage = (string)commentNode.Evaluate("string(AuthorHomepage)");
				dsOutput.AddComment(comment);
				Console.Write(".");
				++commentCount;
			}
		}
		Console.WriteLine();
		Console.WriteLine("{0} comments successfully imported!", commentCount);
	}
	catch (Exception e)
	{
		// Return nonzero so automated tools can tell it failed
		Console.WriteLine(e.ToString());
		return (ERROREXCEPTION);
	}
	return (SUCCESS);
}
/// <summary>
/// Scrapes comments for previously-imported Radio Userland posts from the
/// Radio comment server and injects them into the dasBlog content store.
/// Assumes the Radio posts themselves were already imported (their entry ids
/// are numeric rather than GUIDs).
/// </summary>
/// <param name="userId">Radio user id; required.</param>
/// <param name="contentDirectory">dasBlog content directory; required.</param>
/// <param name="commentServer">Comment server host; falls back to DefaultCommentServer when null/empty.</param>
/// <returns>0 on completion.</returns>
/// <exception cref="ArgumentException">When any required argument is missing or empty.</exception>
public static int Import(string userId, string contentDirectory, string commentServer)
{
	if (commentServer == null || commentServer.Length == 0)
	{
		// Set default comment server
		commentServer = DefaultCommentServer;
	}
	if (commentServer == null || userId == null || contentDirectory == null ||
		commentServer.Length == 0 || userId.Length == 0 || contentDirectory.Length == 0)
	{
		throw new ArgumentException("commentServer, userId and contentDirectory are required.");
	}

	// Collect the entry ids of posts that came from Radio.
	ArrayList entriesWithCommentsToFetch = new ArrayList();
	Console.WriteLine("Importing entries...");
	IBlogDataService dataService = BlogDataServiceFactory.GetService(contentDirectory, null);
	EntryCollection entries = dataService.GetEntriesForDay(DateTime.MaxValue.AddDays(-2), TimeZone.CurrentTimeZone, String.Empty, int.MaxValue, int.MaxValue, String.Empty);
	foreach (Entry e in entries)
	{
		// Radio entry ids are numbers, not GUIDs: if the id parses as a
		// long, the post was imported from Radio.
		try
		{
			long.Parse(e.EntryId);
			Console.WriteLine(String.Format("Found Imported Radio Entry: {0}", e.EntryId));
			entriesWithCommentsToFetch.Add(e.EntryId);
		}
		catch {} // non-numeric id -> not a Radio post; skip
	}

	// PERF FIX: these compiled regexes are loop-invariant; previously they
	// were rebuilt (and recompiled) once per entry inside the loop below.
	Regex commentRegex = new Regex(@"class=""comment"">(?<comment>\r\n(.*[^<]))", RegexOptions.Multiline | RegexOptions.IgnoreCase | RegexOptions.Compiled);
	Regex datesRegex = new Regex(@"(?<month>\d+)/(?<day>\d+)/(?<year>\d+); (?<hour>\d+):(?<min>\d+):(?<sec>\d+) (?<meridian>[A|P]M)</div>", RegexOptions.Multiline | RegexOptions.IgnoreCase | RegexOptions.Compiled);
	Regex namesRegex = new Regex(@"<div class=""date"">(?<name>.*)( •)", RegexOptions.Multiline | RegexOptions.IgnoreCase | RegexOptions.Compiled);

	foreach (string entryId in entriesWithCommentsToFetch)
	{
		string commentHtml = FetchRadioCommentHtml(commentServer, userId, entryId);
		if (commentHtml.IndexOf("No comments found.") == -1)
		{
			MatchCollection commentMatches = commentRegex.Matches(commentHtml);
			MatchCollection datesMatches = datesRegex.Matches(commentHtml);
			MatchCollection namesMatches = namesRegex.Matches(commentHtml);

			// The three scrapes must line up one-to-one; if they don't,
			// the page layout has changed and pairing them up would be
			// wrong, so skip this entry.
			if (commentMatches.Count != datesMatches.Count || datesMatches.Count != namesMatches.Count)
			{
				continue;
			}

			// Walk the parallel match arrays: one comment per index.
			for (int i = 0; i < commentMatches.Count; i++)
			{
				string content = commentMatches[i].Groups["comment"].Value;
				string unparsedDate = datesMatches[i].Value;
				string name = namesMatches[i].Groups["name"].Value;
				string homepage = String.Empty;

				// Strip the trailing </div> the date regex captures.
				int divLoc = unparsedDate.IndexOf("</div>");
				if (divLoc != -1)
				{
					unparsedDate = unparsedDate.Remove(divLoc, 6);
				}
				DateTime date = DateTime.ParseExact(unparsedDate, @"M/d/yy; h:mm:ss tt", CultureInfo.InvariantCulture);

				// The captured name may be wrapped in an href whose URL is
				// the commenter's homepage.
				int hrefLoc = name.IndexOf(@"""");
				if (hrefLoc != -1)
				{
					// Get their HomePage URL
					int hrefLen = name.LastIndexOf(@"""") - hrefLoc;
					homepage = name.Substring(hrefLoc + 1, hrefLen - 1);
					// Get their name
					int nameLoc = name.IndexOf(@">");
					if (nameLoc != -1)
					{
						int nameLen = name.LastIndexOf(@"</") - nameLoc;
						name = name.Substring(nameLoc + 1, nameLen - 1);
					}
				}
				// else it's just the name, so leave "name" as-is

				Comment comment = new Comment();
				comment.Content = content.Trim();
				comment.Author = name;
				comment.TargetEntryId = entryId;
				comment.AuthorHomepage = homepage;
				comment.AuthorEmail = String.Empty;
				comment.CreatedLocalTime = date;
				comment.CreatedUtc = date.ToUniversalTime();
				// NOTE(review): comment.EntryId is never assigned here —
				// presumably Comment generates its own id; confirm.
				Console.WriteLine(String.Format("Fetched comment {0} from Radio:", comment.EntryId));
				Console.WriteLine(String.Format(" Author: {0}", comment.Author));
				Console.WriteLine(String.Format(" Site: {0}", comment.AuthorHomepage));
				Console.WriteLine(String.Format(" Date: {0}", comment.CreatedLocalTime));
				Console.WriteLine(String.Format(" Content: {0}", comment.Content));
				dataService.AddComment(comment);
			}
		}
		else
		{
			Console.WriteLine(String.Format("No comments for Radio Post {0}", entryId));
		}
	}
	return (0);
}