Example #1
        public XDoc Calendar(
            [DekiExtParam("Google calendar feed uri ")] XUri uri,
            [DekiExtParam("starting date (default: today)", true)] string startDate,
            [DekiExtParam("ending date (default: 7 days from today)", true)] string endDate,
            [DekiExtParam("calendar width (default: 800)", true)] float? width,
            [DekiExtParam("calendar height (default: 800)", true)] float? height
        ) {
            XDoc result = new XDoc("html");

            // try reading supplied dates
            DateTime start;
            DateTime end;
            if(!DateTime.TryParse(startDate, out start)) {
                start = DateTime.UtcNow;
            }
            if(!DateTime.TryParse(endDate, out end)) {
                end = DateTime.UtcNow.AddDays(7);
            }

            Plug calPlug = Plug.New(uri).With("start-min", start.ToString("s")).With("start-max", end.ToString("s"));
            DreamMessage response = calPlug.GetAsync().Wait();
            if(response.IsSuccessful) {
                XDoc doc = response.ToDocument();
                if(doc.HasName("feed")) {
                    XAtomFeed calFeed = new XAtomFeed(doc);
                    calFeed.UsePrefix("atom", "http://www.w3.org/2005/Atom");
                    XUri embedUri = calFeed["atom:link[@rel='alternate']/@href"].AsUri;
                    result = NewIFrame(embedUri, width ?? 800, height ?? 800);
                } else {

                    // BUGBUGBUG (steveb): the user provided an embeddable representation, so the start and end date parameters cannot be applied here.

                    result = NewIFrame(uri, width ?? 800, height ?? 800);
                }
            }
            return result;
        }
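
The Calendar function above calls a NewIFrame helper that is not part of this example. A minimal sketch of what such a helper might look like, assuming the same XDoc Start/Attr/End builder API used throughout these examples; the element structure is an illustrative guess, not the original implementation:

        private XDoc NewIFrame(XUri uri, float width, float height) {

            // wrap an <iframe> pointing at the embeddable calendar in an <html> root,
            // matching the XDoc("html") document that Calendar returns when the request fails
            return new XDoc("html")
                .Start("body")
                    .Start("iframe")
                        .Attr("src", uri.ToString())
                        .Attr("width", width.ToString())
                        .Attr("height", height.ToString())
                        .Attr("frameborder", "0")
                    .End()
                .End();
        }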
Example #2
        private Tuplet<MimeType, XDoc> MakeNewsFeed(IEnumerable<RecentChangeEntry> recentchanges, XUri feedUri, string feedTitle, FeedFormat format, DateTime since) {
            var resources = DekiContext.Current.Resources;
            var changes = new List<RecentChangeEntry>();
            DekiContext deki = DekiContext.Current;
            bool diffCacheEnabled = deki.Instance.RecentChangesDiffCaching;

            // check if we need to merge change entries
            MimeType mime = MimeType.XML;
            if((format == FeedFormat.ATOM_DAILY) || (format == FeedFormat.RAW_DAILY)) {

                // combine changes that occurred on the same day
                Dictionary<string, DigestLookupEntry> pageLookup = new Dictionary<string, DigestLookupEntry>();
                Dictionary<string, DigestLookupEntry> commentLookup = new Dictionary<string, DigestLookupEntry>();
                Dictionary<string, ulong> commentDescriptToCommentLookup = new Dictionary<string, ulong>();
                List<Dictionary<string, KeyValuePair<string, int>>> authors = new List<Dictionary<string, KeyValuePair<string, int>>>();
                int index = 0;
                foreach(var change in recentchanges) {
                    ulong pageId = change.CurId;
                    if(pageId == 0) {

                        // should never happen, but if it does, just ignore this entry
                        continue;
                    }
                    DateTime timestamp = change.Timestamp;
                    NS ns = change.Namespace;
                    RC type = change.Type;
                    string author = change.Username;
                    string fullname = change.Fullname ?? change.Username;

                    // check if we are processing a comment or a page change
                    if(Utils.IsPageComment(type)) {
                        ulong commentId = change.CmntId ?? 0;
                        string comment = change.Comment;
                        if(commentId == 0) {

                            // NOTE (steveb): because the recentchanges table is brain dead, we sometimes cannot associate a comment change with the comment that affected it;
                            //                luckily, when that happens, there is a good chance that the description for the change is the same as an earlier one;
                            //                so all we need to do is look up the previous change using the current change description.

                            if(!commentDescriptToCommentLookup.TryGetValue(comment ?? string.Empty, out commentId)) {
                                continue;
                            }
                        } else if(comment != null) {
                            commentDescriptToCommentLookup[comment] = commentId;
                        }

                        // remove revision number (not applicable)
                        change.Revision = 0;

                        // check if we need to merge this change with a previous one
                        DigestLookupEntry entry;
                        string key = string.Format("{0}-{1}", commentId, timestamp.DayOfYear);
                        if(commentLookup.TryGetValue(key, out entry)) {
                            var item = changes[entry.Index];
                            ++item.EditCount;

                            // append the change comments
                            if(item.ExtraComments == null) {
                                item.ExtraComments = new List<Tuplet<string, string, string>>();

                                // first add the existing comment to the list
                                item.ExtraComments.Add(new Tuplet<string, string, string>(item.Username, item.Fullname, item.Comment));
                            }
                            item.ExtraComments.Add(new Tuplet<string, string, string>(change.Username, change.Fullname, change.Comment));

                            // update edit count for author
                            KeyValuePair<string, int> authorEdits;
                            authors[entry.Index].TryGetValue(author, out authorEdits);
                            authors[entry.Index][author] = new KeyValuePair<string, int>(fullname, authorEdits.Value + 1);
                        } else {
                            change.EditCount = 1;

                            // NOTE (steveb): we always create the lookup to create a discontinuity with previous changes on the same page;
                            //                this causes ungroupable changes (e.g. MOVE) to split groupable changes; thus avoiding
                            //                that these groupable changes get improperly grouped since they aren't continuous.

                            // create a new entry, either because this page has no existing entry yet, or the change cannot be grouped with other changes
                            commentLookup[key] = new DigestLookupEntry(timestamp, index, type);
                            authors.Add(new Dictionary<string, KeyValuePair<string, int>>());
                            authors[authors.Count - 1].Add(author, new KeyValuePair<string, int>(fullname, 1));

                            changes.Add(change);
                            ++index;
                        }
                    } else {

                        // add a default edit count
                        if(change.EditCount == 0) {
                            change.EditCount = Utils.IsPageEdit(type) ? 1 : 0;
                        }

                        // check if we need to merge this change with a previous one
                        DigestLookupEntry entry;
                        string key = string.Format("{0}-{1}-{2}", ns, pageId, timestamp.DayOfYear);
                        if(pageLookup.TryGetValue(key, out entry) && Utils.IsPageModification(type) && Utils.IsPageModification(entry.Type)) {
                            var item = changes[entry.Index];

                            // update 'rc_last_oldid' to reflect the older page id of the combined records
                            if(Utils.IsPageEdit(type)) {
                                item.LastOldId = change.LastOldId;
                                item.EditCount = item.EditCount + 1;
                                if(change.Revision != 0) {
                                    item.PreviousRevision = change.Revision - 1;
                                }
                            }

                            // append the change comments
                            if(item.ExtraComments == null) {
                                item.ExtraComments = new List<Tuplet<string, string, string>>();

                                // first add the existing comment to the list
                                item.ExtraComments.Add(new Tuplet<string, string, string>(item.Username, item.Fullname, item.Comment));
                            }
                            item.ExtraComments.Add(new Tuplet<string, string, string>(change.Username, change.Fullname, change.Comment));

                            // update edit count for author
                            KeyValuePair<string, int> authorEdits;
                            authors[entry.Index].TryGetValue(author, out authorEdits);
                            authors[entry.Index][author] = new KeyValuePair<string, int>(fullname, authorEdits.Value + 1);
                        } else {

                            // NOTE (steveb): we always create the lookup to create a discontinuity with previous changes on the same page;
                            //                this causes ungroupable changes (e.g. MOVE) to split groupable changes; thus avoiding
                            //                that these groupable changes get improperly grouped since they aren't continuous.

                            // create a new entry, either because this page has no existing entry yet, or the change cannot be grouped with other changes
                            pageLookup[key] = new DigestLookupEntry(timestamp, index, type);
                            authors.Add(new Dictionary<string, KeyValuePair<string, int>>());
                            authors[authors.Count - 1].Add(author, new KeyValuePair<string, int>(fullname, 1));

                            // check if page was changed
                            if(Utils.IsPageEdit(type)) {

                                // update previous revision number
                                change.PreviousRevision = change.Revision - 1;
                            } else if(Utils.IsPageModification(type)) {

                                // set previous revision number
                                change.PreviousRevision = change.Revision;
                            }
                            changes.Add(change);
                            ++index;
                        }
                    }
                }

                // build the list of authors as a comment line
                for(int i = 0; i < changes.Count; ++i) {
                    var change = changes[i];

                    // create an array of (fullname, username) author names
                    var sortedAuthors = (from author in authors[i] select new KeyValuePair<string, string>(author.Key, author.Value.Key)).ToList();
                    sortedAuthors.Sort((x, y) => StringComparer.OrdinalIgnoreCase.Compare(x.Value, y.Value));
                    string authorList = Utils.LinguisticJoin(from author in sortedAuthors select (string.IsNullOrEmpty(author.Value) ? author.Key : author.Value), resources.Localize(DekiResources.AND()));

                    // add up all edit operations
                    int editTotal = 0;
                    foreach(KeyValuePair<string, int> edits in authors[i].Values) {
                        editTotal += edits.Value;
                    }

                    // reset comment for standard edits
                    RC type = change.Type;
                    if(Utils.IsPageModification(type) || Utils.IsPageComment(type)) {
                        string summary = null;
                        switch(editTotal) {
                        case 2:
                            summary = resources.Localize(DekiResources.EDIT_SUMMARY_TWO(authorList, editTotal));
                            break;
                        case 1:
                            summary = resources.Localize(DekiResources.EDIT_SUMMARY_ONE(authorList, editTotal));
                            break;
                        case 0:
                            break;
                        default:
                            summary = resources.Localize(DekiResources.EDIT_SUMMARY_MANY(authorList, editTotal));
                            break;
                        }
                        change.Summary = summary;
                    }

                    // reflect that multiple authors edited the article, if appropriate
                    change.SortedAuthors = sortedAuthors;
                }

                // check if only the digest format was requested
                if(format == FeedFormat.RAW_DAILY) {
                    XDoc digest = new XDoc("digest");
                    foreach(var change in changes) {
                        change.AppendXml(digest);
                    }
                    return new Tuplet<MimeType, XDoc>(mime, digest);
                }
            } else if(format == FeedFormat.ATOM_ALL) {

                // keep all changes
                foreach(var change in recentchanges) {
                    if(Utils.IsPageEdit(change.Type)) {
                        change.PreviousRevision = change.Revision - 1;
                    } else {
                        change.Revision = 0;
                    }
                    changes.Add(change);
                }
            } else if(format == FeedFormat.DAILY_ACTIVITY) {

                // need to establish how many pages and users exist in total
                var pagesTotal = (int)DbUtils.CurrentSession.Pages_GetCount();
                var usersTotal = (int)DbUtils.CurrentSession.Users_GetCount();

                // daily activity format
                XDoc table = new XDoc("activity").Attr("type", "daily");
                DateTime missing = DateTime.UtcNow.Date;
                foreach(var change in from recentchange in recentchanges
                                      where (recentchange.Namespace == NS.MAIN) || (recentchange.Namespace == NS.USER)
                                      group recentchange by recentchange.Timestamp.Date into recentchangesByDate
                                      select new {
                                          Date = recentchangesByDate.Key,

                                          // count as edited pages, pages that were not created or deleted the same day
                                          PagesEdited = recentchangesByDate.Where(rc => (rc.Type == RC.EDIT) && !recentchangesByDate.Any(rc2 => (rc.CurId == rc2.CurId) && ((rc2.Type == RC.NEW) || (rc2.Type == RC.PAGERESTORED) || (rc2.Type == RC.PAGEDELETED)))).Distinct(rc => rc.CurId).Count(),

                                          // count as created pages, pages that were not deleted later the same day
                                          PagesCreated = recentchangesByDate.Count(rc => ((rc.Type == RC.NEW) || (rc.Type == RC.PAGERESTORED)) && !recentchangesByDate.Any(rc2 => (rc2.CurId == rc.CurId) && (rc2.Id < rc.Id) && (rc2.Type == RC.PAGEDELETED))),

                                          // count as deleted pages, pages that were not created or restored earlier the same day
                                          PagesDeleted = recentchangesByDate.Count(rc => (rc.Type == RC.PAGEDELETED) && !recentchangesByDate.Any(rc2 => (rc.CurId == rc2.CurId) && (rc2.Id > rc.Id) && ((rc2.Type == RC.NEW) || (rc2.Type == RC.PAGERESTORED)))),

                                          // simple counting of created users
                                          UsersCreated = recentchangesByDate.Count(rc => rc.Type == RC.USER_CREATED)
                                      }
                ) {

                    // check if we need to add empty entries for missing days
                    for(; missing > change.Date; missing = missing.AddDays(-1)) {
                        table.Start("entry").Attr("date", missing)
                            .Elem("pages.total", pagesTotal)
                            .Elem("pages.created", 0)
                            .Elem("pages.edited", 0)
                            .Elem("pages.deleted", 0)
                            .Elem("users.total", usersTotal)
                            .Elem("users.created", 0)
                        .End();
                    }

                    // add this day's entry
                    table.Start("entry").Attr("date", change.Date)
                        .Elem("pages.total", pagesTotal)
                        .Elem("pages.created", change.PagesCreated)
                        .Elem("pages.edited", change.PagesEdited)
                        .Elem("pages.deleted", change.PagesDeleted)
                        .Elem("users.total", usersTotal)
                        .Elem("users.created", change.UsersCreated)
                    .End();
                    
                    // NOTE (steveb): pages total might become negative if a user creation didn't actually create a user page
                    pagesTotal -= change.PagesCreated - change.PagesDeleted + change.UsersCreated;
                    usersTotal -= change.UsersCreated;

                    // indicate that the current day is *not* missing
                    missing = change.Date.AddDays(-1);
                }

                // pad with missing records
                for(; missing >= since; missing = missing.AddDays(-1)) {
                    table.Start("entry").Attr("date", missing)
                        .Elem("pages.total", pagesTotal)
                        .Elem("pages.created", 0)
                        .Elem("pages.edited", 0)
                        .Elem("pages.deleted", 0)
                        .Elem("users.total", usersTotal)
                        .Elem("users.created", 0)
                    .End();
                }
                return new Tuplet<MimeType, XDoc>(mime, table);
            } else {

                // unknown or RAW format
                XDoc table = new XDoc("table");
                foreach(var change in recentchanges) {
                    change.AppendXml(table);
                }
                return new Tuplet<MimeType, XDoc>(mime, table);
            }

            // compose feed document
            mime = MimeType.ATOM;
            XAtomFeed feed = new XAtomFeed(feedTitle, feedUri, DateTime.UtcNow) { Language = deki.Instance.SiteLanguage, Id = feedUri };
            Dictionary<string, XDoc> cache = new Dictionary<string, XDoc>();
            foreach(var change in changes) {
                RC type = change.Type;
                if(Utils.IsPageHiddenOperation(type)) {

                    // no real content to produce; let's skip it
                    continue;
                }

                // build feed content
                Title title = Title.FromDbPath(change.Namespace, change.Title, null);
                XDoc description = new XDoc("div");
                AppendDiff(diffCacheEnabled, description, change, type, title, cache);

                // add item to feed
                try {
                    DateTime timestamp = change.Timestamp;
                    XAtomEntry entry = feed.StartEntry(title.AsPrefixedUserFriendlyPath(), timestamp, timestamp);
                    XUri id = XUri.TryParse(Utils.AsPublicUiUri(title));
                    if(id != null) {
                        if(id.Segments.Length == 0) {
                            id = id.WithTrailingSlash();
                        }
                        entry.Id = id.WithFragment(DbUtils.ToString(change.Timestamp));
                    }
                    entry.AddAuthor(((change.SortedAuthors == null) || (change.SortedAuthors.Count == 1)) ? (string.IsNullOrEmpty(change.Fullname) ? change.Username : change.Fullname) : resources.Localize(DekiResources.EDIT_MULTIPLE()), null, null);
                    entry.AddLink(new XUri(Utils.AsPublicUiUri(title)), XAtomBase.LinkRelation.Alternate, null, null, null);
                    entry.AddSummary(MimeType.XHTML, description);
                    feed.End();
                } catch(Exception e) {
                    _log.ErrorExceptionMethodCall(e, "MakeNewsFeed", title.AsPrefixedDbPath());
                }
            }

            // insert <ins> styles
            foreach(XDoc ins in feed[".//ins"]) {
                ins.Attr("style", "color: #009900;background-color: #ccffcc;text-decoration: none;");
            }

            // insert <del> styles
            foreach(XDoc del in feed[".//del"]) {
                del.Attr("style", "color: #990000;background-color: #ffcccc;text-decoration: none;");
            }
            return new Tuplet<MimeType, XDoc>(mime, feed);
        }
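
MakeNewsFeed groups same-day changes through a DigestLookupEntry helper whose definition is not shown. The sketch below is inferred solely from the constructor call and the .Index/.Type accesses above; field names and types are assumptions:

        private struct DigestLookupEntry {

            // timestamp of the change that started the group
            public readonly DateTime Timestamp;

            // index of the merged entry in the 'changes' list
            public readonly int Index;

            // type of the change that started the group
            public readonly RC Type;

            public DigestLookupEntry(DateTime timestamp, int index, RC type) {
                Timestamp = timestamp;
                Index = index;
                Type = type;
            }
        }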
Example #3
 private void SaveFeed(XAtomFeed feed)
 {
     Storage.At("feed.xml").Put(feed);
 }
Example #4
 private XAtomFeed LoadFeed()
 {
     var storageResponse = Storage.At("feed.xml").Get(new Result<DreamMessage>()).Wait();
     if(storageResponse.IsSuccessful) {
         return new XAtomFeed(storageResponse.ToDocument());
     }
     var feed = new XAtomFeed("some feed", Self.Uri.AsPublicUri(), DateTime.UtcNow);
     SaveFeed(feed);
     return feed;
 }
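
A minimal usage sketch tying examples #3 and #4 together: load the persisted feed, append an entry, and store it back. The entry title, id, and content are illustrative assumptions:

     var feed = LoadFeed();
     XAtomEntry entry = feed.StartEntry("new item", DateTime.UtcNow, DateTime.UtcNow);
     entry.Id = Self.Uri.AsPublicUri().At("items", "1");
     entry.AddContent("entry body goes here");
     feed.End();
     SaveFeed(feed);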
Example #5
        public static XDoc GetCommentXmlAsAtom(IList<CommentBE> comments, XUri feedUri, PageBE page) {
            string title = string.Format(DekiResources.COMMENT_FOR, page.Title.AsUserFriendlyName());
            XAtomFeed feed = new XAtomFeed(title, feedUri, DateTime.UtcNow);
            feed.AddLink(PageBL.GetUriUi(page), XAtomBase.LinkRelation.Alternate, MimeType.XHTML, null, page.Title.AsUserFriendlyName());
            feed.Id = feedUri;

            foreach(CommentBE c in comments) {
                UserBE posterUser = UserBL.GetUserById(c.PosterUserId);
                title = c.Title;
                if(string.IsNullOrEmpty(title)) {
                    title = string.Format(DekiResources.COMMENT_BY_TO, posterUser.Name, page.Title.AsUserFriendlyName());
                }
                XAtomEntry entry = feed.StartEntry(title, c.CreateDate, (c.LastEditDate == null || c.LastEditDate == DateTime.MinValue) ? c.CreateDate : c.LastEditDate.Value);
                entry.Id = CommentBL.GetUri(c);
                entry.AddAuthor(posterUser.Name, UserBL.GetUriUiHomePage(posterUser), posterUser.Email);
                MimeType commentMimetype;
                MimeType.TryParse(c.ContentMimeType, out commentMimetype);
                entry.AddContent(c.Content);

                XUri entryLink = PageBL.GetUriUi(page).WithFragment("comment" + c.Number);
                entry.AddLink(entryLink, XAtomBase.LinkRelation.Alternate, null, null, null);
                entry.AddLink(CommentBL.GetUri(c).At("content"), XAtomBase.LinkRelation.Enclosure, commentMimetype, c.Content.Length, "content");
                feed.End();
            }

            return feed;
        }
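
A hypothetical call site for GetCommentXmlAsAtom; the comment lookup and the response plumbing are assumptions shown only for illustration:

            // hypothetical retrieval of the page's comments
            IList<CommentBE> comments = CommentBL.RetrieveCommentsForPage(page);
            XUri feedUri = DekiContext.Current.ApiUri.At("pages", page.ID.ToString(), "comments").With("format", "atom");
            XDoc feed = GetCommentXmlAsAtom(comments, feedUri, page);
            response.Return(DreamMessage.Ok(feed));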
Example #6
 protected override Yield Stop(Result result)
 {
     _feed = null;
     yield return Coroutine.Invoke(base.Stop, new Result());
     result.Return();
 }
Example #7
 //--- Methods ---
 protected override Yield Start(XDoc config, Result result)
 {
     yield return Coroutine.Invoke(base.Start, config, new Result());
     if(_feed == null) {
         _feed = new XAtomFeed(config["feed-title"].AsText ?? "Atom Feed", Self, DateTime.UtcNow);
         _defaultTTL = config["default-ttl"].AsDouble ?? 3600.0;
         _counter = 0;
     }
     result.Return();
 }
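
Examples #6 and #7 read and write several service fields whose declarations are not shown; the sketch below is inferred from usage, and the exact types are assumptions:

 //--- Fields ---
 private XAtomFeed _feed;      // feed kept in memory between Start and Stop
 private double _defaultTTL;   // default time-to-live read from config (3600.0 suggests seconds)
 private int _counter;         // entry counter reset on startup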
Example #8
        private XAtomFeed ConvertSearchResultsToOpenSearchAtom(XDoc luceneResults, string query, uint limit, uint offset, string format) {
            string luceneNamespace = "dekilucene";
            int totalResults = 100000;
            bool firstPage = offset < limit;
            bool lastPage = luceneResults["document"].ToList().Count == 0;
            XUri searchUri = DekiContext.Current.ApiUri.At("site", "opensearch").With("q", query).With("format", format);
            XUri self = searchUri.With("offset", Convert.ToString(offset)).With("limit", Convert.ToString(limit));
            XAtomFeed feed = new XAtomFeed("MindTouch Search", self, DateTime.Now);
            feed.UsePrefix("opensearch", "http://a9.com/-/spec/opensearch/1.1/");
            feed.UsePrefix(luceneNamespace, "http://services.mindtouch.com/deki/draft/2007/06/luceneindex");
            feed.UsePrefix("relevance", "http://a9.com/-/opensearch/extensions/relevance/1.0/");
            feed.AddAuthor("MindTouch Core", null, string.Empty);
            feed.Id = self;
            feed.Elem("dekilucene:parsedQuery", luceneResults["parsedQuery"].AsText);

            // HACKHACKHACK show a fake <totalResults> until we run out
            if(!lastPage) {
                feed.Elem("opensearch:totalResults", totalResults);
            }

            if(offset >= limit) {
                feed.Elem("opensearch:startIndex", offset);
            }

            feed.Elem("opensearch:itemsPerPage", limit);
            feed.Start("opensearch:Query")
                .Attr("role", "request")
                .Attr("searchTerms", XUri.Encode(query))
                .Attr("startPage", "1")
            .End();
            feed.Start("link")
                .Attr("rel", "alternate")
                .Attr("type", MimeType.HTML.ToString())
                .Attr("href", DekiContext.Current.UiUri.At("Special:Search").With("search", query).With("search", query).With("format", "html").With("limit", limit).With("offset", offset))
            .End();
            feed.Start("link")
                .Attr("rel", "search")
                .Attr("type", "application/opensearchdescription+xml")
                .Attr("href", DekiContext.Current.ApiUri.At("site", "opensearch", "description"))
            .End();
            feed.Start("link")
                .Attr("rel", "first")
                .Attr("href", searchUri.With("offset", Convert.ToString(0)).With("limit", Convert.ToString(limit)))
                .Attr("type", MimeType.ATOM.ToString())
            .End();
            if(!firstPage) {
                feed.Start("link")
                    .Attr("rel", "previous")
                    .Attr("href", searchUri.With("offset", Convert.ToString(offset - limit)).With("limit", Convert.ToString(limit)))
                    .Attr("type", MimeType.ATOM.ToString())
                .End();
            }
            if(!lastPage) {
                feed.Start("link")
                    .Attr("rel", "next")
                    .Attr("href", searchUri.With("offset", Convert.ToString(offset + limit)).With("limit", Convert.ToString(limit)))
                    .Attr("type", MimeType.ATOM.ToString())
                .End();
            }
            if(!lastPage) {
                feed.Start("link")
                    .Attr("rel", "last")
                    .Attr("href", searchUri.With("offset", Convert.ToString(totalResults - limit)).With("limit", Convert.ToString(limit)))
                    .Attr("type", MimeType.ATOM.ToString())
                .End();
            }
            var homepageId = DekiContext.Current.Instance.HomePageId;
            foreach(XDoc document in luceneResults["document"]) {
                var currentNode = feed.AsXmlNode;
                try {
                    bool isPageChild = false;
                    DateTime edited = DbUtils.ToDateTime(document["date.edited"].AsText);
                    XAtomEntry entry = feed.StartEntry(document["title"].AsText, edited, edited);
                    entry.Start("link")
                        .Attr("href", document["uri"].AsUri)
                    .End();
                    entry.Id = document["uri"].AsUri;
                    entry.AddContent(StringUtil.EncodeHtmlEntities(document["preview"].AsText, Encoding.ASCII, false));
                    entry.Elem("relevance:score", document["score"].AsText);
                    entry.Elem("dekilucene:size", document["size"].AsText);
                    entry.Elem("dekilucene:wordcount", document["wordcount"].AsText);
                    entry.Elem("dekilucene:path", document["path"].AsText);
                    if(!document["id.file"].IsEmpty) {
                        entry.Elem("dekilucene:id.file", document["id.file"].AsText);
                        isPageChild = true;
                    } else if(!document["id.comment"].IsEmpty) {
                        entry.Elem("dekilucene:id.comment", document["id.comment"].AsText);
                        isPageChild = true;
                    }
                    var pageId = document["id.page"].AsUInt ?? 0;
                    if(!isPageChild) {
                        entry.Elem("dekilucene:id.page", pageId);
                    }

                    if(pageId != homepageId) {
                        uint parentPageId;
                        string parentPath;
                        string parentTitle;
                        if(isPageChild) {
                            parentPageId = pageId;
                            parentPath = document["path"].AsText;
                            parentTitle = document["title.page"].AsText;
                        } else {
                            parentPageId = document["id.parent"].AsUInt ?? 0;
                            parentPath = document["path.parent"].AsText;
                            parentTitle = document["title.parent"].AsText;
                        }
                        if(parentPath != null && parentTitle != null) {
                            var title = Title.FromPrefixedDbPath(parentPath, parentTitle);
                            entry.Start("dekilucene:page.parent")
                                .Attr("id", parentPageId)
                                .Attr("path", title.AsPrefixedDbPath())
                                .Attr("title", title.AsUserFriendlyName())
                                .Attr("href", DekiContext.Current.ApiUri.At("pages", parentPageId.ToString()))
                                .End();
                        }
                    }
                } catch(Exception e) {
                    _log.Warn("found invalid data in search result. Likely a sign of a corrupted index. Skipping record", e);
                } finally {
                    feed.End(currentNode);
                }
            }
            return feed;
        }
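
A hypothetical caller for the OpenSearch conversion above; the Lucene query step is an assumed helper, shown only to illustrate how the resulting feed might be returned:

            // hypothetical helper that queries the Lucene index and returns matching <document> nodes
            XDoc luceneResults = SearchLuceneIndex(query, limit, offset);
            XAtomFeed feed = ConvertSearchResultsToOpenSearchAtom(luceneResults, query, limit, offset, "atom");
            response.Return(DreamMessage.Ok(feed));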