public static CommentBE PostNewComment(PageBE page, DreamMessage request, DreamContext context) {
    ValidateCommentText(request.ContentType, request.AsText());

    CommentBE comment = new CommentBE();
    comment.Title = context.GetParam("title", string.Empty);
    comment.PageId = page.ID;
    comment.Content = request.AsText();
    comment.ContentMimeType = request.ContentType.ToString();
    comment.PosterUserId = DekiContext.Current.User.ID;
    comment.CreateDate = DateTime.UtcNow;

    //Note (MaxM): Replytoid/replies not yet exposed
    //ulong replyToId = context.GetParam<ulong>("replyto", 0);
    //if (replyToId == 0)
    //    newComment.ReplyToId = null;
    //else
    //    newComment.ReplyToId = replyToId;

    ushort commentNumber;
    uint commentId = DbUtils.CurrentSession.Comments_Insert(comment, out commentNumber);
    if(commentId == 0) {
        return null;
    } else {
        comment.Id = commentId;
        comment.Number = commentNumber;
        PageBL.Touch(page, comment.CreateDate);
        RecentChangeBL.AddCommentCreateRecentChange(comment.CreateDate, page, DekiContext.Current.User, string.Format(DekiResources.COMMENT_ADDED, comment.Number.ToString()), comment);
        return comment;
    }
}
public virtual PageBE Copy(PageBE to) {
    to._Comment = _Comment;
    to._DisplayName = _DisplayName;
    to._Namespace = _Namespace;
    to._TimeStamp = _TimeStamp;
    to._Title = _Title;
    to._Touched = _Touched;
    to.ContentType = ContentType;
    to.ID = ID;
    to.IsNew = IsNew;
    to.IsHidden = IsHidden;
    to.IsRedirect = IsRedirect;
    to.Language = Language;
    to.Meta = Meta;
    to.MinorEdit = MinorEdit;
    to.ParentID = ParentID;
    to.RestrictionID = RestrictionID;
    to.Revision = Revision;
    to.TextLength = TextLength;
    to.TIP = TIP;
    to.UseCache = UseCache;
    to.UserID = UserID;
    to.Etag = Etag;
    return to;
}
public static IList<CommentBE> RetrieveCommentsForPage(PageBE page, CommentFilter filter, bool includePageDescendants, uint? postedByUserId, SortDirection sortDir, uint offset, uint limit, out uint totalComments) {
    IList<CommentBE> commentsForPage = DbUtils.CurrentSession.Comments_GetByPage(page, filter, includePageDescendants, postedByUserId, sortDir, offset, limit, out totalComments);
    if(includePageDescendants) {

        // Filter out comments from pages the user has no permissions on.
        // NOTE: this skews limits/offsets, since permissions cannot currently be applied at the db layer.
        commentsForPage = ApplyPermissionFilter(commentsForPage);
    }
    return commentsForPage;
}
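// Usage sketch (illustrative only, assuming this helper sits in the same class as RetrieveCommentsForPage):
// page through the comments on a single page. The CommentFilter and SortDirection values are passed in by
// the caller because their member names are not shown in this file.
private static void PrintCommentPage(PageBE page, CommentFilter filter, SortDirection sortDir) {
    uint totalComments;

    // fetch the first 25 comments on the page itself (no descendants), by any poster
    IList<CommentBE> comments = RetrieveCommentsForPage(page, filter, false, null, sortDir, 0, 25, out totalComments);
    foreach(CommentBE c in comments) {
        Console.WriteLine("#{0}: {1}", c.Number, c.Title);
    }
    Console.WriteLine("{0} of {1} comments", comments.Count, totalComments);
}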
public static XDoc RetrievePageXDoc(PageBE page, uint pageId, ParserMode mode, string language, bool isInclude, int section, Title relToTitle, bool xhtml, out ParserResult parserResult) {
    uint contextPageId = pageId;
    if((mode == ParserMode.VIEW) && (contextPageId != uint.MaxValue) && page.Title.IsTemplate) {

        // NOTE (steveb): page being rendered is a template and a contextual page was specified; this means we're rendering a global template page
        PageBE contextPage = GetPageById(contextPageId);
        if(contextPage == null) {
            parserResult = new ParserResult();
            return null;
        }
        parserResult = DekiXmlParser.ParseGlobalTemplate(contextPage, page);
    } else {
        parserResult = DekiXmlParser.Parse(page, page.ContentType, language ?? page.Language, page.GetText(DbUtils.CurrentSession), mode, isInclude, section, null, relToTitle);
    }
    if(page.Title.IsTemplate && isInclude) {
        DekiXmlParser.PostProcessTemplateInsertBody(parserResult, page);
    }

    // post process tail element
    DekiXmlParser.PostProcessParserResults(parserResult);

    // BUGBUGBUG (steveb): we cannot properly restore an old title unless it had a display title set

    // wrap the result in a content tag and return it to the user
    XDoc result = new XDoc("content")
        .Attr("type", parserResult.ContentType)
        .Attr("etag", page.Etag)
        .Attr("title", page.CustomTitle ?? page.Title.AsUserFriendlyName());

    // check if page contains unsafe content
    if(mode == ParserMode.EDIT) {
        result.Attr("unsafe", !DekiScriptLibrary.VerifyXHtml(parserResult.MainBody, true));
    }
    if(xhtml) {
        result.AddNodes(parserResult.Content);
    } else {

        // encode the result as nodes of text
        foreach(XDoc entry in parserResult.Content.Elements) {
            if(entry.HasName("body")) {
                result.Start("body").Attr("target", entry["@target"].AsText).Value(entry.ToInnerXHtml()).End();
            } else {
                result.Elem(entry.Name, entry.ToInnerXHtml());
            }
        }
    }
    return result;
}
public static void ImportTemplatePages(PageBE page, Title[] templates) {

    // for each template sub page, obtain its content and create a new subpage with it under the current page
    foreach(Title template in templates) {
        PageBE templatePage = PageBL.GetPageByTitle(template);
        if((0 != templatePage.ID) && (PermissionsBL.IsUserAllowed(DekiContext.Current.User, templatePage, Permissions.READ))) {
            string[] segments = template.AsUnprefixedDbSegments();
            segments[0] = page.Title.AsPrefixedDbPath();
            Title newTitle = Title.FromPrefixedDbPath(String.Join("/", segments).Trim('/'), null);
            PageBE newPage = PageBL.GetPageByTitle(newTitle);
            if(0 == newPage.ID) {
                PageBL.Save(newPage, null, templatePage.GetText(DbUtils.CurrentSession), templatePage.ContentType, templatePage.Title.DisplayName, templatePage.Language);
            }
        }
    }
}
//--- Methods ---
public static void SetRating(PageBE page, UserBE user, float? score) {
    ThrowOnInvalidLicense();
    RatingBE currentRating = DbUtils.CurrentSession.Rating_GetUserResourceRating(user.ID, page.ID, ResourceBE.Type.PAGE);
    if(score == null) {
        if(currentRating == null) {

            // no rating exists currently: noop
            return;
        }

        // reset the user's rating for the page
        DbUtils.CurrentSession.Rating_ResetUserResourceRating(user.ID, page.ID, ResourceBE.Type.PAGE, DreamContext.Current.StartTime);
    } else {

        // set or update a page rating; valid scores are limited to 0 and 1
        if(score != 0 && score != 1) {
            throw new Exceptions.InvalidRatingScoreException();
        }
        if(currentRating != null && currentRating.Score == score) {

            // an equal score already exists: noop
            return;
        }
        RatingBE rating = new RatingBE();
        rating.ResourceId = page.ID;
        rating.ResourceType = ResourceBE.Type.PAGE;
        rating.ResourceRevision = page.Revision;
        rating.Timestamp = DreamContext.Current.StartTime;
        rating.TimestampReset = null;
        rating.UserId = user.ID;
        rating.Score = score.Value;

        // set the new rating
        DbUtils.CurrentSession.Rating_Insert(rating);
    }

    // trigger a notification
    DekiContext.Current.Instance.EventSink.PageRated(DreamContext.Current.StartTime, page, user);
}
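// Usage sketch (illustrative only, assuming this helper sits next to SetRating in the same class):
// scores are restricted to 0 or 1, and passing null clears a previously recorded rating.
public static void RateThenClear(PageBE page, UserBE user) {

    // thumbs-up the page
    SetRating(page, user, 1f);

    // later: remove the user's rating again
    SetRating(page, user, null);
}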
public static DreamMessage BuildDeletedPageContents(uint pageid) {
    ArchiveBE page = DbUtils.CurrentSession.Archive_GetPageHeadById(pageid);
    if(page == null) {
        throw new DreamAbortException(DreamMessage.NotFound(string.Format(DekiResources.RESTORE_PAGE_ID_NOT_FOUND, pageid)));
    }

    //HACKHACKHACK MaxM: Copy data to a PageBE object since parser will not work on an ArchiveBE. ArchiveBE needs to go away.
    PageBE tempP = new PageBE();
    tempP.Title = page.Title;
    tempP.SetText(page.Text);
    tempP.ContentType = page.ContentType;
    ParserResult parserResult = DekiXmlParser.Parse(tempP, ParserMode.VIEW_NO_EXECUTE);

    // TODO (steveb): this code is almost identical to the one in "GET:pages/{pageid}/contents"; consider merging

    // post process tail
    DekiXmlParser.PostProcessParserResults(parserResult);

    // wrap the result in a content tag and return it to the user
    XDoc result = new XDoc("content").Attr("type", parserResult.ContentType);
    foreach(XDoc entry in parserResult.Content.Elements) {
        if(entry.HasName("body")) {
            result.Start("body").Attr("target", entry["@target"].AsText).Value(entry.ToInnerXHtml()).End();
        } else {
            result.Elem(entry.Name, entry.ToInnerXHtml());
        }
    }

    // check if we hit a snag, which is indicated by a plain-text response
    if((parserResult.ContentType == MimeType.TEXT.FullType) && (page.ContentType != MimeType.TEXT.FullType)) {

        // something happened during parsing
        return new DreamMessage(DreamStatus.NonAuthoritativeInformation, null, result);
    } else {
        return DreamMessage.Ok(result);
    }
}
/// <summary>
/// Applies permissions in a set/replace approach analogous to PUT
/// </summary>
/// <param name="targetPage">Page to apply permissions to</param>
/// <param name="restriction">Optional restriction mask to apply to page (and optionally to child pages)</param>
/// <param name="proposedGrants">List of grants to apply to page and child pages</param>
/// <param name="cascade">
/// NONE: Don't cascade; the grant delta is applied to the target page only.
/// ABSOLUTE: proposedGrants are applied to the root page and all child pages. All grants not in the proposedGrants list are removed.
/// DELTA: proposedGrants is applied exactly to the root page. Child pages get the differences between proposedGrants and the grants of the root page, thus preserving the grants they already had.
/// </param>
public static void ReplacePagePermissions(PageBE targetPage, RoleBE restriction, IList<GrantBE> proposedGrants, CascadeType cascade) {

    // Perform validation of grants:
    // make sure users and groups described in the grants exist; this populates the user/group object within each grant.
    VerifyValidUsersAndGroups(proposedGrants);

    // ensure a duplicate grant isn't given for the same role multiple times to a user/group
    HashGrantsByTypeGranteeIdRoleId(proposedGrants);

    IList<GrantBE> currentGrants, proposedAddedGrants, proposedRemovedGrants;
    ulong userEffectiveRights = (ulong)GetUserPermissions(DekiContext.Current.User);
    switch(cascade) {
    case CascadeType.NONE:

        // no cascading to children: delta(current security of page, proposed security) is applied
        currentGrants = DbUtils.CurrentSession.Grants_GetByPage((uint)targetPage.ID);
        CompareGrantSets(currentGrants, proposedGrants, out proposedAddedGrants, out proposedRemovedGrants);
        ApplyPermissionChange(targetPage, false, userEffectiveRights, null, proposedAddedGrants, proposedRemovedGrants, currentGrants, restriction, true);
        break;
    case CascadeType.ABSOLUTE:

        // cascade proposed security set to children
        ApplyPermissionChange(targetPage, true, userEffectiveRights, proposedGrants, null, null, null, restriction, true);
        break;
    case CascadeType.DELTA:

        // cascade delta(current security of page, proposed security) to page and children
        currentGrants = DbUtils.CurrentSession.Grants_GetByPage((uint)targetPage.ID);
        CompareGrantSets(currentGrants, proposedGrants, out proposedAddedGrants, out proposedRemovedGrants);

        // Note (arnec): even if proposed add & remove are empty, we have to call this method, since restriction may need to be set and propagated.
        ApplyPermissionChange(targetPage, true, userEffectiveRights, null, proposedAddedGrants, proposedRemovedGrants, currentGrants, restriction, true);
        break;
    }
}
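// Usage sketch (illustrative only): parse a grant document and push the resulting grant set down to a page
// and all of its children. Assumes this helper lives in the same class as ReplacePagePermissions,
// ReadGrantsXml and GetRestrictionByName; the "Public" role name is the one GetPageRestriction below falls back to.
public static void GrantAndCascade(PageBE page, XDoc grantsXml) {
    List<GrantBE> grants = ReadGrantsXml(grantsXml, page, false);
    RoleBE restriction = GetRestrictionByName("Public");

    // ABSOLUTE: children end up with exactly this grant set;
    // DELTA would only push the differences down, NONE would touch the target page alone.
    ReplacePagePermissions(page, restriction, grants, CascadeType.ABSOLUTE);
}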
public static XDoc GetCommentXmlAsAtom(IList<CommentBE> comments, XUri feedUri, PageBE page) {
    string title = string.Format(DekiResources.COMMENT_FOR, page.Title.AsUserFriendlyName());
    XAtomFeed feed = new XAtomFeed(title, feedUri, DateTime.UtcNow);
    feed.AddLink(PageBL.GetUriUi(page), XAtomBase.LinkRelation.Alternate, MimeType.XHTML, null, page.Title.AsUserFriendlyName());
    feed.Id = feedUri;
    foreach(CommentBE c in comments) {
        UserBE posterUser = UserBL.GetUserById(c.PosterUserId);
        title = c.Title;
        if(string.IsNullOrEmpty(title)) {
            title = string.Format(DekiResources.COMMENT_BY_TO, posterUser.Name, page.Title.AsUserFriendlyName());
        }
        XAtomEntry entry = feed.StartEntry(title, c.CreateDate, (c.LastEditDate == null || c.LastEditDate == DateTime.MinValue) ? c.CreateDate : c.LastEditDate.Value);
        entry.Id = CommentBL.GetUri(c);
        entry.AddAuthor(posterUser.Name, UserBL.GetUriUiHomePage(posterUser), posterUser.Email);
        MimeType commentMimetype;
        MimeType.TryParse(c.ContentMimeType, out commentMimetype);
        entry.AddContent(c.Content);
        XUri entryLink = PageBL.GetUriUi(page).WithFragment("comment" + c.Number);
        entry.AddLink(entryLink, XAtomBase.LinkRelation.Alternate, null, null, null);
        entry.AddLink(CommentBL.GetUri(c).At("content"), XAtomBase.LinkRelation.Enclosure, commentMimetype, c.Content.Length, "content");
        feed.End();
    }
    return feed;
}
public static CommentBE EditExistingComment(PageBE page, CommentBE comment, DreamMessage request, DreamContext context) {

    // only the original poster may edit a comment, unless the current user has ADMIN rights
    if(comment.PosterUserId != DekiContext.Current.User.ID) {
        PermissionsBL.CheckUserAllowed(DekiContext.Current.User, Permissions.ADMIN);
    }

    ValidateCommentText(request.ContentType, request.AsText());
    comment.LastEditDate = DateTime.UtcNow;
    comment.LastEditUserId = DekiContext.Current.User.ID;
    comment.Content = request.AsText();
    comment.ContentMimeType = request.ContentType.ToString();
    DbUtils.CurrentSession.Comments_Update(comment);
    PageBL.Touch(page, comment.LastEditDate.Value);
    RecentChangeBL.AddCommentUpdateRecentChange(comment.LastEditDate.Value, page, DekiContext.Current.User, string.Format(DekiResources.COMMENT_EDITED, comment.Number.ToString()), comment);
    return comment;
}
//--- Methods ---
private string GetRecursiveRecentChangesTimestamp(PageBE page, bool includePages, bool includeFiles, bool includeComments, bool includeTags) {
    RecentChangeEntry entry = null;
    if(includePages && includeFiles && includeComments && includeTags) {
        entry = QueryPageRecentChanges(page, DateTime.MinValue, 0, 1, true, false).First();
    } else {

        // TODO (steveb): we need to change this so that we _only_ query for the changes
        // we actually need rather than fetch a whole bunch and discard them
        var changes = QueryPageRecentChanges(page, DateTime.MinValue, 0, 100, true, false);
        foreach(var change in changes) {
            bool changed = false;
            if(includePages) {
                changed = changed || (change.Type == RC.EDIT) || (change.Type == RC.NEW) || (change.Type == RC.MOVE) || (change.Type == RC.MOVE_OVER_REDIRECT)
                    || (change.Type == RC.PAGEDELETED) || (change.Type == RC.PAGERESTORED) || (change.Type == RC.PAGEMETA)
                    || (change.Type == RC.GRANTS_ADDED) || (change.Type == RC.GRANTS_REMOVED) || (change.Type == RC.RESTRICTION_UPDATED);
            }
            if(includeFiles) {
                changed = changed || (change.Type == RC.FILE);
            }
            if(includeComments) {
                changed = changed || (change.Type == RC.COMMENT_CREATE) || (change.Type == RC.COMMENT_UPDATE) || (change.Type == RC.COMMENT_DELETE);
            }
            if(includeTags) {
                changed = changed || (change.Type == RC.MOVE) || (change.Type == RC.MOVE_OVER_REDIRECT) || (change.Type == RC.TAGS);
            }
            if(changed) {
                entry = change;
                break;
            }
        }

        // if no match was found, keep the timestamp of the oldest one
        if(entry == null) {
            entry = changes.Last();
        }
    }
    return entry.Timestamp.ToString("yyyyMMddHHmmss");
}
public static RoleBE GetPageRestriction(PageBE page) {
    if(page.RestrictionID == 0) {

        // TODO (steveb): we assume this to be public, but there's no good way of retrieving public only
        return GetRestrictionByName("Public");
    }
    return GetRestrictionById(page.RestrictionID);
}
private void GetCommentFromRequest(DreamContext context, Permissions access, out PageBE page, out CommentBE comment) {
    page = null;
    comment = null;
    ushort commentNumber = context.GetParam<ushort>("commentnumber");
    if(commentNumber != 0) {
        page = PageBL_AuthorizePage(context, null, Permissions.READ, false);
        comment = DbUtils.CurrentSession.Comments_GetByPageIdNumber(page.ID, commentNumber);
    }
    if(comment == null) {
        throw new CommentNotFoundException();
    }
}
public Yield GetNavigationFull(DreamContext context, DreamMessage request, Result<DreamMessage> response) {
    CheckResponseCache(context, false);
    PageBE page = PageBL_GetPageFromUrl(context, false);
    if(page.Title.IsTalk) {
        page = PageBL.GetPageByTitle(page.Title.AsFront());
    }

    // check if requested page exists, otherwise find nearest parent
    uint new_page_id = NavBL.NEW_PAGE_ID;
    ulong homepageId = DekiContext.Current.Instance.HomePageId;
    List<NavBE> list;
    bool expandableNav = context.GetParam("type", "compact").EqualsInvariantIgnoreCase("expandable");

    // check if a page was found
    if(page.ID == 0) {

        // find nearest ancestor and authorize access
        PageBE ancestor = page;
        while(!ancestor.Title.IsHomepage) {

            // fetch ancestor page based on title
            ulong id = DbUtils.CurrentSession.Nav_GetNearestParent(ancestor.Title);
            ancestor = PageBL.GetPageById(id);
            if(PermissionsBL.IsUserAllowed(DekiContext.Current.User, ancestor, Permissions.BROWSE)) {
                break;
            }

            // determine parent page title
            Title title = ancestor.Title.GetParent();
            if(title == null) {

                // current ancestor was the homepage
                break;
            }
            ancestor = new PageBE { Title = title };
        }
        if(ancestor.ID == 0) {
            ancestor = PageBL.GetHomePage();
        }
        list = NavBL.QueryNavTreeData(ancestor, context.Culture, expandableNav).ToList();

        // find the nearest parent node and increase its child count
        foreach(NavBE nearestAncestors in list) {
            if(nearestAncestors.Id == ancestor.ID) {
                nearestAncestors.ChildCount = nearestAncestors.ChildCount + 1;
                break;
            }
        }

        // for each missing node, generate a dummy page and insert it into result set
        ulong ancestor_id = ancestor.ID;
        string[] ancestor_segments = ancestor.Title.AsUnprefixedDbSegments();
        string[] new_page_segments = page.Title.AsUnprefixedDbSegments();
        List<NavBE> newNodes = new List<NavBE>(32);
        for(int i = ancestor_segments.Length; i < new_page_segments.Length; ++i) {
            string title = string.Join("/", new_page_segments, 0, i + 1);

            // create dummy node with <page><page_id /><page_namespace /><page_title /><page_parent /><page_children /></page>
            NavBE newPage = new NavBE {
                Id = new_page_id,
                NameSpace = (ushort)page.Title.Namespace,
                Title = title,
                ParentId = (ancestor_id == homepageId) ? 0 : ancestor_id,
                ChildCount = (i == new_page_segments.Length - 1) ? 0 : 1
            };
            newNodes.Add(newPage);

            // update page information
            page.ID = new_page_id;
            page.ParentID = ancestor_id;
            ancestor_id = new_page_id++;
        }

        // check if we need to remove the children nodes of the ancestor
        ancestor_id = (ancestor.ID == homepageId) ? 0 : (uint)ancestor.ID;
        if(!expandableNav && (new_page_segments.Length - ancestor_segments.Length) > 1) {

            // remove ancestor children and add new dummy nodes
            for(int start = 0; start < list.Count; ++start) {

                // check if we found a matching child
                if((list[start].ParentId == ancestor_id) && (list[start].Id != homepageId)) {

                    // look for last child to remove so we can remove an entire range at once
                    int end = start + 1;
                    for(; (end < list.Count) && (list[end].ParentId == ancestor_id) && (list[end].Id != homepageId); ++end) { }
                    list.RemoveRange(start, end - start);
                    --start;
                }
            }
        } else {

            // find where among the ancestor children we need to insert the dummy node
            for(int index = 0; index < list.Count; ++index) {
                NavBE current = list[index];
                if((current.ParentId == ancestor_id) && (current.Id != homepageId)) {
                    string[] parts = Title.FromDbPath(NS.UNKNOWN, current.Title, current.DisplayName).AsUnprefixedDbSegments();
                    if((parts.Length > 0) && (new_page_segments.Length > 0) && (string.Compare(parts[parts.Length - 1], new_page_segments[parts.Length - 1], true, context.Culture) > 0)) {

                        // found the spot
                        list.InsertRange(index, newNodes);
                        newNodes = null;
                        break;
                    }
                }
            }
        }

        // check if we didn't find the spot
        if(newNodes != null) {

            // add new nodes to the end
            list.AddRange(newNodes);
        }
    } else {
        list = NavBL.QueryNavTreeData(page, context.Culture, expandableNav).ToList();
    }

    // find first parent
    ulong parent_id = homepageId;
    int parent_index = -1;
    for(int i = 0; i < list.Count; ++i) {
        if(list[i].Id == parent_id) {
            parent_index = i;
            break;
        }
    }
    if(parent_index == -1) {
        throw new Exception("unexpected [homepage not found]");
    }

    // add any missing ancestor nodes (might have been removed by permission check or might simply not exist)
    string[] page_segments = page.Title.AsUnprefixedDbSegments();
    ushort ns = (ushort)page.Title.Namespace;
    for(int i = 0; i <= page_segments.Length; ++i) {
        string title = string.Join("/", page_segments, 0, i);

        // loop over all nodes
        bool found = false;
        for(int j = 0; j < list.Count; ++j) {
            NavBE node = list[j];

            // NOTE (steveb): we walk the path one parent at a time; however, there are a few special cases, because of namespaces;
            // for instance, the parent page of User:Bob is the homepage (ditto for Template:Page), but the parent page of Admin:Config is Admin:

            // check if we found a node matching the current title
            if((string.Compare(node.Title, title, true, context.Culture) == 0) && (((i == 0) && (ns != (ushort)NS.ADMIN)) ? (node.NameSpace == (ushort)NS.MAIN) : (node.NameSpace == ns))) {
                found = true;

                // let's make sure node is pointing to the right parent
                node.ParentId = (parent_id == homepageId) ? 0 : parent_id;
                parent_id = node.Id;
                parent_index = j;
                break;
            }
        }
        if(!found) {

            // node is missing, let's create a new one
            NavBE newPage = new NavBE {
                Id = new_page_id,
                NameSpace = (ushort)page.Title.Namespace,
                Title = title,
                ParentId = (parent_id == homepageId) ? 0 : parent_id,
                ChildCount = 1
            };

            // add new page after parent
            list.Insert(parent_index + 1, newPage);
            parent_id = new_page_id++;
            parent_index = parent_index + 1;
        }
    }

    // build response
    if(ShowDebug(context)) {
        response.Return(DreamMessage.Ok(NavBL.ConvertNavPageListToDoc(list)));
    } else {
        XDoc result = expandableNav
            ? NavBL.ComputeExpandableNavigationDocument(list, page, 0, 0, false)
            : NavBL.ComputeNavigationDocument(list, page, 0, 0, false, context.GetParam("width", int.MaxValue));
        if(ShowXml(context)) {
            response.Return(DreamMessage.Ok(result));
        } else {
            response.Return(DreamMessage.Ok(new XDoc("tree").Value(result.Contents)));
        }
    }
    yield break;
}
public static PageBE[] FilterDisallowed(UserBE user, ICollection<PageBE> pages, bool throwException, bool allowApiKey, bool applyBanMask, out PageBE[] filteredOutPages, params Permissions[] actions) {
    IEnumerable<ulong> filtered;
    var pageLookup = pages.Distinct(x => x.ID).ToDictionary(x => x.ID);
    var allowed = FilterDisallowed(user, pages.Select(x => x.ID), throwException, allowApiKey, applyBanMask, out filtered, actions);
    filteredOutPages = filtered.Select(x => pageLookup[x]).ToArray();
    return allowed.Select(x => pageLookup[x]).ToArray();
}
public static PageBE[] FilterDisallowed(UserBE user, ICollection<PageBE> pages, bool throwException, out PageBE[] filteredOutPages, params Permissions[] actions) {
    return FilterDisallowed(user, pages, throwException, true, true, out filteredOutPages, actions);
}
private static bool CheckUserAllowed(UserBE user, PageBE page, bool throwException, params Permissions[] actions) {
    if(user == null || page == null) {
        return false;
    }
    PageBE[] allowedPages = FilterDisallowed(user, new PageBE[] { page }, throwException, true, true, actions);
    return (allowedPages != null) && (allowedPages.Length != 0);
}
public static void CheckUserAllowed(UserBE user, PageBE page, params Permissions[] actions) {
    CheckUserAllowed(user, page, true, actions);
}
public static bool IsUserAllowed(UserBE user, PageBE page, params Permissions[] actions) {
    return CheckUserAllowed(user, page, false, actions);
}
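// Usage sketch (illustrative only, assuming it sits in the same class): the two public entry points above
// differ only in how a denial is reported; CheckUserAllowed throws while IsUserAllowed returns false.
public static void RequireRead(UserBE user, PageBE page) {

    // non-throwing form: returns false when the user lacks READ on the page
    bool canRead = IsUserAllowed(user, page, Permissions.READ);

    // throwing form: same check, but a denial surfaces as an exception instead of a return value
    if(!canRead) {
        CheckUserAllowed(user, page, Permissions.READ);
    }
}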
public static void DeleteComment(PageBE page, CommentBE comment) {
    if(!comment.IsCommentMarkedAsDeleted) {
        comment.DeleteDate = DateTime.UtcNow;
        comment.DeleterUserId = DekiContext.Current.User.ID;
        DbUtils.CurrentSession.Comments_Update(comment);
        PageBL.Touch(page, comment.DeleteDate.Value);
        RecentChangeBL.AddCommentDeleteRecentChange(comment.DeleteDate.Value, page, DekiContext.Current.User, string.Format(DekiResources.COMMENT_DELETED, comment.Number.ToString()), comment);
        DekiContext.Current.Instance.EventSink.CommentDelete(DreamContext.Current.StartTime, comment, page, DekiContext.Current.User);
    }
}
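// Usage sketch (illustrative only, assuming PostNewComment, EditExistingComment and DeleteComment live
// on the same class): the comment lifecycle as exposed above. The DreamMessage/DreamContext come straight
// from the API handler; nothing about their construction is assumed here.
public static void CommentRoundTrip(PageBE page, DreamMessage request, DreamContext context) {

    // create; returns null if the insert failed
    CommentBE comment = PostNewComment(page, request, context);
    if(comment == null) {
        return;
    }

    // update in place (original poster or an ADMIN only)
    comment = EditExistingComment(page, comment, request, context);

    // soft-delete: marks the comment deleted and records a recent change
    DeleteComment(page, comment);
}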
public virtual ResourceBE Delete(ResourceBE resource, PageBE parentPage, uint changeSetId) {

    // build the new revision
    ResourceBE res = BuildRevForRemove(resource, DateTime.UtcNow, changeSetId);

    // update db
    res = SaveResource(res);

    // update indexes and parent page's timestamp
    //TODO MaxM: Changesink needs to accept a resource
    if(res.ResourceType == ResourceBE.Type.FILE) {
        DekiContext.Current.Instance.EventSink.AttachmentDelete(DekiContext.Current.Now, res, DekiContext.Current.User);

        // recent changes
        RecentChangeBL.AddFileRecentChange(DekiContext.Current.Now, parentPage, DekiContext.Current.User, DekiResources.FILE_REMOVED(res.Name), changeSetId);
    }
    if(parentPage != null) {
        PageBL.Touch(parentPage, DateTime.UtcNow);
    }
    return res;
}
public static ulong CalculateEffectivePageRights(PageBE page, UserBE user) {

    //TODO (MaxM): This should be optimized to not make a direct db call but should instead calculate effective permissions
    // based on Page.Grants, Page.Restriction, user.PermissionMask
    ulong effectivePermissions = CalculateEffectiveForUserPage(user, page.ID);
    return CalculatePermissionReduction(user, effectivePermissions);
}
public virtual ResourceBE BuildRevForRestore(ResourceBE currentResource, PageBE targetPage, string resourceName, uint changeSetId) {
    ResourceBE newRev = BuildResourceRev(currentResource);
    newRev.ResourceIsDeleted = false;
    newRev.ChangeSetId = changeSetId;
    newRev.ParentPageId = (uint)targetPage.ID;
    newRev.Name = resourceName;
    newRev.ChangeMask = newRev.ChangeMask | ResourceBE.ChangeOperations.DELETEFLAG;
    return newRev;
}
public void MoveFile(ResourceBE attachment, PageBE targetPage) {
    CheckDisposed();

    //Nothing to do here.
}
public static List<GrantBE> ReadGrantsXml(XDoc grantsXml, PageBE page, bool ignoreInvalid) {
    List<GrantBE> grants = new List<GrantBE>();
    GrantBE g = null;
    if(!grantsXml.IsEmpty) {
        if(grantsXml.HasName("grant")) {
            try {
                g = ReadGrantXml(grantsXml, page);
            } catch(ArgumentException x) {
                if(!ignoreInvalid) {
                    throw new PermissionsGrantParseInvalidArgumentException(x.Message);
                }
            }
            if(g != null) {
                grants.Add(g);
            }
        } else if(grantsXml.HasName("grants") || grantsXml.HasName("grants.added") || grantsXml.HasName("grants.removed")) {

            // if the root node is a grants collection, hand each inner grant element to ReadGrantXml in a loop
            foreach(XDoc grantXml in grantsXml["grant"]) {
                try {
                    g = ReadGrantXml(grantXml, page);
                } catch(ArgumentException x) {
                    if(!ignoreInvalid) {
                        throw new PermissionsGrantParseInvalidArgumentException(x.Message);
                    }
                }
                if(g != null) {
                    grants.Add(g);
                }
            }
        }
    }
    return grants;
}
public virtual ResourceBE BuildRevForMoveAndRename(ResourceBE currentResource, PageBE targetPage, string name, uint changeSetId) {
    ResourceBE newRev = BuildResourceRev(currentResource);

    //NOTE MaxM: This logic exists here since BuildResourceRev clears out fields preventing chaining of entity building for separate actions on one revision
    if(targetPage != null && (uint)targetPage.ID != newRev.ParentPageId.Value) {
        newRev.ParentPageId = (uint)targetPage.ID;
        newRev.ChangeMask |= ResourceBE.ChangeOperations.PARENT;
    }
    if(name != null && !StringUtil.EqualsInvariant(name, currentResource.Name)) {
        newRev.Name = name;
        newRev.ChangeMask |= ResourceBE.ChangeOperations.NAME;
    }
    newRev.ChangeSetId = changeSetId;
    return newRev;
}
public static void DeleteAllGrantsForPage(PageBE page) {
    DbUtils.CurrentSession.Grants_DeleteByPage(new List<ulong>() { page.ID });
    RecentChangeBL.AddGrantsRemovedRecentChange(DateTime.UtcNow, page, DekiContext.Current.User, DekiResources.GRANT_REMOVED_ALL());
}
public XUri GetUriCanonical(PageBE page) {
    throw new NotImplementedException();
}
private static GrantBE ReadGrantXml(XDoc grantXml, PageBE pg) {
    GrantBE grant = new GrantBE();
    if(grantXml["user/@id"].Contents != string.Empty) {
        grant.UserId = DbUtils.Convert.To<uint>(grantXml["user/@id"].Contents, 0);
        grant.Type = GrantType.USER;
    }
    if(grantXml["group/@id"].Contents != string.Empty) {
        grant.GroupId = DbUtils.Convert.To<uint>(grantXml["group/@id"].Contents, 0);
        grant.Type = GrantType.GROUP;
    }

    // providing neither or both of user id and group id is invalid
    if((grant.UserId == 0 && grant.GroupId == 0) || (grant.UserId != 0 && grant.GroupId != 0)) {
        throw new PermissionsUserOrGroupIDNotGivenInvalidArgumentException();
    }
    if(grantXml["permissions/role"].Contents == string.Empty) {
        throw new PermissionsRoleNotGivenInvalidArgumentException();
    }
    RoleBE r = GetRoleByName(grantXml["permissions/role"].Contents);
    if(r == null) {
        throw new PermissionsUnrecognizedRoleInvalidArgumentRoleException();
    }
    grant.Role = r;
    grant.RoleId = r.ID;

    // optional expiration date field; if provided but unparsable, reject the grant
    string expireString = grantXml["date.expires"].Contents;
    if(expireString != string.Empty) {
        DateTime expirationDate;
        if(!DateTime.TryParse(expireString, out expirationDate)) {
            throw new PermissionsExpiryParseInvalidArgumentException();
        } else {
            grant.ExpirationDate = expirationDate;
        }
    }
    grant.PageId = (uint)pg.ID;
    grant.CreatorUserId = DekiContext.Current.User.ID;
    return grant;
}
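// Input sketch (illustrative only): the shape of the XML that ReadGrantXml expects, reconstructed from the
// paths it reads (user/@id or group/@id, permissions/role, optional date.expires). The id, role name and
// expiration date below are made-up values; the role must exist for GetRoleByName to resolve it.
private static XDoc BuildSampleGrantXml() {
    return new XDoc("grant")
        .Start("user").Attr("id", "42").End()                  // grantee by user id (use group/@id for a group grant)
        .Start("permissions").Elem("role", "Contributor").End() // role name looked up via GetRoleByName
        .Elem("date.expires", "2011-01-01T00:00:00Z");           // optional; must be parsable by DateTime.TryParse
}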
private IEnumerable<RecentChangeEntry> QueryPageRecentChanges(PageBE page, DateTime since, int offset, int count, bool recurse, bool createOnly) {
    return ConvertAndFilterXmlToRecentChanges(DbUtils.CurrentSession.RecentChanges_GetPageRecentChanges(page, since, recurse, createOnly, MAX_RECENT_CHANGES), offset, count, true);
}
public Dictionary<uint, IList<PageBE>> Tags_GetRelatedPages(IList<uint> tagids) {

    // retrieve a map of tag id to pages:
    // each define tag maps to a list of related pages and each text tag maps to its defining page (if one exists)
    Dictionary<uint, IList<PageBE>> result = new Dictionary<uint, IList<PageBE>>();
    if(0 < tagids.Count) {
        string tagIdsText = string.Join(",", DbUtils.ConvertArrayToDelimittedString<uint>(',', tagids));

        //TODO MaxM: This query is causing quite a bit of db load and needs to be optimized
        Catalog.NewQuery(string.Format(@" /* Tags_GetRelatedPages */
SELECT requested_tag_id as tag_id, page_id, page_title, page_namespace, page_display_name
FROM pages
JOIN tag_map ON page_id = tagmap_page_id
JOIN (
    SELECT requestedtags.tag_id as requested_tag_id, tags.tag_id
    FROM tags
    JOIN tags as requestedtags ON tags.tag_name = requestedtags.tag_name
    WHERE (
        ((tags.tag_type = 3 AND requestedtags.tag_type = 0) OR (tags.tag_type = 0 AND requestedtags.tag_type = 3))
        AND requestedtags.tag_id IN ({0})
    )
) relatedtags ON tagmap_tag_id = tag_id;", tagIdsText))
        .Execute(delegate(IDataReader dr) {
            while(dr.Read()) {

                // extract the tag to page mapping
                uint tagid = DbUtils.Convert.To<UInt32>(dr["tag_id"], 0);
                PageBE page = new PageBE();
                page.ID = DbUtils.Convert.To<UInt32>(dr["page_id"], 0);
                page.Title = DbUtils.TitleFromDataReader(dr, "page_namespace", "page_title", "page_display_name");

                // add the mapping into the collection
                IList<PageBE> relatedPages;
                if(!result.TryGetValue(tagid, out relatedPages)) {
                    relatedPages = new List<PageBE>();
                    result[tagid] = relatedPages;
                }
                relatedPages.Add(page);
            }
        });
    }
    return result;
}