// Resolves the deepest web reachable by appending successive path parts to the starting
// web. On return, index points at the first part that failed to resolve (or past the end
// when all parts resolved).
WebInfo FindWeb(WebInfo web, string[] parts, ref int index, bool skipWildcards) {
    // We continue appending the current part of the path and advancing to the next one as
    // long as we succeed in resolving a web with that path. The last successful result is
    // returned and if the already the first part failed the starting web is returned.
    try {
        do {
            var name = parts[index];
            // "*" is a wildcard, not a real web name; stop the descent when the caller
            // asked for literal resolution only.
            if (skipWildcards && name == "*") {
                break;
            }
            web = GetWeb(PathUtility.JoinPath(web.Path, name));
        } while (++index < parts.Length);
    // Deliberately swallow the failure: the first part that does not resolve ends the
    // descent and the last successfully resolved web is returned.
    } catch {}
    return(web);
}
// Implementation of the rest of the NavigatingConnector interface. Returns the child
// webs of the specified web, caching them on the parent instance.
public override IEnumerable <WebInfo> GetWebs(WebInfo web) {
    if (web == null) {
        throw new ArgumentNullException("web");
    }
    // Children are stored on the parent web instance because the Get-ChildItem cmdlet
    // probes for a "*" child several times; every probe would otherwise re-trigger the
    // retrieval. The path cache only stores resolved objects, so the parent-side store
    // is both simpler and faster than extending the cache to hold child collections.
    var children = web.Webs;
    if (children == null || !Cache.Check()) {
        children = GetWebsDirectly(web);
        web.Webs = children;
    }
    return children;
}
// Creates a new list below the specified web and keeps the parent's cached list
// collection in sync when it has already been populated.
public ListInfo AddList(WebInfo web, ListCreationParameters parameters) {
    if (web == null) {
        throw new ArgumentNullException("web");
    }
    if (parameters == null) {
        throw new ArgumentNullException("parameters");
    }
    parameters.Check();
    var created = AddListDirectly(web, parameters);
    var cached = web.Lists;
    if (cached != null) {
        web.Lists = cached.Concat(new[] { created }).ToList();
    }
    return created;
}
// Creates a new child web below the specified web and keeps the parent's cached child
// collection in sync when it has already been populated.
public WebInfo AddWeb(WebInfo web, WebCreationParameters parameters) {
    if (web == null) {
        throw new ArgumentNullException("web");
    }
    if (parameters == null) {
        throw new ArgumentNullException("parameters");
    }
    parameters.Check();
    var created = AddWebDirectly(web, parameters);
    var cached = web.Webs;
    if (cached != null) {
        web.Webs = cached.Concat(new[] { created }).ToList();
    }
    return created;
}
// Implementation of the ModifyingConnector interface support which performs modifications
// by calling SharePoint web services.
protected override void RemoveWebDirectly(WebInfo web) {
    // A web cannot be deleted if it contains any child webs. All child webs have to be
    // deleted before the parent web. Let's get all webs on the entire site collection
    // and filter out all ancestor webs which shouldn't be deleted.
    var webUrl = PathUtility.JoinPath(Drive.WebUrl, web.Path);
    var webs = GetAllWebs(web).Where(item => item.StartsWithCI(webUrl + "/"));
    // Include the web itself; it must go last.
    webs = webs.Concat(new[] { webUrl });
    // Longest URL first: a descendant's URL is always longer than its ancestor's, so
    // children are guaranteed to be removed before their parents.
    foreach (var child in webs.OrderByDescending(item => item.Length)) {
        var childPath = child.Substring(webUrl.Length).TrimStart('/');
        childPath = PathUtility.JoinPath(web.Path, childPath);
        // Deleting a web site can be done by Sites.DeleteWeb() but this method is not
        // available in SharePoint 2007. Deleting document workspace - which is actually
        // a web - is a nice alternative working on any SharePoint server version.
        Log.Verbose("Removing web at /{0}.", childPath);
        GetService <Dws>(childPath).DeleteDws();
    }
}
// Builds the informational object for a list from its XML description, also back-filling
// the parent web's ID when the XML carries it and the web does not know it yet.
ListInfo CreateListInfo(WebInfo web, XmlElement source) {
    var path = PathUtility.JoinPath(web.Path, GetListName(source));
    var list = new ListInfo(web, path) {
        ID = GetListID(source),
        Title = GetListTitle(source),
        Created = GetListCreated(source),
        LastModified = GetListLastModified(source),
        LastDeleted = GetListLastDeleted(source),
        ItemCount = GetListItemCount(source)
    };
    if (HasListFields(source)) {
        list.Fields = GetListFields(source).ToList();
    }
    if (web.ID.IsEmpty()) {
        web.ID = GetListWebID(source);
    }
    // Registers the new object in the cache.
    FinalizeInfo(list, source);
    return list;
}
// Returns the web folders of the specified web. Web folders are artificial objects that
// the SharePoint object model cannot enumerate; they are inferred by cutting the parent
// container names from the web-relative paths of all lists on the web.
public IEnumerable<WebFolderInfo> GetWebFolders(WebInfo web) {
    if (web == null)
        throw new ArgumentNullException("web");
    var parents = GetAllLists(web)
        .Select(list => PathUtility.GetParentPath(list.WebRelativePath))
        .Distinct(ConfigurableComparer<string>.CaseInsensitive)
        .Where(parent => parent.Any());
    return parents.Select(parent => InferWebFolder(web, parent)).ToList();
}
// Produces the informational object for a web folder identified by its web-relative
// name; implementations may serve it from a cache.
protected abstract WebFolderInfo InferWebFolder(WebInfo web, string name);
// Initializes a list placed on the specified web.
// NOTE(review): the base constructor runs before this null guard; if base(web, path)
// dereferences web, a NullReferenceException would precede the ArgumentNullException —
// confirm the base constructor tolerates null.
public ListInfo(WebInfo web, string path) : base(web, path) { if (web == null) throw new ArgumentNullException("web"); Web = web; }
// Retrieves the raw XML descriptions of all lists placed on the specified web.
protected abstract IEnumerable <XmlElement> QueryLists(WebInfo web);
// Abstract methods to support the direct SharePoint object retrieval. They must call the
// FinalizeInfo method so that the returned object is correctly put to the cache.
protected abstract IEnumerable<WebInfo> GetWebsDirectly(WebInfo web);
// Abstract methods modifying the actual SharePoint objects.
// Deletes the specified web from the backing store.
protected abstract void RemoveWebDirectly(WebInfo web);
// Creates the list in the backing store and returns its informational object.
protected abstract ListInfo AddListDirectly(WebInfo web, ListCreationParameters parameters);
// Adds a new list under the given web; if the parent's list cache has already been
// populated, the fresh list is appended to it so later lookups stay consistent.
public ListInfo AddList(WebInfo web, ListCreationParameters parameters) {
    if (web == null)
        throw new ArgumentNullException("web");
    if (parameters == null)
        throw new ArgumentNullException("parameters");
    parameters.Check();
    var created = AddListDirectly(web, parameters);
    if (web.Lists != null) {
        var updated = web.Lists.ToList();
        updated.Add(created);
        web.Lists = updated;
    }
    return created;
}
// Finds the list or web folder placed directly below the web which matches the name
// case-insensitively. Direct children of a web can only be of those two kinds; folders
// are checked first, the same order the original lookup used.
Info GetListOrWebFolder(WebInfo web, string name) {
    var candidates = GetWebFolders(web).Cast<Info>().Concat(GetLists(web).Cast<Info>());
    foreach (var candidate in candidates) {
        if (candidate.Name.EqualsCI(name))
            return candidate;
    }
    throw new ApplicationException("No list or web folder found.");
}
// Creates the child web in the backing store and returns its informational object.
protected abstract WebInfo AddWebDirectly(WebInfo web, WebCreationParameters parameters);
// Convenience wrapper: looks up the XML description of a web by its path.
XmlElement GetWebXml(WebInfo web) {
    var path = web.Path;
    return QueryWeb(path);
}
// Creates the list by calling the Lists web service of the parent web and returns the
// XML description of the newly created list.
protected override XmlElement RawAddList(WebInfo web, ListCreationParameters parameters) {
    Log.Verbose("Adding the list {0} to /{1}.", parameters.Name, web.Path);
    var service = GetService <Lists>(web.Path);
    var result = service.AddList(parameters.Name, parameters.Description, parameters.Template);
    return (XmlElement) result;
}
// Implementation of the rest of the NavigatingConnector interface.
public override IEnumerable<WebInfo> GetWebs(WebInfo web) {
    if (web == null)
        throw new ArgumentNullException("web");
    // Child webs are memoized on the web instance itself: the Get-ChildItem cmdlet
    // probes the existence of a "*" child repeatedly, and each probe would otherwise
    // re-fetch the whole collection. The path cache only holds resolved objects, so
    // this parent-side store is the cheaper option.
    var stale = web.Webs == null || !Cache.Check();
    if (stale)
        web.Webs = GetWebsDirectly(web);
    return web.Webs;
}
// Builds the informational object for a web folder at the given path. Web folders have
// no backing SharePoint object, hence the empty title.
WebFolderInfo CreateWebFolderInfo(WebInfo web, string path) {
    var folder = new WebFolderInfo(web, path) { Title = "" };
    // Registers the new object in the cache.
    FinalizeInfo(folder);
    return folder;
}
// Returns all lists of the web, memoized on the web instance. The path cache stores
// resolved SharePoint objects only; keeping the child collection on the parent web
// instance is simpler and performs better than extending the cache for collections.
protected override IEnumerable<ListInfo> GetAllLists(WebInfo web) {
    var lists = web.Lists;
    if (lists == null || !Cache.Check()) {
        lists = GetAllListsDirectly(web);
        web.Lists = lists;
    }
    return lists;
}
// Methods creating informational objects to be returned by the drive provider which will
// represent particular SharePoint objects in the PowerShell space. They are responsible
// to add the resulting object to the cache by calling the parent's FinalizeInfo.
WebInfo CreateWebInfo(WebInfo web, XmlElement source) {
    var name = GetWebName(source);
    var path = PathUtility.JoinPath(web.Path, name);
    return CreateWebInfo(path, source);
}
// Builds the web folder object without consulting the cache. NOTE(review): overrides in
// this file receive the web-relative name here, despite the parameter name "path" —
// confirm the intended contract.
protected abstract WebFolderInfo InferWebFolderDirectly(WebInfo web, string path);
// Builds the informational object for a web at the given absolute path from its XML
// description and registers it in the cache.
WebInfo CreateWebInfo(string path, XmlElement source) {
    var web = new WebInfo(this, path) {
        ID = GetWebID(source),
        Title = GetWebTitle(source)
    };
    FinalizeInfo(web, source);
    return web;
}
// Returns all lists of the specified web; implementations are encouraged to cache the
// result because this method can be called multiple times within one operation.
protected abstract IEnumerable <ListInfo> GetAllLists(WebInfo web);
// Creates the list in the backing store and wraps the raw result in an informational
// object.
protected override ListInfo AddListDirectly(WebInfo web, ListCreationParameters parameters) {
    var source = RawAddList(web, parameters);
    return CreateListInfo(web, source);
}
// Creates the child web in the backing store and wraps the raw result in an
// informational object placed at the new web's path.
protected override WebInfo AddWebDirectly(WebInfo web, WebCreationParameters parameters) {
    var path = PathUtility.JoinPath(web.Path, parameters.Name);
    var source = RawAddWeb(web, parameters);
    return CreateWebInfo(path, source);
}
// Methods creating informational objects to be returned by the drive provider which will
// represent particular SharePoint objects in the PowerShell space. They are responsible
// to add the resulting object to the cache by calling the parent's FinalizeInfo.
WebInfo CreateWebInfo(WebInfo web, XmlElement source) {
    var childPath = PathUtility.JoinPath(web.Path, GetWebName(source));
    return CreateWebInfo(childPath, source);
}
// Queries the raw list descriptions of the web and materializes an informational object
// for each of them.
protected override IEnumerable<ListInfo> GetAllListsDirectly(WebInfo web) {
    var lists = new List<ListInfo>();
    foreach (var source in QueryLists(web)) {
        lists.Add(CreateListInfo(web, source));
    }
    return lists;
}
// Implementation of the NavigatingConnector. Methods retrieving lists and web folders
// can be implemented here already because of the protected abstract GetAllLists method.
// The SharePoint object model doesn't offer getter for lists by their URL and that's why
// even getting a single web folder or list is performed by enumerating all lists of the
// web. Descendants are encouraged to cache the lists at least for a single PowerShell
// operation because the GetAllLists can be called multiple times even within this class.
// Returns the child webs of the specified web.
public abstract IEnumerable<WebInfo> GetWebs(WebInfo web);
// Implementation of the direct object information retrieval which will be recognized in
// the GetXxx methods below to extract concrete object properties from.
protected override IEnumerable<XmlElement> QueryWebs(WebInfo web) {
    var nodes = GetWebXml(web).SelectNodes("Web");
    return nodes.OfType<XmlElement>();
}
// Resolves the deepest web reachable by appending successive path parts to the starting
// web. On return, index points at the first part that failed to resolve (or past the end
// when every part resolved).
WebInfo FindWeb(WebInfo web, string[] parts, ref int index, bool skipWildcards) {
    // We continue appending the current part of the path and advancing to the next one as
    // long as we succeed in resolving a web with that path. The last successful result is
    // returned and if the already the first part failed the starting web is returned.
    try {
        do {
            var name = parts[index];
            // "*" is a wildcard, not a real web name; stop when asked for literal
            // resolution only.
            if (skipWildcards && name == "*")
                break;
            web = GetWeb(PathUtility.JoinPath(web.Path, name));
        } while (++index < parts.Length);
    // Deliberate best-effort: the first failing GetWeb ends the descent and the last
    // successfully resolved web is returned.
    } catch {}
    return web;
}
// Adds a new <List> element to the in-memory XML document after verifying that no
// sibling of any kind (web, web folder or list) already uses the requested name.
// Returns the created element; SaveSite persists the modified document.
protected override XmlElement RawAddList(WebInfo web, ListCreationParameters parameters) {
    var target = GetWebXml(web);
    if (HasWebXml(target, parameters.Name))
        throw new ApplicationException("Web with the same name found.");
    if (HasWebFolderXml(target, parameters.Name))
        throw new ApplicationException("WebFolder with the same name found.");
    if (HasListXml(target, parameters.Name))
        throw new ApplicationException("List with the same name found.");
    var source = target.OwnerDocument.CreateElement("List");
    source.SetAttribute("ID", Guid.NewGuid().ToString("D"));
    source.SetAttribute("Name", parameters.Name);
    // BUG FIX: the condition was inverted — it wrote the Description attribute only when
    // the description was null or empty. Optional attributes are written only when a
    // value was actually supplied (same pattern as Language/Locale in RawAddWeb).
    if (!string.IsNullOrEmpty(parameters.Description))
        source.SetAttribute("Description", parameters.Description);
    source.SetAttribute("Template", parameters.Template.ToStringI());
    source.SetAttribute("Created", DateForNow);
    target.AppendChild(source);
    SaveSite(target);
    return source;
}
// Enumerates the <List> child elements of the web's XML description.
protected override IEnumerable <XmlElement> QueryLists(WebInfo web) {
    var nodes = GetWebXml(web).SelectNodes("List");
    return nodes.OfType <XmlElement>();
}
// Adds a new <Web> element to the in-memory XML document after verifying that no
// sibling of any kind (web, web folder or list) already uses the requested name.
// Optional attributes are emitted only when the caller supplied a value.
// Returns the created element; SaveSite persists the modified document.
protected override XmlElement RawAddWeb(WebInfo web, WebCreationParameters parameters) {
    var target = GetWebXml(web);
    if (HasWebXml(target, parameters.Name))
        throw new ApplicationException("Web with the same name found.");
    if (HasWebFolderXml(target, parameters.Name))
        throw new ApplicationException("WebFolder with the same name found.");
    if (HasListXml(target, parameters.Name))
        throw new ApplicationException("List with the same name found.");
    var source = target.OwnerDocument.CreateElement("Web");
    source.SetAttribute("ID", Guid.NewGuid().ToString("D"));
    source.SetAttribute("Name", parameters.Name);
    source.SetAttribute("Title", parameters.Title);
    // BUG FIX: the condition was inverted — it wrote the Description attribute only when
    // the description was null or empty. Like every other optional attribute below, it
    // must be written only when a non-empty value was provided.
    if (!string.IsNullOrEmpty(parameters.Description))
        source.SetAttribute("Description", parameters.Description);
    source.SetAttribute("Template", parameters.Template);
    if (parameters.Language > 0)
        source.SetAttribute("Language", parameters.Language.ToStringI());
    if (parameters.Locale > 0)
        source.SetAttribute("Locale", parameters.Locale.ToStringI());
    if (parameters.CollationLocale > 0)
        source.SetAttribute("CollationLocale", parameters.CollationLocale.ToStringI());
    if (parameters.UniquePermissions.HasValue)
        source.SetAttribute("UniquePermissions", parameters.UniquePermissions.Value.ToStringI());
    if (parameters.Anonymous.HasValue)
        source.SetAttribute("Anonymous", parameters.Anonymous.Value.ToStringI());
    if (parameters.Presence.HasValue)
        source.SetAttribute("Presence", parameters.Presence.Value.ToStringI());
    source.SetAttribute("Created", DateForNow);
    target.AppendChild(source);
    SaveSite(target);
    return source;
}
// Initializes a web folder placed directly below the specified web.
// Guard added for consistency with the ListInfo constructor: a web folder must always
// belong to a web, so a null argument is rejected explicitly.
public WebFolderInfo(WebInfo web, string path) : base(web, path) {
    if (web == null)
        throw new ArgumentNullException("web");
    Web = web;
}
// Implementation of the ModifyingConnector interface support which performs modifications
// in the in-memory XML document initialized from assembly resources.
protected override void RemoveWebDirectly(WebInfo web) {
    var source = GetWebXml(web);
    // Remember the parent before detaching the element so the site can still be saved
    // starting from it afterwards.
    var parent = (XmlElement) source.ParentNode;
    source.Remove();
    SaveSite(parent);
}
// Adds a new child web under the given web; if the parent's child-web cache has already
// been populated, the fresh web is appended to it so later lookups stay consistent.
public WebInfo AddWeb(WebInfo web, WebCreationParameters parameters) {
    if (web == null)
        throw new ArgumentNullException("web");
    if (parameters == null)
        throw new ArgumentNullException("parameters");
    parameters.Check();
    var child = AddWebDirectly(web, parameters);
    if (web.Webs != null) {
        var updated = web.Webs.ToList();
        updated.Add(child);
        web.Webs = updated;
    }
    return child;
}
// Shorthand for querying the XML description of the given web by path.
XmlElement GetWebXml(WebInfo web) {
    return QueryWeb(web.Path);
}
// Implementation of the ModifyingConnector interface. The methods remove the existing
// item from cache, call the descendant to perform the actual operation and/or return
// the new object similarly to the item getting and listing methods.
public void RemoveWeb(WebInfo web) {
    if (web == null)
        throw new ArgumentNullException("web");
    // Drop the resolved object from the path cache before the actual deletion.
    Cache.RemoveObject(web);
    // NOTE(review): this filters the removed web's OWN child collection by the removed
    // web's ID; it looks like the intent was to update the parent's child collection
    // instead — confirm against WebInfo's structure.
    if (web.Webs != null)
        web.Webs = web.Webs.Where(item => item.ID != web.ID).ToList();
    RemoveWebDirectly(web);
}
// Performs the raw web creation and builds the informational object at the child path.
protected override WebInfo AddWebDirectly(WebInfo web, WebCreationParameters parameters) {
    var childPath = PathUtility.JoinPath(web.Path, parameters.Name);
    var source = RawAddWeb(web, parameters);
    return CreateWebInfo(childPath, source);
}
// Performs the raw list creation and wraps the result in an informational object.
protected override ListInfo AddListDirectly(WebInfo web, ListCreationParameters parameters) {
    var raw = RawAddList(web, parameters);
    return CreateListInfo(web, raw);
}
// Retrieves all lists of the web directly from the backing store, bypassing any cache.
protected abstract IEnumerable<ListInfo> GetAllListsDirectly(WebInfo web);
// Performs the raw web creation and returns the XML description of the new web.
protected abstract XmlElement RawAddWeb(WebInfo web, WebCreationParameters parameters);
// Returns the web folder for the given name, serving it from the cache when possible.
// NOTE(review): the cast assumes any cached object at this path is a WebFolderInfo; a
// cached object of another type would raise InvalidCastException — confirm that paths
// cannot be shared between object kinds.
protected override WebFolderInfo InferWebFolder(WebInfo web, string name) {
    var path = PathUtility.JoinPath(web.Path, name);
    var cached = (WebFolderInfo) Cache.GetObjectOrDefault(path);
    if (cached != null)
        return cached;
    return InferWebFolderDirectly(web, name);
}
// Performs the raw list creation and returns the XML description of the new list.
protected abstract XmlElement RawAddList(WebInfo web, ListCreationParameters parameters);
// Implementation of the rest of the NavigatingConnector interface.
protected override IEnumerable <WebInfo> GetWebsDirectly(WebInfo web) {
    var webs = new List<WebInfo>();
    foreach (var source in QueryWebs(web)) {
        webs.Add(CreateWebInfo(web, source));
    }
    return webs;
}
// Implementation of the NavigatingConnector. Methods retrieving lists and web folders
// can be implemented here already because of the protected abstract GetAllLists method.
// The SharePoint object model doesn't offer getter for lists by their URL and that's why
// even getting a single web folder or list is performed by enumerating all lists of the
// web. Descendants are encouraged to cache the lists at least for a single PowerShell
// operation because the GetAllLists can be called multiple times even within this class.
// Returns the child webs of the specified web.
public abstract IEnumerable <WebInfo> GetWebs(WebInfo web);
// Materializes an informational object for every raw list description of the web.
protected override IEnumerable <ListInfo> GetAllListsDirectly(WebInfo web) {
    var result = new List<ListInfo>();
    foreach (var element in QueryLists(web)) {
        result.Add(CreateListInfo(web, element));
    }
    return result;
}
// Builds a fresh web folder object for the given web-relative name without consulting
// the cache.
protected override WebFolderInfo InferWebFolderDirectly(WebInfo web, string name) {
    var folderPath = PathUtility.JoinPath(web.Path, name);
    return CreateWebFolderInfo(web, folderPath);
}
// Abstract methods to support the direct SharePoint object retrieval. They must call the
// FinalizeInfo method so that the returned object is correctly put to the cache.
protected abstract IEnumerable <WebInfo> GetWebsDirectly(WebInfo web);