/// <summary>
/// Walks the given web and its immediate subwebs to extract "complete template"
/// definitions, returning the names of the extracted files.
/// </summary>
/// <param name="context">Client context used for the server round trips.</param>
/// <param name="extractionWeb">Web whose elements (and child webs' elements) are extracted.</param>
/// <param name="logger">Sink for progress and error messages.</param>
/// <returns>The extracted file names, or null when an unexpected error aborts the run.</returns>
private List<string> ExtractCompleteSPOElements(ClientContext context, Web extractionWeb, ILogger logger)
{
    try
    {
        var extractedFiles = new List<string>();
        var templateFileName = string.Format("{0}.--.complete-template.xml", extractionWeb.Title);
        var childWebs = extractionWeb.Webs;
        context.Load(childWebs);
        context.ExecuteQuery();
        // Extraction of the root web is currently disabled:
        //ExtractCompleteSPOWebElements(context, extractionWeb, templateFileName, logger);
        foreach (Web childWeb in childWebs)
        {
            templateFileName = string.Format("{0}.--.complete-template.xml", childWeb.Title);
            // Per-subweb extraction is currently disabled:
            //ExtractCompleteSPOWebElements(context, childWeb, templateFileName, logger);
        }
        logger.LogMessage("Working on splitting template files...");
        return extractedFiles;
    }
    catch (Exception ex)
    {
        logger.LogMessage(string.Format(Constants.UnknownError, ex.Message), LogType.ErrorAndAbort);
        return null;
    }
}
/// <summary>
/// Lazily initializes the child web collection for this object's context and URL.
/// Subsequent calls are no-ops once the collection exists.
/// </summary>
private void LoadWebs()
{
    if (this.webs != null)
    {
        return;
    }
    this.webs = new WebCollection(this.Context, new Uri(this.Url));
}
/// <summary>
/// Returns the complete site structure. Depending on parameter values, the structure may omit list items and permission nodes
/// </summary>
/// <param name="url">Url of the site collection to connect to</param>
/// <param name="bgw">Background worker which invoked this method. Reference used to raise progress events</param>
/// <param name="AnalyzeListItems">Returns List Item level nodes in the structure, if true</param>
/// <param name="GetFullPermissionStructure">Returns permission nodes for objects with broken inheritance, if set to true</param>
/// <param name="PreLoadPermissions">Loads permissions on the site as a first step. Has to be true if full permission structure is being fetched</param>
/// <param name="LoadAllItems">Passed through to the traversal via a static flag; presumably loads every list item when true — confirm in ProcessWebs.</param>
/// <returns>SPSecurable object instance representing the entire site collection</returns>
public static SPSecurableObject GetStructure(string url, BackgroundWorker bgw, bool AnalyzeListItems, bool GetFullPermissionStructure, bool PreLoadPermissions, bool LoadAllItems)
{
    // Stash call options in static fields so the recursive helpers can read
    // them without threading extra parameters through every call.
    // NOTE(review): this makes the method non-reentrant / not thread-safe.
    bgwWorker = bgw;
    bAnalyzeListItems = AnalyzeListItems;
    bGetFullPermissionStructure = GetFullPermissionStructure;
    bLoadAllItems = LoadAllItems;
    using (ClientContext context = new ClientContext(url))
    {
        if (PreLoadPermissions)
        {
            LoadSiteGroups(context);
        }
        // Batch-load the site, its root web, and the root web's children in one round trip.
        Site site = context.Site;
        Web rootWeb = site.RootWeb;
        context.Load(site);
        context.Load(rootWeb);
        WebCollection webCollection = rootWeb.Webs;
        context.Load(webCollection);
        context.ExecuteQuery();
        // Only counts the root's direct children; used for progress reporting.
        TotalWebCount = webCollection.Count;
        // Recursive traversal fills TopSecurableObject in place.
        ProcessWebs(context, site, rootWeb, ref TopSecurableObject);
    }
    return(TopSecurableObject);
}
/// <summary>
/// Populates the tree view with one node per immediate subweb of the web at URL.
/// </summary>
public void LoadControl()
{
    var clientContext = new ClientContext(URL);
    var rootWeb = clientContext.Web;
    clientContext.Load(rootWeb);
    clientContext.ExecuteQuery();

    var subWebs = rootWeb.Webs;
    clientContext.Load(subWebs);
    clientContext.ExecuteQuery();

    foreach (Web subWeb in subWebs)
    {
        // Add a node for the subweb, then load the subweb's own properties.
        TreeNode node = this.tvStructure.Nodes.Add(subWeb.Title);
        clientContext.Load(subWeb);
        clientContext.ExecuteQuery();
    }
}
/// <summary>
/// Queues retrieval of the current user's subwebs with the requested projections
/// and flags the wrapper so the next execution round trip actually runs the query.
/// </summary>
/// <param name="retrievals">Optional property projections for the web collection.</param>
/// <returns>This instance, to allow fluent chaining.</returns>
public SPClientWeb IncludeWebs(params Expression<Func<WebCollection, object>>[] retrievals)
{
    var subwebs = Web.GetSubwebsForCurrentUser(null);
    Web.Context.Load(subwebs, retrievals);
    WebsForCurrentUser = subwebs;
    _executeQuery = true;
    return this;
}
/// <summary>
/// Connects to another site collection using an app-only access token, appends the
/// URLs of the root web and each subweb to the output, and processes role
/// assignments for every web visited.
/// </summary>
/// <param name="siteUrl">Absolute URL of the site collection to visit.</param>
private void CallAnotherSite(string siteUrl)
{
    var targetUri = new Uri(siteUrl);
    string realm = TokenHelper.GetRealmFromTargetUrl(targetUri);
    string accessToken = TokenHelper.GetAppOnlyAccessToken(TokenHelper.SharePointPrincipal, targetUri.Authority, realm).AccessToken;
    using (var clientContext = TokenHelper.GetClientContextWithAccessToken(targetUri.ToString(), accessToken))
    {
        Web rootWeb = clientContext.Web;
        clientContext.Load(rootWeb);
        // Role assignments for the root web are queued before the query executes.
        ProcessRoleAssignments(rootWeb, clientContext);
        WebCollection subWebs = rootWeb.Webs;
        clientContext.Load<WebCollection>(subWebs);
        clientContext.ExecuteQuery();
        outPutText += rootWeb.Url + "<br/>";
        foreach (var subWeb in subWebs)
        {
            outPutText += subWeb.Url + "<br/>";
            ProcessRoleAssignments(subWeb, clientContext);
        }
    }
}
/// <summary>
/// Recursively mirrors a web hierarchy on disk under the project's parent folder,
/// creating the configured subfolders, extracting each web's template, and
/// descending into "SubSites" folders for child webs.
/// </summary>
/// <param name="ctx">Client context used to load child web collections.</param>
/// <param name="webs">Already-loaded collection of <paramref name="rootWeb"/>'s children.</param>
/// <param name="rootWeb">Web whose structure is written at <paramref name="parentFolder"/>.</param>
/// <param name="parentFolder">Folder name (relative to the project root) for this web.</param>
/// <param name="logger">Sink for progress and error messages.</param>
private void CreateSiteStructure(ClientContext ctx, WebCollection webs, Web rootWeb, string parentFolder, ILogger logger)
{
    try
    {
        string rootFolder = string.Format("{0}\\..\\..\\{1}\\", Environment.CurrentDirectory, parentFolder);
        //rename root directory
        if (!System.IO.Directory.Exists(rootFolder))
        {
            System.IO.Directory.CreateDirectory(rootFolder);
            if (rootWeb.ParentWeb.ServerObjectIsNull.HasValue && rootWeb.ParentWeb.ServerObjectIsNull.Value)
            {
                //This is the root site
                //Move template folders into the current directory
                DirectoryInfo dirInfo = new DirectoryInfo(string.Format("{0}\\..\\..\\{1}\\", Environment.CurrentDirectory, "Template Store"));
                ProcessDirectories(dirInfo, rootFolder);
            }
        }
        // Ensure every configured subfolder exists under this web's folder.
        foreach (var folder in ConfigData.Folders)
        {
            if (!System.IO.Directory.Exists(string.Concat(rootFolder, folder.Value)))
            {
                System.IO.Directory.CreateDirectory(string.Concat(rootFolder, folder.Value));
            }
        }
        ExtractCompleteSPOWebElements(ctx, rootWeb, string.Format("{0}.--.complete-template.xml", rootWeb.Title), rootFolder, logger);
        string subSitesFolder = parentFolder;
        if (webs.Count > 0)
        {
            subSitesFolder = parentFolder + "\\SubSites\\";
            string subSitesPath = string.Format("{0}\\..\\..\\{1}\\", Environment.CurrentDirectory, subSitesFolder);
            if (!System.IO.Directory.Exists(subSitesPath))
            {
                System.IO.Directory.CreateDirectory(subSitesPath);
            }
        }
        foreach (Web subWeb in webs)
        {
            try
            {
                WebCollection subWebCollection = subWeb.Webs;
                ctx.Load(subWebCollection);
                ctx.ExecuteQuery();
                // BUGFIX: compute the child's folder per iteration instead of
                // mutating parentFolder, which previously accumulated "\..\"
                // segments across sibling iterations.
                CreateSiteStructure(ctx, subWebCollection, subWeb, subSitesFolder + "\\" + subWeb.Title, logger);
            }
            catch (Exception ex)
            {
                // BUGFIX: was an empty catch that silently dropped per-subweb failures.
                logger.LogMessage(string.Concat(Constants.UnknownError, ex.Message), LogType.ErrorAndContinue);
            }
        }
    }
    catch (Exception ex)
    {
        logger.LogMessage(string.Concat(Constants.UnknownError, ex.Message), LogType.ErrorAndContinue);
    }
}
/// <summary>
/// Loads the collection and returns the web whose Url exactly equals <paramref name="name"/>.
/// </summary>
/// <param name="value">Web collection to search; it is loaded and executed here.</param>
/// <param name="name">URL to match exactly.</param>
/// <returns>The matching web. Throws InvalidOperationException when no web matches.</returns>
public static Web GetWebByUrl(this WebCollection value, string name)
{
    var ctx = value.Context;
    // Materialize the collection, then filter client-side on the URL.
    ctx.Load(value);
    ctx.ExecuteQuery();
    return value.Where(web => web.Url == name).First();
}
/// <summary>
/// Event handler: uploads the collections passed in <paramref name="data"/> to the
/// online provider, then replaces the local copies with the uploaded versions and
/// refreshes the side panel. Aborts (with a dialog) when the user is not logged in
/// or any collection is empty.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="data">Expected to be an IList&lt;ICollection&gt; of collections to upload.</param>
private async void UploadNewCollections(object sender, object data = null)
{
    // Require a valid API key before doing any work.
    if (!await Initalizer.WebCollectionProvider.IsCurrentKeyValid())
    {
        _userDialogs.OkMessageBox("You need to login before uploading collections", "Error", MessageBoxType.Error);
        return;
    }
    var collectionList = (IList<ICollection>)data;
    // Reject the whole batch if any collection has no beatmaps.
    foreach (var c in collectionList)
    {
        if (!c.AllBeatmaps().Any())
        {
            _userDialogs.OkMessageBox("Empty collection - upload aborted", "Error", MessageBoxType.Error);
            return;
        }
    }
    // Keep the originals so they can be removed after a successful upload.
    var oldCollections = new Collections();
    oldCollections.AddRange(collectionList);
    var newCollections = new Collections();
    foreach (var c in collectionList)
    {
        // Wrap each local collection in a WebCollection and upload it;
        // Save returns the server-side representation(s).
        var webCollection = new WebCollection(0, _osuFileIo.LoadedMaps, true);
        webCollection.Name = c.Name;
        webCollection.LastEditorUsername = c.LastEditorUsername;
        foreach (var collectionBeatmap in c.AllBeatmaps())
        {
            webCollection.AddBeatmap(collectionBeatmap);
        }
        newCollections.AddRange(await webCollection.Save(Initalizer.WebCollectionProvider));
    }
    // Swap local collections for the uploaded ones.
    _collectionEditor.EditCollection(CollectionEditArgs.RemoveCollections(oldCollections));
    _collectionEditor.EditCollection(CollectionEditArgs.AddCollections(newCollections));
    var sidePanel = (IOnlineCollectionList)_mainForm.SidePanelView;
    sidePanel.WebCollections.AddRange(newCollections.OfType<WebCollection>());
    sidePanel.WebCollections.CallReset();
    if (newCollections.Count > 0)
    {
        _userDialogs.OkMessageBox($"Collections uploaded", "Info", MessageBoxType.Success);
    }
    // For a single uploaded collection, open its page in the browser.
    if (newCollections.Count == 1)
    {
        Process.Start($"https://osustats.ppy.sh/collection/{newCollections[0].OnlineId}");
    }
}
/// <summary>
/// Loads the given web collection and dispatches each subweb to the Web-based
/// overload, which parses that web's navigation nodes and recurses further down.
/// </summary>
/// <param name="clientContext">Context used to materialize the collection.</param>
/// <param name="webs">Webs whose navigation should be processed.</param>
/// <param name="navigationNodes">Navigation nodes to add.</param>
private static void ProcessWebNavigationNodes(ClientContext clientContext, WebCollection webs, List<PnP.Entities.NavigationNodeEntity> navigationNodes)
{
    clientContext.Load(webs);
    clientContext.ExecuteQueryRetry();
    foreach (Web subWeb in webs)
    {
        ProcessWebNavigationNodes(clientContext, subWeb, navigationNodes);
    }
}
/// <summary>
/// Returns the most recently created subsite of the root site, with its
/// SiteUsers and Lists collections populated.
/// </summary>
private Web GetLatestSite()
{
    WebCollection webCollection = GetRootSite().Webs;
    // NOTE(review): the collection is sorted client-side here, which assumes
    // GetRootSite() already loaded Webs (including Created) — enumerating an
    // unloaded CSOM collection throws. TODO confirm against GetRootSite.
    Web toReturn = webCollection.OrderByDescending(subsite => subsite.Created).FirstOrDefault();
    // NOTE(review): if the root site has no subsites, toReturn is null and the
    // Load call below will fail — confirm callers guarantee at least one subsite.
    this.SharePointContext.Load(toReturn, site => site.SiteUsers, site => site.Lists);
    this.SharePointContext.ExecuteQuery();
    return(toReturn);
}
// Loads the subwebs of the given web (plus the context web's parent) in one
// round trip. The per-web processing in the loop body is currently commented
// out, so this method only materializes the collections.
static void scanweb(ClientContext ctx, Web w)
{
    WebCollection webs = w.Webs;
    // NOTE(review): loading ctx.Web.ParentWeb here looks unrelated to `w` —
    // confirm it is still needed.
    ctx.Load(ctx.Web.ParentWeb);
    ctx.Load(webs);
    ctx.ExecuteQuery();
    foreach (Web iw in webs)
    {
        // Dead code kept for reference — the per-web details were filtered by title:
        // if (iw.Title == "GST" )
        //     Console.WriteLine("====== " + iw.Title + " " + getWebDetails(ctx, iw, "sub"));
    }
}
//gavdcodeend 15

//gavdcodebegin 16
/// <summary>
/// Prints the title, URL, and Id of every web directly under the site collection's root web.
/// </summary>
/// <param name="spCtx">Authenticated client context for the site collection.</param>
static void SpCsCsomGetWebsInSiteCollection(ClientContext spCtx)
{
    WebCollection rootChildWebs = spCtx.Site.RootWeb.Webs;
    spCtx.Load(rootChildWebs);
    spCtx.ExecuteQuery();
    foreach (Web web in rootChildWebs)
    {
        Console.WriteLine(web.Title + " - " + web.Url + " - " + web.Id);
    }
}
/// <summary>
/// Prints and logs every direct subweb of the given web, including each
/// subweb's parent title.
/// </summary>
/// <param name="ctx">Client context used for the round trips.</param>
/// <param name="w">Web whose children are scanned.</param>
static void scanweb(ClientContext ctx, Web w)
{
    WebCollection webs = w.Webs;
    ctx.Load(webs);
    ctx.ExecuteQuery();
    // PERF FIX: batch the ParentWeb loads into a single round trip instead of
    // issuing one ExecuteQuery per subweb inside the loop.
    foreach (Web iw in webs)
    {
        ctx.Load(iw.ParentWeb);
    }
    ctx.ExecuteQuery();
    foreach (Web iw in webs)
    {
        Console.WriteLine("====== " + iw.Title + " : " + iw.Url);
        LogSiteStructure(iw.ParentWeb.Title, iw.Title, iw.Url);
    }
}
/// <summary>
/// Builds the context-menu entries offered for a single web collection:
/// load, upload changes, delete, and open in browser.
/// </summary>
/// <param name="webCollection">Collection the menu actions operate on.</param>
private ToolStripItem[] GetCollectionSubmenus(WebCollection webCollection)
{
    var load = new ToolStripMenuItem { Text = "Load" };
    load.Click += (sender, args) =>
        SidePanelOperation?.Invoke(this, MainSidePanelActions.AddCollections, new List<WebCollection> { webCollection });

    var uploadChanges = new ToolStripMenuItem { Text = "Upload changes" };
    uploadChanges.Click += (sender, args) =>
        SidePanelOperation?.Invoke(this, MainSidePanelActions.UploadCollectionChanges, new List<WebCollection> { webCollection });

    var delete = new ToolStripMenuItem { Text = "Delete" };
    delete.Click += (sender, args) =>
        SidePanelOperation?.Invoke(this, MainSidePanelActions.RemoveWebCollection, new List<WebCollection> { webCollection });

    var openInBrowser = new ToolStripMenuItem { Text = "Open in browser" };
    openInBrowser.Click += (sender, args) =>
        Process.Start($"https://osustats.ppy.sh/collection/{webCollection.OnlineId}");

    return new ToolStripItem[] { load, uploadChanges, delete, openInBrowser };
}
/// <summary>
/// Applies default-field updates across the context web: updates the site itself,
/// every document library (BaseTemplate 101) on the web and on each direct subweb,
/// and finally pushes the status-field update. Errors are recorded in the log.
/// </summary>
/// <param name="ctx">Client context for the target web.</param>
/// <param name="defaultFields">Default field values to apply; replaced by UpdateSite's result.</param>
/// <returns>The accumulated log entries for this run.</returns>
public static List<Log> Update(ClientContext ctx, DefaultFields defaultFields)
{
    // Earlier overload kept for reference:
    //return Update(ctx, defaultFields, true);
    //}
    //static public List<Log> Update(ClientContext ctx, DefaultFields defaultFields, bool pushToLibrary)
    //{
    Web web = ctx.Web;
    try
    {
        Log.ClearLog();
        // Batch-load the web, its subwebs, its document libraries, and its
        // taxonomy fields in a single round trip.
        WebCollection webs = web.Webs;
        ListCollection lists = web.Lists;
        FieldCollection fields = web.Fields;
        ctx.Load(web);
        ctx.Load(webs);
        // BaseTemplate 101 = document library.
        ctx.Load(lists, doclist => doclist.Where(doc => doc.BaseTemplate == 101));
        //ctx.Load(fields, tcol => tcol.Include(t => t.InternalName, t => t.DefaultValue), tcol => tcol.Where(t => t.TypeAsString == "TaxonomyFieldType" || t.TypeAsString == "TaxonomyFieldTypeMulti"));
        ctx.Load(fields, tcol => tcol.Where(t => t.TypeAsString == "TaxonomyFieldType" || t.TypeAsString == "TaxonomyFieldTypeMulti"));
        ctx.ExecuteQuery();
        // Update the site first; the returned defaults are used for the lists below.
        defaultFields = UpdateSite(ctx, web, fields, defaultFields, false);
        foreach (List list in lists)
        {
            UpdateList(ctx, web, list.Title, defaultFields);
        }
        // Repeat for each direct subweb's document libraries (one round trip per subweb).
        foreach (Web subweb in webs)
        {
            ListCollection subwebLists = subweb.Lists;
            ctx.Load(subwebLists, doclist => doclist.Where(dl => dl.BaseTemplate == 101));
            ctx.ExecuteQuery();
            foreach (List list in subwebLists)
            {
                //Console.WriteLine(subweb.Title + " " + list.Title);
                UpdateList(ctx, subweb, list.Title, defaultFields);
            }
        }
        // Push update of status field, as users isn't able to set column default value on library without it, due to some strange behavior from Microsoft
        PushStatusFieldUpdate(ctx, web, fields);
    }
    catch (Exception ex)
    {
        Log.Error(ctx.Web.Url, "", "", ex.Message);
    }
    return(Log.GetLog());
}
/// <summary>
/// Recursively tallies content-type usage for a web, all of its subwebs, and the
/// items of every list on the web, reporting progress along the way.
/// </summary>
/// <param name="oWorkWeb">Web to scan; its subwebs are scanned depth-first before its lists.</param>
private void ScanContentTypes(Web oWorkWeb)
{
    try
    {
        ctx.Load(oWorkWeb);
        ctx.ExecuteQuery();
        ShowProgress(oWorkWeb.Url);
        // Recurse into every subweb first.
        WebCollection oWebs = oWorkWeb.Webs;
        ctx.Load(oWebs);
        ctx.ExecuteQuery();
        foreach (Web oWeb in oWebs)
        {
            ScanContentTypes(oWeb);
        }
        ListCollection olists = oWorkWeb.Lists;
        ctx.Load(olists);
        ctx.ExecuteQuery();
        foreach (List olist in olists)
        {
            try
            {
                ShowInfo(olist.Title);
                // NOTE(review): CreateAllItemsQuery(2000) returns only the first
                // page of items; lists with more than 2000 items are undercounted.
                CamlQuery oQuery = CamlQuery.CreateAllItemsQuery(2000);
                ListItemCollection items = olist.GetItems(oQuery);
                ctx.Load(items);
                ctx.ExecuteQuery();
                // PERF FIX: batch all ContentType loads for this list into a
                // single round trip instead of one ExecuteQuery per item.
                foreach (ListItem item in items)
                {
                    ctx.Load(item.ContentType);
                }
                ctx.ExecuteQuery();
                foreach (ListItem item in items)
                {
                    IncrementTracker(item.ContentType.Name);
                }
            }
            catch (Exception ex)
            {
                ShowError(ex, "frmAnalyzeContentTypes.ScanContentTypes", "Inside");
            }
        }
    }
    catch (Exception ex)
    {
        ShowError(ex, "frmAnalyzeContentTypes.ScanContentTypes", ex.StackTrace.ToString());
    }
}
/// <summary>
/// Scans the root web and then every immediate subweb, delegating per-web work to scanweb.
/// </summary>
/// <param name="context">Client context whose web is the starting point.</param>
static void getStastics(ClientContext context)
{
    // Handle the root web first.
    scanroot(context);
    WebCollection subWebs = context.Web.Webs;
    context.Load(subWebs);
    context.ExecuteQuery();
    foreach (Web subWeb in subWebs)
    {
        scanweb(context, subWeb);
    }
}
/// <summary>
/// Loads the titles of the current user's subwebs under the SP host web and
/// writes a derived path for each to the debug output, then renders Index.
/// </summary>
public ActionResult ObtenerSitios()
{
    string mainpath = "https://falabella.sharepoint.com";
    var spContext = SharePointContextProvider.Current.GetSharePointContext(HttpContext);
    using (var clientContext = spContext.CreateUserClientContextForSPHost())
    {
        // Only the Title property is requested for each subweb.
        WebCollection oWebsite = clientContext.Web.GetSubwebsForCurrentUser(new SubwebQuery());
        clientContext.Load(oWebsite, n => n.Include(o => o.Title));
        clientContext.ExecuteQuery();
        foreach (Web orWebsite in oWebsite)
        {
            // NOTE(review): this concatenates the host with the web *title*
            // (no '/' separator, not a URL segment) — confirm the intended
            // value; ServerRelativeUrl is the usual choice for building paths.
            string newpath = mainpath + orWebsite.Title;
            System.Diagnostics.Debug.WriteLine(newpath + "\n" + orWebsite.Title);
        }
    }
    return(View("Index"));
}
/// <summary>
/// Checks whether the web has a direct subsite whose server-relative URL is
/// "&lt;web url&gt;/&lt;url&gt;" (compared lower-cased).
/// </summary>
/// <param name="web">Parent web to search under.</param>
/// <param name="url">Leaf URL segment of the candidate subsite.</param>
/// <returns>True when a matching subsite exists.</returns>
public static bool SubSiteExistsWithUrl(this Web web, string url)
{
    Utility.EnsureWeb(web.Context, web, "ServerRelativeUrl");
    string siteUrl = string.Format("{0}/{1}", web.ServerRelativeUrl, url).ToLowerInvariant();
    // Filter server-side so only matching webs are returned.
    WebCollection subSites = web.Webs;
    IEnumerable<Web> results = web.Context.LoadQuery<Web>(subSites.Where(item => item.ServerRelativeUrl == siteUrl));
    web.Context.ExecuteQuery();
    return results.Any();
}
/// <summary>
/// Recursively processes this object's web: loads its subwebs and creates and
/// runs a FunnelbackSite for each one.
/// </summary>
public void Process()
{
    if (this.ww != null)
    {
        WebCollection oWebs = this.ww.Webs;
        this.ww.Context.Load(oWebs);
        this.ww.Context.ExecuteQuery();
        foreach (Web sww in oWebs)
        {
            Console.WriteLine("Site: {0}", sww.Title);
            // NOTE(review): blocks waiting for console input on every subweb —
            // looks like a debugging pause; confirm it is intentional.
            Console.ReadLine();
            // Each subweb gets its own FunnelbackSite, which recurses via Process().
            FunnelbackSite fbxs = new FunnelbackSite();
            fbxs.myfbx = this.myfbx;
            fbxs.ww = sww;
            fbxs.Process();
        }
    }
}
/// <summary>
/// Returns the title and URL of every web directly under the root web of the
/// site collection at <paramref name="url"/>.
/// </summary>
/// <param name="url">Absolute URL of the SharePoint site collection.</param>
/// <returns>200 with the collected site info, or 500 wrapping the failure.</returns>
public async Task<IActionResult> Sites(string url)
{
    List<SharePointParam> results = new List<SharePointParam>();
    try
    {
        // Starting with ClientContext, the constructor requires a URL to the
        // server running SharePoint.
        using (ClientContext context = new ClientContext(url))
        {
            context.Credentials = new SharePointOnlineCredentials(_username, _password);
            // Root web and the collection of sites directly under it.
            Web spRootWebSite = context.Web;
            WebCollection spSites = spRootWebSite.Webs;
            context.Load(spRootWebSite);
            context.Load(spSites);
            await context.ExecuteQueryAsync();
            // BUGFIX: the per-site loads previously ran as parallel
            // ExecuteQueryAsync calls on the same ClientContext, which is not
            // thread-safe. Queue all loads and execute one batched round trip.
            foreach (Web site in spSites)
            {
                context.Load(site);
            }
            await context.ExecuteQueryAsync();
            foreach (Web site in spSites)
            {
                results.Add(new SharePointParam() { Title = site.Title, URL = site.Url });
            }
        }
    }
    catch (Exception ex)
    {
        return StatusCode(StatusCodes.Status500InternalServerError, ex);
    }
    return new OkObjectResult(results);
}
/// <summary>
/// Scans the root web, then logs parent/title/url for each direct subweb and
/// recurses into it via scanweb.
/// </summary>
/// <param name="context">Client context whose web is the starting point.</param>
static void getStastics(ClientContext context)
{
    // Handle the root web first.
    scanroot(context);
    WebCollection webs = context.Web.Webs;
    context.Load(webs);
    context.ExecuteQuery();
    // PERF FIX: batch the ParentWeb loads into a single round trip instead of
    // one ExecuteQuery per subweb inside the loop.
    foreach (Web w in webs)
    {
        context.Load(w.ParentWeb);
    }
    context.ExecuteQuery();
    foreach (Web w in webs)
    {
        Console.WriteLine(w.Title + " : " + w.Url);
        LogSiteStructure(w.ParentWeb.Title, w.Title, w.Url);
        //Scan subweb
        scanweb(context, w);
    }
}
/// <summary>
/// Materializes the webs in <paramref name="subsites"/> and returns them as a flat
/// list, optionally expanding each web's own children depth-first.
/// </summary>
/// <param name="subsites">Collection of webs to expand.</param>
/// <param name="recurse">When true, child webs are included recursively.</param>
/// <returns>All visited webs with their retrieval properties ensured.</returns>
private List<Web> GetSubWebsInternal(WebCollection subsites, bool recurse)
{
    var result = new List<Web>();
    // Make sure each entry at least has its Id before it is touched.
    subsites.EnsureProperties(new Expression<Func<WebCollection, object>>[] { wc => wc.Include(w => w.Id) });
    foreach (var web in subsites)
    {
        // Pull the full property set for this web, then record it.
        web.EnsureProperties(RetrievalExpressions);
        result.Add(web);
        if (recurse)
        {
            // Depth-first: expand this web's children before moving to siblings.
            result.AddRange(GetSubWebsInternal(web.Webs, recurse));
        }
    }
    return result;
}
/// <summary>
/// Returns the title and URL of every web directly under the root web of the
/// current client context.
/// </summary>
/// <returns>200 with the collected site info, or 500 wrapping the failure.</returns>
public async Task<IActionResult> Sites()
{
    List<SharePointParam> results = new List<SharePointParam>();
    try
    {
        // Root web and the collection of sites directly under it.
        Web spRootWebSite = cc.Web;
        WebCollection spSites = spRootWebSite.Webs;
        cc.Load(spRootWebSite);
        cc.Load(spSites);
        await cc.ExecuteQueryAsync();
        // BUGFIX: the per-site loads previously ran as parallel ExecuteQueryAsync
        // calls on the same ClientContext, which is not thread-safe. Queue all
        // loads and execute one batched round trip instead.
        foreach (Web site in spSites)
        {
            Console.WriteLine("Writing sites " + site);
            cc.Load(site);
        }
        await cc.ExecuteQueryAsync();
        foreach (Web site in spSites)
        {
            results.Add(new SharePointParam() { Title = site.Title, URL = site.Url });
        }
    }
    catch (Exception ex)
    {
        return StatusCode(StatusCodes.Status500InternalServerError, ex);
    }
    return new OkObjectResult(results);
}
/// <summary>
/// For each domain, enumerates the subwebs visible to the current user and
/// creates a shortcut for every one of them under <paramref name="shortcutpath"/>.
/// </summary>
/// <param name="path">Base SharePoint URL; domain names are appended to it.</param>
/// <param name="shortcutpath">Local folder root under which shortcuts are created.</param>
/// <param name="domains">Relative site paths to scan.</param>
public static void GetSubWebsAndCreateShortcut(string path, string shortcutpath, string[] domains)
{
    try
    {
        // Foreach Domain Get SubWeb For Current User
        foreach (string domain in domains)
        {
            // BUGFIX: ClientContext is IDisposable and was never disposed.
            using (ClientContext clientContext = new ClientContext(path + domain))
            {
                WebCollection oWebsite = clientContext.Web.GetSubwebsForCurrentUser(null);
                clientContext.Load(oWebsite);
                clientContext.ExecuteQuery();
                foreach (Web orWebsite in oWebsite)
                {
                    // Map the server-relative URL onto the local folder structure.
                    string newpath = shortcutpath + orWebsite.ServerRelativeUrl.Substring(1).Replace("/", "\\");
                    CreateShortcut(orWebsite.Title, newpath, path + orWebsite.ServerRelativeUrl.Substring(1));
                }
            }
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.ToString());
    }
}
// Process an individual web and add it and it's children into the passed tree view node.
// Recurses depth-first through subwebs; document libraries become leaf nodes carrying
// an ImportDestination in their Tag. Webs that end up with no children are removed.
private static void PopulateTreeView(Web web, TreeNode currentnode, ClientContext clientcontext, BackgroundWorker worker)
{
    // Surface the current method/web in the UI progress reporting.
    worker.ReportProgress(0, MethodBase.GetCurrentMethod().Name.ToString() + ":" + web.Title);
    WebCollection webs = web.GetSubwebsForCurrentUser(null);
    clientcontext.Load(webs);
    clientcontext.ExecuteQuery();
    // Go through every sub web of the passed web
    foreach (Web subweb in webs)
    {
        clientcontext.Load(subweb);
        clientcontext.ExecuteQuery();
        // Create a node for the current web
        TreeNode subnode = new TreeNode();
        // Set the node title (That displays to the user)
        subnode.Text = subweb.Title.ToString();
        // Iterate any further sub webs and lists
        WebCollection furthersubwebs = subweb.Webs;
        ListCollection furtherlists = subweb.Lists;
        clientcontext.Load(furthersubwebs);
        clientcontext.Load(furtherlists);
        clientcontext.ExecuteQuery();
        // If there are any further subwebs then add them
        foreach (Web furthersubweb in furthersubwebs)
        {
            PopulateTreeView(furthersubweb, subnode, clientcontext, worker);
        }
        // If there are any lists then add then
        foreach (List list in furtherlists)
        {
            clientcontext.Load(list);
            clientcontext.ExecuteQuery();
            if (list.BaseType == BaseType.DocumentLibrary)
            {
                // At this point we are adding a list which is a document library and it is a valid destination
                // we will therefore supply a bit more detail about the item in a TAG
                TreeNode librarynode = new TreeNode();
                librarynode.Text = list.Title;
                Folder folder = list.RootFolder;
                clientcontext.Load(folder);
                clientcontext.ExecuteQuery();
                ImportDestination importdestination = new ImportDestination();
                importdestination.DestinationFolderUrl = folder.ServerRelativeUrl;
                importdestination.DestinationWebUrl = subweb.ServerRelativeUrl;
                importdestination.DestinationServerUrl = Get_ServerURL_From_URL(clientcontext.Url);
                importdestination.DestinationLibraryName = list.Title;
                librarynode.Tag = importdestination;
                subnode.Nodes.Add(librarynode);
            }
        }
        // NOTE(review): no loads appear to be pending at this point, so this
        // looks like a no-op round trip — confirm before removing.
        clientcontext.ExecuteQuery();
        // Add the completed node to the tree view control
        currentnode.Nodes.Add(subnode);
    }
    // Go through every list of the passed web
    ListCollection lists = web.Lists;
    clientcontext.Load(lists);
    clientcontext.ExecuteQuery();
    foreach (List list in web.Lists)
    {
        clientcontext.Load(list);
        clientcontext.ExecuteQuery();
        // Now check to see if the list is a document library
        if (list.BaseType == BaseType.DocumentLibrary)
        {
            // At this point we are adding a list which is a document library and it is a valid destination
            // we will therefore supply a bit more detail about the item in a TAG
            TreeNode librarynode = new TreeNode();
            librarynode.Text = list.Title;
            Folder folder = list.RootFolder;
            clientcontext.Load(folder);
            clientcontext.ExecuteQuery();
            ImportDestination importdestination = new ImportDestination();
            importdestination.DestinationFolderUrl = folder.ServerRelativeUrl;
            importdestination.DestinationWebUrl = web.ServerRelativeUrl;
            importdestination.DestinationServerUrl = Get_ServerURL_From_URL(clientcontext.Url);
            importdestination.DestinationLibraryName = list.Title;
            librarynode.Tag = importdestination;
            currentnode.Nodes.Add(librarynode);
        }
    }
    // Now if after all this we have a web which has no document libraries or sub webs then we don't need it
    if (currentnode.Nodes.Count == 0)
    {
        currentnode.Remove();
    }
}
/// <summary>
/// Ensures a publishing subsite titled <paramref name="subsiteFolderName"/> exists
/// under the web titled <paramref name="parentWebTitle"/>, creating the parent web
/// under the root web first when it does not exist yet.
/// </summary>
/// <param name="context">Client context rooted at the site that owns <paramref name="subSiteCollection"/>.</param>
/// <param name="parentWebTitle">Title of the web the subsite is created under.</param>
/// <param name="subsiteFolderName">Title of the subsite to create.</param>
/// <param name="subSiteCollection">Already-loaded collection of existing subsites used for lookups.</param>
/// <param name="logger">Sink for progress and error messages.</param>
private void CreateSubsite(ClientContext context, string parentWebTitle, string subsiteFolderName, WebCollection subSiteCollection, ILogger logger)
{
    try
    {
        if (context == null || string.IsNullOrEmpty(subsiteFolderName))
        {
            return;
        }
        string subsiteInternalTitle = RemoveEmptySpaces(subsiteFolderName);
        Web parentWeb = context.Web;
        if (parentWeb.Title != parentWebTitle)
        {
            parentWeb = subSiteCollection.SingleOrDefault(e => e.Title.Equals(parentWebTitle));
        }
        if (parentWeb == null)
        {
            // Parent web does not exist yet — create it under the root web.
            // BUGFIX: keep the Web returned by Add() instead of re-querying the
            // stale subSiteCollection (CSOM collections are not refreshed by
            // ExecuteQuery), which previously left parentWeb null. Also use a
            // dedicated local for the parent's URL-safe name so the child's
            // Url (subsiteInternalTitle) is not clobbered.
            string parentInternalTitle = RemoveEmptySpaces(parentWebTitle);
            parentWeb = context.Web.Webs.Add(new Microsoft.SharePoint.Client.WebCreationInformation()
            {
                WebTemplate = "CMSPUBLISHING#0",
                Title = parentWebTitle,
                Description = parentWebTitle,
                Url = parentInternalTitle,
                Language = 1033,
                UseSamePermissionsAsParentSite = true
            });
            context.ExecuteQuery();
        }
        if (!subSiteCollection.Where(w => w.Title == subsiteFolderName).Any())
        {
            //If there is no subsites with the same name as the given subsite, create it
            parentWeb.Webs.Add(new Microsoft.SharePoint.Client.WebCreationInformation()
            {
                WebTemplate = "CMSPUBLISHING#0",
                Title = subsiteFolderName,
                Description = subsiteFolderName,
                Url = subsiteInternalTitle,
                Language = 1033,
                UseSamePermissionsAsParentSite = true
            });
            context.ExecuteQuery();
            logger.LogMessage(string.Format(Constants.SubSiteCreationSuccess, subsiteFolderName), LogType.Success);
        }
        else
        {
            logger.LogMessage(string.Format(Constants.DuplicateSubsite, subsiteFolderName), LogType.Info);
        }
    }
    catch (Exception ex)
    {
        logger.LogMessage(string.Format(Constants.SubSiteCreationError, subsiteFolderName, ex.Message), LogType.ErrorAndContinue);
    }
}
/// <summary>
/// Walks <paramref name="directoryName"/> breadth-first looking for provisioning
/// template files named <paramref name="fileName"/>, applies each template to the
/// web whose title matches the template's folder, and creates any subsites implied
/// by "Subsites" folders along the way.
/// </summary>
/// <param name="directoryName">Root directory of the extracted template structure.</param>
/// <param name="fileName">Template file name pattern to search for in each directory.</param>
/// <param name="context">Client context for the target site collection.</param>
/// <param name="provisioningWeb">Web templates are applied to when no title match is found.</param>
/// <param name="logger">Sink for progress and error messages.</param>
/// <returns>All template files found during the walk.</returns>
private string[] ProvisionSPOElements(string directoryName, string fileName, ClientContext context, Web provisioningWeb, ILogger logger)
{
    logger.LogMessage(Constants.ProvisionElementsAttempt, LogType.Info);
    string[] files = new string[0];
    // `directories` is used as a work queue for the breadth-first walk.
    ArrayList directories = new ArrayList();
    ArrayList foundfiles = new ArrayList();
    directories.Add(directoryName);
    // Only files referenced by the .csproj are eligible for provisioning.
    DirectoryInfo baseDir = new DirectoryInfo(AppDomain.CurrentDomain.BaseDirectory);
    string[] csProjFiles = System.IO.Directory.GetFiles(baseDir.Parent.Parent.FullName, "*.csproj");
    List<string> includedProjectElements = ReadProjectStructure(csProjFiles.FirstOrDefault(), baseDir.Parent.Parent.FullName);
    while (directories.Count > 0)
    {
        string n = (string)directories[0];
        try
        {
            if (!System.IO.Directory.Exists(n))
            {
                logger.LogMessage(string.Format(Constants.DirectoryNotFound, n), LogType.ErrorAndAbort);
            }
            string[] foundfiles1 = System.IO.Directory.GetFiles(n, fileName, SearchOption.TopDirectoryOnly);
            if (foundfiles1 != null)
            {
                XMLTemplateProvider provider = new XMLFileSystemTemplateProvider(String.Format(@"{0}\..\..\", AppDomain.CurrentDomain.BaseDirectory), "");
                foreach (string templateFile in foundfiles1)
                {
                    // Skip templates not referenced by the project file.
                    if (!includedProjectElements.Contains(templateFile.ToLowerInvariant()))
                    {
                        continue;
                    }
                    //UpdateTermStoreId(templateFile, logger);
                    //UpdatePnPFileRef(templateFile, logger);
                    DirectoryInfo dirInfo = new DirectoryInfo(string.Format(@"{0}\..\..\", templateFile));
                    logger.LogMessage(string.Format(Constants.ProcessingElement, templateFile), LogType.Info);
                    // Derive the target web's title from the template's grandparent folder name.
                    string siteTitle = dirInfo.FullName.Substring(0, dirInfo.FullName.LastIndexOf('\\'));
                    siteTitle = siteTitle.Substring(siteTitle.LastIndexOf('\\') + 1);
                    if (context.Web.Title != siteTitle)
                    {
                        // Re-target provisioningWeb to the subweb whose title matches.
                        // NOTE(review): if no subweb matches, the previous
                        // provisioningWeb is silently reused — confirm intended.
                        context.Load(context.Web.Webs);
                        context.ExecuteQuery();
                        foreach (Web webNode in context.Web.Webs)
                        {
                            if (webNode.Title == siteTitle)
                            {
                                provisioningWeb = webNode;
                                break;
                            }
                        }
                    }
                    //Apply provision
                    var template = provider.GetTemplate(templateFile);
                    // Connector root is the configured mapping folder; the container is
                    // the template's folder relative to it.
                    string connectionString = ConfigData.RootSiteMappingFolder.ToLowerInvariant();
                    if (!ConfigData.RootSiteMappingFolder.ToLowerInvariant().EndsWith("\\"))
                    {
                        connectionString += "\\";
                    }
                    string container = templateFile.ToLowerInvariant().Replace(connectionString, string.Empty).Replace("template.xml", string.Empty);
                    template.Connector = new FileSystemConnector(connectionString, container);
                    try
                    {
                        ProvisioningTemplateApplyingInformation ptai = new ProvisioningTemplateApplyingInformation();
                        provisioningWeb.ApplyProvisioningTemplate(template);
                        logger.LogMessage(string.Format(Constants.ProvisionSuccess, dirInfo.Name, provisioningWeb.Title), LogType.Success);
                    }
                    catch (Exception ex)
                    {
                        logger.LogMessage(string.Format(Constants.UnknownError, ex.Message), LogType.ErrorAndContinue);
                    }
                }
                foundfiles.AddRange(foundfiles1);
            }
        }
        catch (Exception ex)
        {
            logger.LogMessage(string.Format(Constants.UnknownError, ex.Message), LogType.ErrorAndContinue);
        }
        try
        {
            // Queue subdirectories; a folder ending in "subsites" means its
            // children are subsites that may need to be created first.
            string[] subdirectories = System.IO.Directory.GetDirectories(n);
            if (subdirectories != null)
            {
                string subsitesFolder = RemoveEmptySpaces(n.ToLowerInvariant());
                if (subsitesFolder.EndsWith("subsites"))
                {
                    //C:\something\MnS\Subsites\across\
                    DirectoryInfo dirInfo = new DirectoryInfo(string.Format(@"{0}\..\", n));
                    string parentSiteTitle = dirInfo.FullName.Substring(0, dirInfo.FullName.LastIndexOf('\\'));
                    parentSiteTitle = parentSiteTitle.Substring(parentSiteTitle.LastIndexOf('\\') + 1);
                    WebCollection subsites = context.Web.Webs;
                    context.Load(subsites);
                    context.ExecuteQuery();
                    foreach (string subsiteDir in subdirectories)
                    {
                        // Only create subsites that have project-included content.
                        foreach (string item in includedProjectElements)
                        {
                            if (item.StartsWith(subsiteDir.ToLowerInvariant()))
                            {
                                CreateSubsite(context, parentSiteTitle, subsiteDir.Replace(n, string.Empty).Substring(1), subsites, logger);
                                break;
                            }
                        }
                    }
                }
                directories.AddRange(subdirectories);
            }
        }
        catch (Exception ex)
        {
            logger.LogMessage(string.Format(Constants.UnknownError, ex.Message), LogType.ErrorAndContinue);
        }
        directories.RemoveAt(0);
    }
    return((string[])foundfiles.ToArray(typeof(string)));
}
/// <summary>
/// Entry point: reads funnelback.cfg ("key=value" lines), authenticates to the
/// target SharePoint site with claims auth, lists users, then exports site/list/
/// item data as Funnelback XML files into the configured output folder.
/// </summary>
static void Main(string[] args)
{
    //lets create config file for funnelback and read it first
    Dictionary<string, string> fnb_config = new Dictionary<string, string>();
    try
    {
        using (StreamReader sr = new StreamReader("funnelback.cfg"))
        {
            String line;
            while ((line = sr.ReadLine()) != null)
            {
                // Each config line is "key=value".
                string[] words = line.Split('=');
                fnb_config.Add(words[0], words[1]);
            }
        }
    }
    catch (Exception e)
    {
        // Let the user know what went wrong.
        Console.WriteLine("The file could not be read:");
        Console.WriteLine(e.Message);
    }
    // Echo the parsed configuration.
    foreach (KeyValuePair<string, string> pair in fnb_config)
    {
        Console.WriteLine("{0}, {1}", pair.Key, pair.Value);
    }
    string[] wanted_fields = { "" };
    if (fnb_config.ContainsKey("wanted_fields"))
    {
        string wanted_fields_string = fnb_config["wanted_fields"];
        wanted_fields = wanted_fields_string.Split(',');
    }
    string[] cdata_fields = { "" };
    if (fnb_config.ContainsKey("cdata_fields"))
    {
        string cdata_fields_string = fnb_config["cdata_fields"];
        // BUGFIX: this previously assigned to wanted_fields, leaving
        // cdata_fields permanently empty.
        cdata_fields = cdata_fields_string.Split(',');
    }
    string[] lookup_fields = { "" };
    if (fnb_config.ContainsKey("lookup_fields"))
    {
        string lookup_fields_string = fnb_config["lookup_fields"];
        // BUGFIX: this previously assigned to wanted_fields, leaving
        // lookup_fields permanently empty.
        lookup_fields = lookup_fields_string.Split(',');
    }
    string target_site = fnb_config["target_site"];
    string username = fnb_config["username"];
    string password = fnb_config["password"];
    string output_folder = fnb_config["output_folder"];
    string auth_method = fnb_config["auth_method"];
    FunnelbackXmlConfig fbx = new FunnelbackXmlConfig();
    fbx.outputFolder = output_folder;
    fbx.targetSite = target_site;
    fbx.WantedFields = wanted_fields;
    fbx.CDataFields = cdata_fields;
    fbx.LookupFields = lookup_fields;
    //get all we need for claims authentication
    MsOnlineClaimsHelper claimsHelper = new MsOnlineClaimsHelper(target_site, username, password);
    //from now on we can use sharepoint being authenticated
    using (ClientContext ctx = new ClientContext(target_site))
    {
        ctx.ExecutingWebRequest += claimsHelper.clientContext_ExecutingWebRequest;
        ctx.Load(ctx.Web);
        ctx.ExecuteQuery();
        //List all users
        FunnelbackConfig config = new FunnelbackConfig("funnelback.cfg");
        FunnelbackUser m = new FunnelbackUser(ctx, config);
        m.ListUsers();
        //End of users
        if (ctx != null)
        {
            using (StreamWriter writer = new StreamWriter(fbx.outputFolder + "\\first.xml"))
            {
                Site oSite = ctx.Site;
                WebCollection oWebs = oSite.RootWeb.Webs;
                // Process the root web itself before walking the subwebs.
                FunnelbackXmlSite fbxs = new FunnelbackXmlSite();
                fbxs.ww = oSite.RootWeb;
                fbxs.myfbx = fbx;
                fbxs.Process();
                ctx.Load(oWebs);
                ctx.ExecuteQuery();
                writer.WriteLine(@"<?xml version='1.0'?>");
                writer.WriteLine(@"<sharepoint>");
                foreach (Web oWebsite in oWebs)
                {
                    ListCollection collList = oWebsite.Lists;
                    ctx.Load(collList); // Query for Web
                    ctx.ExecuteQuery(); // Execute
                    writer.WriteLine(@"<site>");
                    writer.WriteLine("<title>{0}</title>", oWebsite.Title);
                    foreach (List oList in collList)
                    {
                        writer.WriteLine("<list>{0}</list>", oList.Title);
                        List oListy = collList.GetByTitle(oList.Title);
                        // Only the first 100 items of each list are exported.
                        CamlQuery camlQuery = new CamlQuery();
                        camlQuery.ViewXml = "<View><RowLimit>100</RowLimit></View>";
                        ListItemCollection collListItem = oListy.GetItems(camlQuery);
                        ctx.Load(collListItem, items => items.IncludeWithDefaultProperties(
                            item => item.DisplayName,
                            item => item.HasUniqueRoleAssignments
                        ));
                        ctx.ExecuteQuery();
                        foreach (ListItem oListItem in collListItem)
                        {
                            // Write the Funnelback record for this item.
                            FunnelbackXmlRecord oFXR = new FunnelbackXmlRecord();
                            oFXR.myfbx = fbx;
                            oFXR.li = oListItem;
                            oFXR.FunnelbackWriteXml();
                            // Serialize the item itself to its own XML file.
                            FunnelbackItem oFI = new FunnelbackItem(oListItem);
                            oFI.config = new FunnelbackConfig("funnelback.cfg");
                            XmlSerializer ser = new XmlSerializer(typeof(FunnelbackItem));
                            // BUGFIX: dispose the XmlWriter even when Serialize throws.
                            using (XmlWriter tx = XmlWriter.Create(fbx.outputFolder + "\\" + oFI.GetSafeFilename("xml")))
                            {
                                ser.Serialize(tx, oFI);
                            }
                        }
                    }
                    writer.WriteLine(@"</site>");
                }
                writer.WriteLine(@"</sharepoint>");
            }
        }
    }
}
/// <summary>
/// Replaces the master page across a site collection: the root web first, then every
/// first-level sub web. Per-web results are accumulated and written to a CSV usage report.
/// Progress is written to both the trace log and the console; failures are recorded via
/// ExceptionCsv and never propagate to the caller.
/// </summary>
/// <param name="outPutFolder">Folder that receives the trace log, exception log and the usage CSV.</param>
/// <param name="SiteCollectionUrl">Url of the site collection to process.</param>
/// <param name="NewMasterPageURL">Url of the master page to apply.</param>
/// <param name="OldMasterPageURL">Master page being replaced; "N/A" when not filtering on the old value.</param>
/// <param name="CustomMasterUrlStatus">True to update the web's CustomMasterUrl.</param>
/// <param name="MasterUrlStatus">True to update the web's MasterUrl.</param>
/// <param name="SharePointOnline_OR_OnPremise">Compared against Constants.OnPremise / Constants.Online to pick the authentication flavour.</param>
/// <param name="UserName">Account used to authenticate.</param>
/// <param name="Password">Password for <paramref name="UserName"/>.</param>
/// <param name="Domain">Account domain (on-premises authentication only).</param>
public void ChangeMasterPageForSiteCollection(string outPutFolder, string SiteCollectionUrl, string NewMasterPageURL, string OldMasterPageURL = "N/A", bool CustomMasterUrlStatus = true, bool MasterUrlStatus = true, string SharePointOnline_OR_OnPremise = "N/A", string UserName = "******", string Password = "******", string Domain = "N/A")
{
    string exceptionCommentsInfo1 = string.Empty;
    List<MasterPageBase> _WriteMasterList = new List<MasterPageBase>();
    // FIX: ClientContext is IDisposable and was never disposed in the original.
    // Declared here so the finally block below can release it on every path.
    ClientContext clientContext = null;

    // Initialize exception/logger infrastructure and delete any stale master-page usage file.
    MasterPage_Initialization(outPutFolder);

    // NOTE(review): the "Trasnformation" typo in the banner text is preserved on purpose —
    // these exact strings may be grepped in existing logs.
    LogAndConsole("############## Master Page Trasnformation Utility Execution Started - For Site Collection ##############");
    LogAndConsole("[DATE TIME] " + Logger.CurrentDateTime());
    LogAndConsole("[START] ENTERING IN FUNCTION ::: ChangeMasterPageForSiteCollection");
    LogAndConsole("[ChangeMasterPageForSiteCollection] Initiated Logger and Exception Class. Logger and Exception file will be available at path " + outPutFolder);
    LogAndConsole("[ChangeMasterPageForSiteCollection] SiteCollectionUrl is " + SiteCollectionUrl);

    try
    {
        AuthenticationHelper ObjAuth = new AuthenticationHelper();

        // SharePoint on-premises / SharePoint Online Dedicated => OP (On-Premises)
        if (SharePointOnline_OR_OnPremise.ToUpper() == Constants.OnPremise)
        {
            clientContext = ObjAuth.GetNetworkCredentialAuthenticatedContext(SiteCollectionUrl, UserName, Password, Domain);
        }
        // SharePoint Online => OL (Online)
        else if (SharePointOnline_OR_OnPremise.ToUpper() == Constants.Online)
        {
            clientContext = ObjAuth.GetSharePointOnlineAuthenticatedContextTenant(SiteCollectionUrl, UserName, Password);
        }

        if (clientContext != null)
        {
            bool headerMasterPage = false;
            Web rootWeb = clientContext.Web;
            clientContext.Load(rootWeb);
            clientContext.ExecuteQuery();

            // Context consumed by ExceptionCsv should processing of the root web fail.
            ExceptionCsv.SiteCollection = SiteCollectionUrl;
            ExceptionCsv.WebUrl = rootWeb.Url.ToString();
            // FIX: the original concatenation ran "CustomMasterUrlStatus"/"MasterUrlStatus"
            // straight into their values with no separator; readable delimiters added.
            exceptionCommentsInfo1 = "<Input>New MasterPage Url = " + NewMasterPageURL + ", <Input> OLD MasterUrl: " + OldMasterPageURL + ", WebUrl: " + rootWeb.Url.ToString() + ", CustomMasterUrlStatus: " + CustomMasterUrlStatus + ", MasterUrlStatus: " + MasterUrlStatus;

            // Process the root web first.
            MasterPageBase objMPBase = ChangeMasterPageForWeb(outPutFolder, rootWeb.Url.ToString(), NewMasterPageURL, OldMasterPageURL, CustomMasterUrlStatus, MasterUrlStatus, Constants.ActionType_SiteCollection, SharePointOnline_OR_OnPremise, UserName, Password, Domain);
            if (objMPBase != null)
            {
                _WriteMasterList.Add(objMPBase);
            }

            // Then every first-level sub web. A failure on one web is logged and the loop continues.
            WebCollection webCollection = rootWeb.Webs;
            clientContext.Load(webCollection);
            clientContext.ExecuteQuery();

            foreach (Web webSite in webCollection)
            {
                try
                {
                    // Context consumed by ExceptionCsv should processing of this web fail.
                    ExceptionCsv.SiteCollection = SiteCollectionUrl;
                    ExceptionCsv.WebUrl = webSite.Url.ToString();
                    exceptionCommentsInfo1 = "<Input>New MasterPage Url = " + NewMasterPageURL + ", <Input> OLD MasterUrl: " + OldMasterPageURL + ", WebUrl: " + webSite.Url.ToString() + ", CustomMasterUrlStatus: " + CustomMasterUrlStatus + ", MasterUrlStatus: " + MasterUrlStatus;

                    objMPBase = ChangeMasterPageForWeb(outPutFolder, webSite.Url, NewMasterPageURL, OldMasterPageURL, CustomMasterUrlStatus, MasterUrlStatus, Constants.ActionType_SiteCollection, SharePointOnline_OR_OnPremise, UserName, Password, Domain);
                    if (objMPBase != null)
                    {
                        _WriteMasterList.Add(objMPBase);
                    }
                }
                catch (Exception ex)
                {
                    ExceptionCsv.WriteException(ExceptionCsv.WebApplication, ExceptionCsv.SiteCollection, ExceptionCsv.WebUrl, "MasterPage", ex.Message, ex.ToString(), "ChangeMasterPageForSiteCollection", ex.GetType().ToString(), exceptionCommentsInfo1);
                    Logger.AddMessageToTraceLogFile(Constants.Logging, "[EXCEPTION] [ChangeMasterPageForSiteCollection] ChangeMasterPageForSiteCollection. Exception Message: " + ex.Message + ", Exception Comment: " + exceptionCommentsInfo1);
                    Console.ForegroundColor = ConsoleColor.Red;
                    Console.WriteLine("[Exception] [ChangeMasterPageForSiteCollection]. Exception Message:" + ex.Message + ", Exception Comment: " + exceptionCommentsInfo1);
                    Console.ForegroundColor = ConsoleColor.Gray;
                }
            }

            // Flush collected results. (Redundant null check removed: the list is always constructed above.)
            if (_WriteMasterList.Count > 0)
            {
                FileUtility.WriteCsVintoFile(outPutFolder + @"\" + Constants.MasterPageUsage, ref _WriteMasterList, ref headerMasterPage);
            }
        }

        LogAndConsole("[END] [ChangeMasterPageForSiteCollection] EXIT FROM FUNCTION ChangeMasterPageForSiteCollection for SiteCollectionUrl: " + SiteCollectionUrl);
        LogAndConsole("############## Master Page Trasnformation Utility Execution Completed - For Site Collection ##############");
    }
    catch (Exception ex)
    {
        ExceptionCsv.WriteException(ExceptionCsv.WebApplication, ExceptionCsv.SiteCollection, ExceptionCsv.WebUrl, "MasterPage", ex.Message, ex.ToString(), "ChangeMasterPageForSiteCollection", ex.GetType().ToString(), exceptionCommentsInfo1);
        Logger.AddMessageToTraceLogFile(Constants.Logging, "[EXCEPTION] [ChangeMasterPageForSiteCollection] Exception Message: " + ex.Message + ", Exception Comment: " + exceptionCommentsInfo1);
        Console.ForegroundColor = ConsoleColor.Red;
        Console.WriteLine("[Exception] [ChangeMasterPageForSiteCollection]. Exception Message:" + ex.Message + ", Exception Comment: " + exceptionCommentsInfo1);
        Console.ForegroundColor = ConsoleColor.Gray;
    }
    finally
    {
        // FIX: release the CSOM client context (it owns HTTP/auth resources).
        if (clientContext != null)
        {
            clientContext.Dispose();
        }
    }
}

/// <summary>
/// Writes one message to both the trace log and the console — the pairing this method
/// previously repeated inline for every progress message.
/// </summary>
private static void LogAndConsole(string message)
{
    Logger.AddMessageToTraceLogFile(Constants.Logging, message);
    Console.WriteLine(message);
}