public override void Process(ServiceProcess process, object arguments)
{
    FieldRemoverParameters parameters = (FieldRemoverParameters)arguments;
    if (!Directory.Exists(parameters.Directory))
    {
        throw new BaseServiceException(string.Format(CultureInfo.InvariantCulture,
            "Directory '{0}' does not exist.", parameters.Directory));
    }

    var client = PowerTools.Common.CoreService.Client.GetCoreService();
    try
    {
        string[] files = Directory.GetFiles(parameters.Directory);
        int i = 0;
        foreach (string file in files)
        {
            process.SetStatus("Importing image: " + Path.GetFileName(file));
            process.SetCompletePercentage(++i * 100 / files.Length);
            System.Threading.Thread.Sleep(500); // Temp, until it actually does something :)
        }
        process.Complete();
    }
    finally
    {
        if (client != null)
        {
            client.Close();
        }
    }
}
/// <summary>
/// Main method performing the CoreService call to retrieve all AppData for the given item URI.
/// </summary>
/// <param name="process">ServiceProcess: the async process to use</param>
/// <param name="arguments">parameter object with the item TcmUri to retrieve AppData for</param>
public override void Process(ServiceProcess process, object arguments)
{
    _data = new List<AppDataInspectorData>();
    AppDataInspectorParameters parameters = (AppDataInspectorParameters)arguments;

    process.SetCompletePercentage(10);
    process.SetStatus(Resources.ProgressStatusInitializing);

    using (var coreService = Client.GetCoreService())
    {
        process.SetCompletePercentage(20);
        process.SetStatus(Resources.AppDataInspectorRetrievingData);

        ApplicationData[] appDataList = coreService.ReadAllApplicationData(parameters.ItemUri)
            .OrderBy(data => data.ApplicationId)
            .ToArray();

        // The remaining 80% of the progress bar is divided over the AppData entries.
        // Use floating-point division so the increment is not truncated to zero.
        double progressIncrement = appDataList.Length == 0 ? 0 : 80.0 / appDataList.Length;
        int i = 1;
        foreach (ApplicationData appData in appDataList)
        {
            // Create a response data object and add it to the response data collection.
            _data.Add(new AppDataInspectorData
            {
                ApplicationId = appData.ApplicationId,
                Value = ByteArrayToObject(appData).ToString(),
                Type = appData.TypeId
            });

            int progressPercentage = (int)(20 + i * progressIncrement);
            process.SetCompletePercentage(progressPercentage);
            i++;
        }
        process.Complete(Resources.ProgressStatusComplete);
    }
}
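// ByteArrayToObject is referenced above but not shown. A minimal sketch of such a helper,
// assuming ApplicationData exposes its payload as a byte[] Data property and that text-based
// AppData is stored as UTF-8 (requires using System.Text); binary AppData would need
// type-specific handling and the real PowerTools helper may differ:
private static object ByteArrayToObject(ApplicationData appData)
{
    if (appData.Data == null || appData.Data.Length == 0)
    {
        return string.Empty;
    }
    // Decode the raw AppData bytes as UTF-8 text for display purposes.
    return Encoding.UTF8.GetString(appData.Data);
}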
public ServiceProcess ExecuteAsync(object arguments)
{
    ServiceProcess newProcess = new ServiceProcess();
    ServiceProcessHelper storedProcess = new ServiceProcessHelper(newProcess);
    OperationContext.Current.InstanceContext.Extensions.Add(storedProcess);

    ExecuteData executeData = new ExecuteData
    {
        Process = storedProcess.Process,
        Arguments = arguments
    };
    ThreadPool.QueueUserWorkItem(WorkerThread, executeData);

    return newProcess;
}
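// WorkerThread is queued above but not shown here. A minimal sketch of what such a callback
// could look like, assuming ExecuteData carries the process and arguments as set above;
// the Failed/Complete error handling is an illustrative assumption, not necessarily the
// actual PowerTools implementation:
private void WorkerThread(object state)
{
    ExecuteData executeData = (ExecuteData)state;
    try
    {
        // Run the concrete service logic on the thread-pool thread.
        Process(executeData.Process, executeData.Arguments);
    }
    catch (Exception ex)
    {
        // Surface the failure on the async process so the client can pick it up.
        executeData.Process.Failed = true;
        executeData.Process.Complete(ex.Message);
    }
}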
public override void Process(ServiceProcess process, object arguments)
{
    // Create the random generator once, outside the loop, so successive iterations
    // do not reuse (nearly) the same seed.
    Random rnd = new Random(DateTime.Now.GetHashCode());
    while (process.PercentComplete < 100)
    {
        process.IncrementCompletePercentage();
        process.SetStatus("Loading...");
        Thread.Sleep(((int)(rnd.NextDouble() * 4) + 10) * 10);
    }
    process.Complete();
}
public override void Process(ServiceProcess process, object arguments)
{
    PagePublisherParameters parameters = (PagePublisherParameters)arguments;
    process.SetCompletePercentage(0);
    process.SetStatus("Initializing");

    using (var coreService = Client.GetCoreService())
    {
        _pagePublisherData = new PagePublisherData();

        // Get a list of the items from the Core Service.
        ItemsFilterData filter = GetFilter(parameters);
        XElement listXml = coreService.GetListXml(parameters.LocationId, filter);

        // Get the page ids that will be published.
        string[] pageIds = GetPageIds(listXml);

        int batchSize = 5;
        int currentBatch = 0;

        // Publish pages in batches, incrementing the progress percentage as batches complete.
        try
        {
            double ratio = (double)pageIds.Length / batchSize;
            double percentage = 100 / ratio;
            double currperc = 0;
            while (currentBatch * batchSize < pageIds.Length)
            {
                string[] nextBatch = pageIds.Skip(currentBatch * batchSize).Take(batchSize).ToArray();
                coreService.Publish(nextBatch, GetPublishInstructionData(parameters), parameters.TargetUri, parameters.Priority, new ReadOptions());
                currentBatch++;

                currperc += percentage;
                if (currperc >= 1)
                {
                    process.IncrementCompletePercentage();
                    currperc = 0;
                }
            }
            _pagePublisherData.SuccessMessage = string.Format("{0} pages published successfully", pageIds.Length);
        }
        catch (Exception ex)
        {
            _pagePublisherData.FailedMessage = string.Format("Page publishing failed, reason: {0}", ex.Message);
        }
        process.Complete("done");
    }
}
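// GetPublishInstructionData is used above but not shown. A minimal sketch, assuming the
// standard Tridion CoreService publish instruction classes; the exact properties the real
// PowerTools code derives from PagePublisherParameters may differ:
private static PublishInstructionData GetPublishInstructionData(PagePublisherParameters parameters)
{
    return new PublishInstructionData
    {
        // Illustrative defaults only; child-publication handling is an assumption.
        ResolveInstruction = new ResolveInstructionData { IncludeChildPublications = false },
        RenderInstruction = new RenderInstructionData()
    };
}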
public override void Process(ServiceProcess process, object arguments)
{
    CompSyncParameters parameters = (CompSyncParameters)arguments;
    if (parameters.SelectedUrIs == null)
    {
        throw new BaseServiceException(string.Format(CultureInfo.InvariantCulture,
            "List '{0}' is null.", parameters.SelectedUrIs));
    }

    var client = Client.GetCoreService();
    try
    {
        int i = 0;
        ComponentData referenceComponentData = client.Read(parameters.ReferenceComponentUri, new ReadOptions()) as ComponentData;
        foreach (string uri in parameters.SelectedUrIs)
        {
            ComponentData currentComponent = client.Read(uri, new ReadOptions()) as ComponentData;
            process.SetStatus("Synchronizing: " + currentComponent.Title);
            _processedItems.Add(uri);
            process.SetCompletePercentage(++i * 100 / parameters.SelectedUrIs.Length);
            System.Threading.Thread.Sleep(500); // Temp, until it actually does something :)
        }
        process.SetStatus("Synchronization successfully finished!");
        process.Complete();
        _processedItems = new ArrayList();
    }
    finally
    {
        if (client != null)
        {
            client.Close();
        }
    }
}
public override void Process(ServiceProcess process, object arguments)
{
    MarkUnpublishedParameters parameters = (MarkUnpublishedParameters)arguments;
    if (string.IsNullOrEmpty(parameters.OrgItemURI))
    {
        throw new BaseServiceException("parameters.OrgItemURI is null or empty");
    }

    var client = PowerTools.Common.CoreService.Client.GetCoreService();
    try
    {
        process.Complete();
    }
    finally
    {
        if (client != null)
        {
            client.Close();
        }
    }
}
/// <summary>
/// Contains the actual processing logic: instantiates a CoreService client, queries for the item list,
/// then fills the response counts data object, updating the progress along the way.
/// </summary>
/// <param name="process">the current ServiceProcess</param>
/// <param name="arguments">CountItemsParameters arguments</param>
public override void Process(ServiceProcess process, object arguments)
{
    CountItemsParameters parameters = (CountItemsParameters)arguments;

    process.SetCompletePercentage(25);
    process.SetStatus("Initializing");

    using (var coreService = Client.GetCoreService())
    {
        ItemsFilterData filter = GetFilter(parameters);

        process.SetCompletePercentage(50);
        process.SetStatus("Retrieving count data");
        XElement listXml = coreService.GetListXml(parameters.OrgItemUri, filter);

        process.SetCompletePercentage(75);
        process.SetStatus("Extracting item counts");
        ProcessCounts(listXml);

        process.Complete("Done");
    }
}
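// ProcessCounts is referenced above but not shown. A minimal sketch, assuming it tallies the
// list XML items per their Type attribute; the _countData field is an illustrative assumption,
// not an actual PowerTools member:
private void ProcessCounts(XElement listXml)
{
    // Group the returned Item elements by their numeric item-type attribute and count each group.
    var countsPerType = listXml.Descendants()
        .Where(e => e.Name.LocalName == "Item")
        .GroupBy(e => (string)e.Attribute("Type"))
        .ToDictionary(g => g.Key, g => g.Count());
    _countData = countsPerType;
}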
public ServiceProcessHelper(ServiceProcess process)
{
    Process = process;
}
public override void Process(ServiceProcess process, object arguments)
{
    PagePublisherParameters parameters = (PagePublisherParameters)arguments;
    process.SetCompletePercentage(0);
    process.SetStatus("Initializing");

    using (var coreService = Client.GetCoreService())
    {
        _pagePublisherData = new PagePublisherData();
        string[] pageIds;

        if (parameters.LocationId.EndsWith("-1") || parameters.LocationId.EndsWith("-4")) // Publication or Structure Group
        {
            // Get a list of the items from the Core Service.
            ItemsFilterData filter = GetFilter(parameters);
            XElement listXml = coreService.GetListXml(parameters.LocationId, filter);

            // Get the page ids that will be published.
            pageIds = GetPageIds(listXml);
        }
        else // Component
        {
            var readOptions = new ReadOptions();

            // Get the current component.
            var componentData = (ComponentData)coreService.Read(parameters.LocationId, readOptions);

            // Get the initial set of using items.
            var filter = new UsingItemsFilterData
            {
                BaseColumns = ListBaseColumns.Default,
                IncludedVersions = VersionCondition.OnlyLatestAndCheckedOutVersions,
                IncludeLocalCopies = true,
                ItemTypes = new[] { ItemType.Component, ItemType.Page }
            };
            var usingItemsXml = coreService.GetListXml(parameters.LocationId, filter);
            var pageIdsList = GetPageIds(usingItemsXml).ToList();

            var level = 1;
            // The depth limit is set to 10 so we never get an infinite loop in case
            // component 1 is included within a component 2 that is included within component 1.
            int depthLimit = 10;
            var componentIdsList = GetComponentIds(usingItemsXml).ToList();
            var targets = componentIdsList.Distinct(StringComparer.InvariantCultureIgnoreCase);
            while (level <= depthLimit && targets.Count() > 0)
            {
                var nextTargets = new HashSet<string>();
                foreach (var targetId in targets)
                {
                    usingItemsXml = coreService.GetListXml(targetId, filter);
                    pageIdsList.AddRange(GetPageIds(usingItemsXml));
                    foreach (var e in usingItemsXml.Elements())
                    {
                        nextTargets.Add(e.Attribute("ID").Value);
                    }
                }
                targets = nextTargets.ToList();
                level++;
            }
            pageIds = pageIdsList.ToArray();
        }

        int batchSize = 5;
        int currentBatch = 0;

        // Publish pages in batches, incrementing the progress percentage as batches complete.
        try
        {
            double ratio = (double)pageIds.Length / batchSize;
            double percentage = 100 / ratio;
            double currperc = 0;
            while (currentBatch * batchSize < pageIds.Length)
            {
                string[] nextBatch = pageIds.Skip(currentBatch * batchSize).Take(batchSize).ToArray();
                coreService.Publish(nextBatch, GetPublishInstructionData(parameters), parameters.TargetUri, parameters.Priority, new ReadOptions());
                currentBatch++;

                currperc += percentage;
                if (currperc >= 1)
                {
                    process.IncrementCompletePercentage();
                    currperc = 0;
                }
            }
            _pagePublisherData.SuccessMessage = string.Format("{0} pages published successfully", pageIds.Length);
        }
        catch (Exception ex)
        {
            _pagePublisherData.FailedMessage = string.Format("Page publishing failed, reason: {0}", ex.Message);
        }
        process.Complete("done");
    }
}
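// GetPageIds and GetComponentIds are used above but not shown. A minimal sketch of what they
// might look like, assuming the Core Service list XML exposes Item elements with ID and Type
// attributes (Type 64 = Page, 16 = Component in Tridion); the exact namespace handling in the
// real PowerTools code may differ:
private static string[] GetPageIds(XElement listXml)
{
    return listXml.Descendants()
        .Where(e => e.Name.LocalName == "Item" && (string)e.Attribute("Type") == "64")
        .Select(e => (string)e.Attribute("ID"))
        .ToArray();
}

private static string[] GetComponentIds(XElement listXml)
{
    return listXml.Descendants()
        .Where(e => e.Name.LocalName == "Item" && (string)e.Attribute("Type") == "16")
        .Select(e => (string)e.Attribute("ID"))
        .ToArray();
}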
public abstract void Process(ServiceProcess process, object arguments);
public override void Process(ServiceProcess process, object arguments)
{
    // Empty
}
public override void Process(ServiceProcess process, object arguments)
{
    ImageUploadParameters parameters = (ImageUploadParameters)arguments;
    try
    {
        string directory = parameters.Directory;
        if (!Directory.Exists(directory))
        {
            process.Failed = true;
            process.Complete(string.Format(CultureInfo.InvariantCulture,
                "Directory '{0}' does not exist. No images were uploaded!", directory));
            return;
        }

        string[] files = Directory.GetFiles(directory);
        int i = 0;
        _client = PowerTools.Common.CoreService.Client.GetCoreService();

        // Get all component titles in the target folder.
        _componentTitles = getAllComponentTitles(parameters.FolderUri);

        foreach (string file in files)
        {
            process.SetStatus("Importing image: " + Path.GetFileName(file));
            process.SetCompletePercentage(++i * 100 / files.Length);

            FileInfo fileInfo = new FileInfo(file);
            if (fileInfo.Exists)
            {
                string mmType = GetMultiMediaType(fileInfo.Extension);
                if (mmType != null)
                {
                    BinaryContentData bcd = new BinaryContentData
                    {
                        UploadFromFile = file,
                        MultimediaType = new LinkToMultimediaTypeData { IdRef = mmType },
                        Filename = file,
                        IsExternal = false
                    };
                    ComponentData compData = new ComponentData
                    {
                        LocationInfo = new LocationInfo
                        {
                            OrganizationalItem = new LinkToOrganizationalItemData
                            {
                                IdRef = parameters.FolderUri // Organizational item (target folder)
                            },
                        },
                        ComponentType = ComponentType.Multimedia,
                        Title = MakeValidFileName(fileInfo.Name),
                        Schema = new LinkToSchemaData { IdRef = parameters.SchemaUri },
                        IsBasedOnMandatorySchema = false,
                        IsBasedOnTridionWebSchema = true,
                        ApprovalStatus = new LinkToApprovalStatusData { IdRef = "tcm:0-0-0" },
                        Id = "tcm:0-0-0",
                        BinaryContent = bcd
                    };
                    ComponentData comp = (ComponentData)_client.Create(compData, new ReadOptions());
                }
            }
        }
        process.Complete();
    }
    finally
    {
        if (_client != null)
        {
            _client.Close();
        }
    }
}
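// MakeValidFileName and GetMultiMediaType are helpers not shown here. A minimal sketch of
// MakeValidFileName, assuming it simply strips characters that are invalid in a file name so
// the result can be used as a component title; the real PowerTools helper may behave differently:
private static string MakeValidFileName(string name)
{
    // Remove characters that are not allowed in file names.
    foreach (char invalid in Path.GetInvalidFileNameChars())
    {
        name = name.Replace(invalid.ToString(), string.Empty);
    }
    return name;
}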
public override void Process(ServiceProcess process, object arguments)
{
    _duplicateData = new List<DuplicateBinariesData>();
    fileNames = new Dictionary<string, string>();
    DuplicateBinariesParameters parameters = (DuplicateBinariesParameters)arguments;

    process.SetCompletePercentage(10);
    process.SetStatus("working");

    using (var coreService = Client.GetCoreService())
    {
        try
        {
            process.SetCompletePercentage(15);
            process.SetStatus("Creating publication filter");

            // Create a filter to only fetch multimedia components from the publication.
            RepositoryItemsFilterData filter = new RepositoryItemsFilterData();
            filter.ItemTypes = new[] { ItemType.Component };
            filter.ComponentTypes = new[] { ComponentType.Multimedia };

            XElement mmComponentsListXml = coreService.GetListXml(parameters.PublicationId, filter);
            XNamespace tcm = "http://www.tridion.com/ContentManager/5.0";

            // The remaining 80% of the progress bar is divided over the multimedia components.
            List<XElement> itemElements = mmComponentsListXml.Descendants(tcm + "Item").ToList();
            double progressIncrement = itemElements.Count == 0 ? 0 : 80.0 / itemElements.Count;
            int i = 1;

            // Keep a map of item id to binary file name for all items in the publication;
            // a file name that occurs more than once is considered a duplicate.
            foreach (XElement itemElem in itemElements)
            {
                string itemId = itemElem.Attribute("ID").Value;
                string binaryFileName = GetFileNameFromComponent(coreService, itemId);
                fileNames.Add(itemId, binaryFileName);

                int progressPercentage = (int)(20 + i * progressIncrement);
                process.SetCompletePercentage(progressPercentage);
                i++;
            }

            var duplicateValues = fileNames.ToLookup(a => a.Value).Where(b => b.Count() > 1);

            // TODO: refactor this to select the ids and values directly from the file name list.
            foreach (var group in duplicateValues)
            {
                foreach (KeyValuePair<string, string> kvp in group)
                {
                    _duplicateData.Add(new DuplicateBinariesData
                    {
                        ItemTcmId = kvp.Key,
                        ItemFileName = kvp.Value,
                    });
                }
            }
            process.Complete("Done");
        }
        catch (Exception ex)
        {
            // TODO: update the GUI that there has been an error - the solution below is temporary.
            process.Failed = true;
            process.Complete(string.Format("Failure finding duplicate items, reason: {0}", ex.Message));
            return;
        }
    }
}
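// GetFileNameFromComponent is used above but not shown. A minimal sketch, assuming the client
// implements the standard ICoreService contract and that the binary file name can be read from
// the multimedia component's BinaryContent; this is an illustration, not necessarily the actual
// PowerTools implementation:
private static string GetFileNameFromComponent(ICoreService coreService, string itemId)
{
    var component = (ComponentData)coreService.Read(itemId, new ReadOptions());
    // Compare only the file name portion, so identical binaries uploaded from different paths still match.
    return Path.GetFileName(component.BinaryContent.Filename);
}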