// Sets (or clears, when <clear> is true) the data value for every cell in the
// POV slice, reporting progress per cell and stopping early if the user cancels.
protected void SetCellInternal(Cells slice, double amount, bool clear, IOutput output)
{
    var POVs = slice.POVs;
    output.InitProgress(string.Format("{0} cells", clear ? "Clearing" : "Setting"), POVs.Length);
    foreach (var pov in POVs) {
        if (HFM.HasVariableCustoms) {
            // Apps with variable custom dimensions need the ExtDim API, which is
            // only available when built against the 11.1.2.2+ libraries
#if HFM_11_1_2_2
            HFM.Try("Setting cell",
                    () => _hsvData.SetCellExtDim(pov.HfmPovCOM, amount, clear));
#else
            HFM.ThrowIncompatibleLibraryEx();
#endif
        }
        else {
            HFM.Try("Setting cell",
                    () => _hsvData.SetCell(pov.Scenario.Id, pov.Year.Id, pov.Period.Id,
                            pov.View.Id, pov.Entity.Id, pov.Entity.ParentId,
                            pov.Value.Id, pov.Account.Id, pov.ICP.Id,
                            pov.Custom1.Id, pov.Custom2.Id, pov.Custom3.Id,
                            pov.Custom4.Id, amount, clear));
        }
        // IterationComplete advances the progress bar; a true return means cancel
        if (output.IterationComplete()) {
            break;
        }
    }
    output.EndProgress();
}
/// <summary>
/// Uploads a data extract template file to the application's template store.
/// </summary>
public void LoadDataExtractTemplate(
        [Parameter("The path to the data extract template")]
        string templateFile,
        [Parameter("The name to give the template; if omitted, defaults to the name of the file",
                   DefaultValue = null)]
        string templateName,
        [Parameter("Flag indicating whether any existing template should be overwritten",
                   DefaultValue = true)]
        bool overwrite)
{
    FileUtilities.EnsureFileExists(templateFile);
    // File.ReadAllText opens, reads and closes the file for us (same BOM
    // detection semantics as the previous StreamReader-based code)
    string template = File.ReadAllText(templateFile);
    // Default the template name to the file name (sans extension)
    templateName = templateName ?? Path.GetFileNameWithoutExtension(templateFile);
    HFM.Try("Loading extended analytics template from {0}", templateFile,
            () => HsvStarSchemaTemplates.SetTemplate(templateName, template, overwrite));
    _log.InfoFormat("Uploaded data extract template {0}", templateName);
}
/// <summary>
/// Extracts journals for the specified Scenario, Year and Period to a file.
/// </summary>
public void ExtractJournals(
        [Parameter("Path to the generated journal extract file")]
        string journalFile,
        [Parameter("Path to the extract log file; if not specified, defaults to same path " +
                   "and name as journal file.", DefaultValue = null)]
        string logFile,
        [Parameter("The scenario to include in the extract")]
        string scenario,
        [Parameter("The year to include in the extract")]
        string year,
        [Parameter("The period to include in the extract")]
        string period,
        ExtractOptions options,
        Metadata metadata)
{
    // Resolve the member names to ids and record them in the extract options
    options["Scenario"] = metadata["Scenario"].GetId(scenario);
    options["Year"] = metadata["Year"].GetId(year);
    options["Period"] = metadata["Period"].GetId(period);

    if (string.IsNullOrEmpty(logFile)) {
        logFile = Path.ChangeExtension(journalFile, ".log");
    }

    // Ensure journalFile and logFile are writeable locations
    FileUtilities.EnsureFileWriteable(journalFile);
    FileUtilities.EnsureFileWriteable(logFile);

    HFM.Try("Extracting journals",
            () => HsvJournalLoadACV.Extract(journalFile, logFile));
}
/// Runs the calculation rules for the Scenario/Year/Period/Entity/Value
/// combination specified in the POV; <force> recalculates even if the
/// calc status does not require it.
internal void CalculatePOV(POV pov, bool force)
{
    _log.FineFormat("Calculating {0}", pov);
    HFM.Try(() => _hsvCalculate.ChartLogic(
                pov.Scenario.Id, pov.Year.Id, pov.Period.Id,
                pov.Entity.Id, pov.Entity.ParentId, pov.Value.Id,
                force));
}
/// Returns the data value in the specified cell, or null if the cell
/// contains no data
internal double? GetCellValue(POV pov)
{
    double amount = 0;
    int status = 0;
    if (HFM.HasVariableCustoms) {
        // Variable custom dimension apps need the ExtDim API (11.1.2.2+ only)
#if HFM_11_1_2_2
        HFM.Try("Getting cell data value for {0}", pov,
                () => _hsvData.GetCellExtDim(pov.HfmPovCOM, out amount, out status));
#else
        HFM.ThrowIncompatibleLibraryEx();
#endif
    }
    else {
        HFM.Try("Getting cell data value for {0}", pov,
                () => _hsvData.GetCell(pov.Scenario.Id, pov.Year.Id, pov.Period.Id,
                        pov.View.Id, pov.Entity.Id, pov.Entity.ParentId,
                        pov.Value.Id, pov.Account.Id, pov.ICP.Id,
                        pov.Custom1.Id, pov.Custom2.Id, pov.Custom3.Id,
                        pov.Custom4.Id, out amount, out status));
    }
    // When the cell status has the NoData bit set, the returned amount is
    // meaningless, so report null instead
    if (ECalcStatus.NoData.IsSet(status)) {
        return(null);
    }
    else {
        return(amount);
    }
}
/// Returns a bit-field representing the cell metadata etc status
internal int GetCellStatus(POV pov)
{
    int status = -1;
    int extStatus = -1;
    // Fall back to the entity's default currency when no Value member is specified
    int valueId = pov.IsSpecified(EDimension.Value) ?
        pov.Value.Id : pov.Entity.DefaultCurrencyId;
    if (HFM.HasVariableCustoms) {
        // Variable custom dimension apps need the ExtDim API (11.1.2.2+ only)
#if HFM_11_1_2_2
        HFM.Try("Retrieving cell status for {0}", pov,
                () => _hsvData.GetStatusExExtDim(pov.HfmPovCOM, false,
                        out status, out extStatus));
#else
        HFM.ThrowIncompatibleLibraryEx();
#endif
    }
    else {
        HFM.Try("Retrieving cell status for {0}", pov,
                () => _hsvData.GetStatusEx(pov.Scenario.Id, pov.Year.Id, pov.Period.Id,
                        pov.View.Id, pov.Entity.Id, pov.Entity.ParentId,
                        valueId, pov.Account.Id, pov.ICP.Id,
                        pov.Custom1.Id, pov.Custom2.Id, pov.Custom3.Id,
                        pov.Custom4.Id, false, out status, out extStatus));
    }
    // extStatus is retrieved but not returned by this method
    return(status);
}
// Outputs the phased submission process state for each process unit in the slice
protected override void GetProcessState(ProcessUnits slice, IOutput output)
{
    short state = 0;
    output.SetHeader("POV", 58, "Process State", 15);
    foreach (var pov in GetProcessUnits(slice)) {
        if (HFM.HasVariableCustoms) {
            // Variable custom dimension apps need the ExtDim API (11.1.2.2+ only)
#if HFM_11_1_2_2
            HFM.Try("Retrieving phased submission process state for {0}", pov,
                    () => _hsvProcessFlow.GetPhasedSubmissionStateExtDim(pov.HfmPovCOM,
                            out state));
#else
            HFM.ThrowIncompatibleLibraryEx();
#endif
        }
        else {
            HFM.Try("Retrieving phased submission process state for {0}", pov,
                    () => _hsvProcessFlow.GetPhasedSubmissionState(pov.Scenario.Id,
                            pov.Year.Id, pov.Period.Id, pov.Entity.Id, pov.Entity.ParentId,
                            pov.Value.Id, pov.Account.Id, pov.ICP.Id,
                            pov.Custom1.Id, pov.Custom2.Id, pov.Custom3.Id,
                            pov.Custom4.Id, out state));
        }
        output.WriteRecord(pov, (EProcessState)state);
    }
    output.End();
}
// Retrieves the submission group and phase for the cell identified by the POV
protected void GetGroupPhase(POV pov, out int group, out int phase)
{
    string sGroup = null, sPhase = null;
    if (HFM.HasVariableCustoms) {
        // Variable custom dimension apps need the ExtDim API (11.1.2.2+ only)
#if HFM_11_1_2_2
        HFM.Try("Retrieving submission group and phase",
                () => _hsvProcessFlow.GetGroupPhaseFromCellExtDim(pov.HfmPovCOM,
                        out sGroup, out sPhase));
#else
        HFM.ThrowIncompatibleLibraryEx();
#endif
    }
    else {
        HFM.Try("Retrieving submission group and phase",
                () => _hsvProcessFlow.GetGroupPhaseFromCell(pov.Scenario.Id, pov.Year.Id,
                        pov.Period.Id, pov.Entity.Id, pov.Entity.ParentId,
                        pov.Value.Id, pov.Account.Id, pov.ICP.Id,
                        pov.Custom1.Id, pov.Custom2.Id, pov.Custom3.Id,
                        pov.Custom4.Id, out sGroup, out sPhase));
    }
    // The API returns group/phase as strings; assumes they are always numeric
    // — TODO confirm against the HsvProcessFlow documentation
    group = int.Parse(sGroup);
    phase = int.Parse(sPhase);
}
/// <summary>
/// Retrieves the content of a document as a byte array.
/// Throws a DocumentException if no matching document exists.
/// </summary>
public byte[] GetDocument(
        // Fixed typo in help text: "docuemnt" -> "document"
        [Parameter("The path to the folder from which to retrieve the document")]
        string path,
        [Parameter("The name of the document to retrieve")]
        string name,
        // Multiple documents with the same name, but different document types can exist
        // within a folder
        [Parameter("The document type to look for; as names need not be unique within a folder, " +
                   "the document type can be used to disambiguate the actual document required. " +
                   "However, if the document you are after is unique, you can specify a document " +
                   "type of 'All' to retrieve the first document with the specified name.",
                   DefaultValue = EDocumentType.All)]
        EDocumentType documentType)
{
    string docContent = null;
    object desc = null, secClass = null;

    // Find the document in the cache to determine its actual type and file type
    var doc = FindDocument(path, name, documentType);
    if (doc != null) {
        HFM.Try("Retrieving document",
                () => docContent = (string)_documents.GetDocument(doc.Folder, doc.Name,
                        (int)doc.DocumentType, (int)doc.DocumentFileType,
                        ref desc, ref secClass));
    }
    else {
        throw new DocumentException("No document named {0} could be found at {1}", name, path);
    }
    return(FileUtilities.GetBytes(docContent));
}
/// <summary>
/// Saves a document with the supplied content into the specified folder,
/// then refreshes the document cache for that folder.
/// </summary>
public void SaveDocument(
        [Parameter("The path to the folder in which to save the document")]
        string path,
        [Parameter("The name to give the document")]
        string name,
        [Parameter("The description to give the document")]
        string desc,
        [Parameter("The document type to save the document as")]
        EDocumentType documentType,
        [Parameter("The document file type to save the document as")]
        EDocumentFileType documentFileType,
        [Parameter("The content for the new document")]
        string content,
        [Parameter("The security class to assign the document",
                   DefaultValue = "[Default]")]
        string securityClass,
        [Parameter("If true, the document is saved as a private document; otherwise, it is public",
                   DefaultValue = false)]
        bool isPrivate,
        [Parameter("True to overwrite any existing document with the same name in the folder",
                   DefaultValue = true)]
        bool overwrite)
{
    HFM.Try("Saving document {0} to {1}", name, path,
            () => _documents.SaveDocument2(path, name, desc, (int)documentType,
                    (int)documentFileType, securityClass, content, isPrivate,
                    (int)EDocumentType.All, overwrite));

    // Update cache so the newly-saved document is visible
    LoadCache(path, false);
}
// Retrieves and outputs the process history for a phased submission process unit
protected override void GetHistory(POV pov, IOutput output)
{
    object oDates = null, oUsers = null, oActions = null, oStates = null,
           oAnnotations = null, oPaths = null, oFiles = null;

    if (HFM.HasVariableCustoms) {
        // Variable custom dimension apps need the ExtDim API (11.1.2.2+ only)
#if HFM_11_1_2_2
        HFM.Try("Retrieving process history for {0}", pov,
                () => _hsvProcessFlow.PhasedSubmissionGetHistory2ExtDim(pov.HfmPovCOM,
                        out oDates, out oUsers, out oActions, out oStates,
                        out oAnnotations, out oPaths, out oFiles));
#else
        HFM.ThrowIncompatibleLibraryEx();
#endif
    }
    else {
        // Message now matches the ExtDim branch (was missing the word "for")
        HFM.Try("Retrieving process history for {0}", pov,
                () => _hsvProcessFlow.PhasedSubmissionGetHistory2(pov.Scenario.Id,
                        pov.Year.Id, pov.Period.Id, pov.Entity.Id, pov.Entity.ParentId,
                        pov.Value.Id, pov.Account.Id, pov.ICP.Id,
                        pov.Custom1.Id, pov.Custom2.Id, pov.Custom3.Id, pov.Custom4.Id,
                        out oDates, out oUsers, out oActions, out oStates,
                        out oAnnotations, out oPaths, out oFiles));
    }
    OutputHistory(output, pov, oDates, oUsers, oActions, oStates,
            oAnnotations, oPaths, oFiles);
}
/// <summary>
/// Creates a new Financial Management application on the specified cluster.
/// </summary>
public void CreateApplication(
        [Parameter("The name of the cluster on which to create the application",
                   Alias = "ClusterName")]
        string cluster,
        [Parameter("The name to be given to the new application", Alias = "AppName")]
        string application,
        [Parameter("The description for the new application (cannot be blank)",
                   Alias = "AppDesc")]
        string description,
        [Parameter("Path to the application profile (.per) file used to define the " +
                   "time and custom dimensions")]
        string profilePath,
        [Parameter("The name of the project to assign the application to in Shared Services",
                   DefaultValue = "Default Application Group")]
        string sharedServicesProject,
        [Parameter("The URL of the virtual directory for Financial Management. " +
                   "The URL should include the protocol, Web server name and port, and virtual " +
                   "directory name, e.g. http://<server>:80/hfm")]
        string appWebServerUrl)
{
    // The .per profile file is passed to the API as raw bytes
    byte[] profile = File.ReadAllBytes(profilePath);
    var msg = string.Format("Creating application {0} on {1}", application, cluster);
    HFM.Try(msg,
            () => Client.HsxClient.CreateApplicationCAS(cluster, "Financial Management",
                    application, description, "", profile, null, null, null, null,
                    sharedServicesProject, appWebServerUrl));
}
// Constructs a Connection that authenticates via an existing SSO token
internal Connection(Client client, string token)
{
    Client = client;
    _token = token;
    HFM.Try("Setting logon credentials via SSO token",
            () => Client.HsxClient.SetLogonInfoSSO(null, null, token, null));
}
// Performs a process management action against a phased submission process
// unit, returning the resulting process state
protected override EProcessState SetProcessState(POV pov, EProcessAction action,
        EProcessState targetState, string annotation, string[] paths, string[] files)
{
    short newState = 0;
    if (HFM.HasVariableCustoms) {
        // Variable custom dimension apps need the ExtDim API (11.1.2.2+ only)
#if HFM_11_1_2_2
        HFM.Try("Setting phased submission state for {0}", pov,
                () => _hsvProcessFlow.PhasedSubmissionProcessManagementChangeStateForMultipleEntities2ExtDim(
                        pov.HfmSliceCOM, annotation, (int)action, false, false,
                        (short)targetState, paths, files, out newState));
#else
        HFM.ThrowIncompatibleLibraryEx();
#endif
    }
    else {
        // The non-ExtDim API takes arrays of member ids even for a single cell
        HFM.Try("Setting phased submission state for {0}", pov,
                () => _hsvProcessFlow.PhasedSubmissionProcessManagementChangeStateForMultipleEntities2(
                        pov.Scenario.Id, pov.Year.Id, pov.Period.Id,
                        new int[] { pov.Entity.Id }, new int[] { pov.Entity.ParentId },
                        pov.Value.Id, new int[] { pov.Account.Id }, new int[] { pov.ICP.Id },
                        new int[] { pov.Custom1.Id }, new int[] { pov.Custom2.Id },
                        new int[] { pov.Custom3.Id }, new int[] { pov.Custom4.Id },
                        annotation, (int)action, false, false, (short)targetState,
                        paths, files, out newState));
    }
    return((EProcessState)newState);
}
/// <summary>
/// Extracts the application's calculation rules to a file.
/// </summary>
public void ExtractRules(
        [Parameter("Path to the generated rules extract file")]
        string rulesFile,
        [Parameter("Path to the extract log file; if not specified, defaults to same path " +
                   "and name as extract file.", DefaultValue = null)]
        string logFile,
        [Parameter("Format in which to extract rules", Since = "11.1.2.2",
                   DefaultValue = ERulesFormat.Native)]
        ERulesFormat rulesFormat)
{
    if (string.IsNullOrEmpty(logFile)) {
        logFile = Path.ChangeExtension(rulesFile, ".log");
    }

    // Ensure rulesFile and logFile are writeable locations
    FileUtilities.EnsureFileWriteable(rulesFile);
    FileUtilities.EnsureFileWriteable(logFile);

#if HFM_11_1_2_2
    // 11.1.2.2+ supports extraction in alternate rule formats
    HFM.Try("Extracting rules",
            () => HsvRulesLoad.ExtractCalcRulesEx(rulesFile, logFile, (int)rulesFormat));
#else
    // Earlier releases only support the native format; rulesFormat is ignored
    HFM.Try("Extracting rules",
            () => HsvRulesLoad.ExtractCalcRules(rulesFile, logFile));
#endif
}
/// <summary>
/// Loads a security extract file into the application, logging a warning if
/// the load completed with warnings.
/// </summary>
public void LoadSecurity(
        [Parameter("Path to the source security extract file")]
        string securityFile,
        [Parameter("Path to the load log file; if not specified, defaults to same path " +
                   "and name as the source security file.", DefaultValue = null)]
        string logFile,
        LoadOptions options)
{
    object oWarnings = null;

    if (string.IsNullOrEmpty(logFile)) {
        logFile = Path.ChangeExtension(securityFile, ".log");
    }

    // Ensure security file exists and logFile is writeable
    FileUtilities.EnsureFileExists(securityFile);
    FileUtilities.EnsureFileWriteable(logFile);

    HFM.Try("Loading security",
            () => HsvSecurityLoad.Load(securityFile, logFile, out oWarnings));

    // NOTE(review): oWarnings is a COM boolean marshaled as object; presumably
    // always non-null after a successful Load call — confirm against the API
    if ((bool)oWarnings) {
        _log.Warn("Security load resulted in warnings; check log file for details");
        // TODO: Should we show the warnings here?
    }
}
/// <summary>
/// Loads the document cache with info about all documents, keyed by
/// their folder paths.
/// </summary>
protected void LoadCache(string path, bool recurse)
{
    // COM out-parameters; each is a variant array unpacked below
    object oNames = null, oDescs = null, oTimestamps = null, oSecurityClasses = null,
           oIsPrivate = null, oFolderContentTypes = null, oDocOwners = null,
           oFileTypes = null, oDocTypes = null;
    string[] names, descs, docOwners;
    int[] securityClasses;
    double[] timestamps;
    int[] isPrivate;
    EDocumentType[] folderContentTypes, docTypes;
    EDocumentFileType[] fileTypes;
    var docs = new List<DocumentInfo>();

    // Default to the root folder when no path is given
    if (path == null || path.Length == 0) {
        path = @"\";
    }
    HFM.Try(string.Format("Retrieving details of all documents at {0}", path),
            () => oNames = _documents.EnumDocumentsEx(path, EDocumentType.All,
                    EDocumentFileType.All, false, 0, 0, (int)EPublicPrivate.Both,
                    ref oDescs, ref oTimestamps, ref oSecurityClasses, ref oIsPrivate,
                    ref oFolderContentTypes, ref oDocOwners, ref oFileTypes,
                    ref oDocTypes));
    // Convert each COM variant array into a typed .NET array; elements are
    // positionally aligned across all nine arrays
    names = HFM.Object2Array<string>(oNames);
    descs = HFM.Object2Array<string>(oDescs);
    timestamps = HFM.Object2Array<double>(oTimestamps);
    securityClasses = HFM.Object2Array<int>(oSecurityClasses);
    isPrivate = HFM.Object2Array<int>(oIsPrivate);
    folderContentTypes = HFM.Object2Array<EDocumentType>(oFolderContentTypes);
    docOwners = HFM.Object2Array<string>(oDocOwners);
    fileTypes = HFM.Object2Array<EDocumentFileType>(oFileTypes);
    docTypes = HFM.Object2Array<EDocumentType>(oDocTypes);

    for (var i = 0; i < names.Length; ++i) {
        docs.Add(new DocumentInfo() {
            Name = names[i],
            Folder = path,
            Description = descs[i],
            DocumentType = docTypes[i],
            DocumentFileType = fileTypes[i],
            // Timestamps come back as OLE Automation dates
            Timestamp = DateTime.FromOADate(timestamps[i]),
            IsPrivate = isPrivate[i] != 0,
            DocumentOwner = docOwners[i],
            SecurityClass = securityClasses[i],
            FolderContentType = folderContentTypes[i]
        });
        if (recurse && docTypes[i] == EDocumentType.Folder) {
            // Recurse into sub-directory
            LoadCache(AddFolderToPath(path, names[i]), recurse);
        }
    }
    _documentCache[path] = docs;
}
/// Monitors the progress of an EA (extended analytics) extract, logging status
/// transitions and raising an HFMException if the extract completes with errors.
private void MonitorEAExtract(IOutput output)
{
    int errorCode = 0;
    bool isRunning = false;
    double numComplete = 0;
    double numRecords = 0;
    var status = EA_TASK_STATUS_FLAGS.EA_TASK_STATUS_INITIALIZING;
    var taskStatus = EEATaskStatus.Initializing;
    var lastTaskStatus = EEATaskStatus.Initializing;

    output.InitProgress(taskStatus.ToString());
    var pm = new ProgressMonitor(output);
    pm.MonitorProgress((bool cancel, out bool running) => {
        HFM.Try("Retrieving task status",
                () => HsvStarSchemaACM.GetAsynchronousTaskStatus(out status, out numRecords,
                        out numComplete, out isRunning, out errorCode));
        taskStatus = (EEATaskStatus)status;
        running = isRunning;
        if (cancel && running) {
            HFM.Try("Cancelling task",
                    () => HsvStarSchemaACM.QuitAsynchronousTask());
        }
        else if (taskStatus != lastTaskStatus) {
            switch (taskStatus) {
                case EEATaskStatus.Complete:
                case EEATaskStatus.CompleteWithErrors:
                case EEATaskStatus.Cancelled:
                    // Terminal states are reported after the monitor loop ends
                    break;
                default:
                    _log.InfoFormat("Extract Status: {0} complete", lastTaskStatus);
                    output.Operation = taskStatus.ToString();
                    break;
            }
            lastTaskStatus = taskStatus;
        }
        // Guard against division by zero: numRecords is 0 while the task is
        // initializing, which would otherwise produce NaN/Infinity and an
        // undefined cast to int
        return numRecords > 0 ? (int)(numComplete / numRecords * 100) : 0;
    });
    output.EndProgress();

    switch (taskStatus) {
        case EEATaskStatus.Complete:
            _log.Info("Star schema extract completed successfully");
            break;
        case EEATaskStatus.CompleteWithErrors:
            _log.Error("Star schema extract completed with errors");
            throw new HFMException(errorCode);
        case EEATaskStatus.Cancelled:
            _log.Warn("Star schema extract was cancelled");
            break;
    }
}
/// <summary>
/// Deletes the named data extract template from the application.
/// </summary>
public void DeleteDataExtractTemplate(
        [Parameter("The name of the template to delete")]
        string templateName)
{
    HFM.Try("Deleting data extract template {0}", templateName,
            () => HsvStarSchemaTemplates.DeleteTemplate(templateName));
    _log.InfoFormat("Data extract template {0} deleted", templateName);
}
// Constructs a Connection that authenticates with a domain, user name and
// password (passed through the SSO logon API with a null token)
internal Connection(Client client, string domain, string userName, string password)
{
    Client = client;
    _domain = domain;
    _userName = userName;
    _password = password;
    HFM.Try("Setting logon credentials via username and password",
            () => Client.HsxClient.SetLogonInfoSSO(domain, userName, null, password));
}
/// <summary>
/// Lists the names of the extended analytics extract templates defined in
/// the application.
/// </summary>
public void EnumDataExtractTemplates(IOutput output)
{
    object oNames = null;
    HFM.Try("Retrieving EA templates",
            () => oNames = HsvStarSchemaTemplates.EnumTemplates());
    // Unpack the COM variant array into a string array for output
    var names = HFM.Object2Array<string>(oNames);
    output.WriteEnumerable(names, "Template Name");
}
/// <summary>
/// Retrieves and outputs the server's system folder path.
/// </summary>
public string GetSystemFolder(IOutput output)
{
    string folder = null;
    HFM.Try("Retrieving system folder",
            () => folder = HsxServer.GetSystemFolder());
    output.WriteSingleValue(folder, "System Folder");
    return folder;
}
/// <summary>
/// Returns true if the user has access sufficient to perform the specified task.
/// </summary>
public void CheckPermissionFor(ETask task)
{
    bool hasAccess = false;
    HFM.Try("Checking task permission",
            () => HsvSecurity.IsConnectedUserAllowedToPerformTask((int)task, out hasAccess));
    if (!hasAccess) {
        throw new AccessDeniedException(
                string.Format("You do not have permission to perform {0}", task));
    }
}
/// <summary>
/// Deletes a Financial Management application from the specified cluster.
/// </summary>
public void DeleteApplication(
        [Parameter("The name of the cluster from which to delete the application",
                   Alias = "ClusterName")]
        string cluster,
        [Parameter("The name of the application to be deleted", Alias = "AppName")]
        string application)
{
    var msg = string.Format("Deleting application {0} on {1}", application, cluster);
    HFM.Try(msg,
            () => Client.HsxClient.DeleteApplication(cluster, "Financial Management",
                    application));
}
/// <summary>
/// Deletes documents matching the specified criteria, returning the number
/// of documents actually deleted.
/// </summary>
public int DeleteDocuments(
        [Parameter("The path to the folder from which to delete documents")]
        string path,
        [Parameter("The name of the document(s) to delete; may include wildcards ? and *")]
        string name,
        [Parameter("Set to true to delete matching documents in sub-folders as well",
                   DefaultValue = false)]
        bool includeSubFolders,
        [Parameter("The document type(s) to delete; use All to include all documents that " +
                   "match the name, path, and any other criteria",
                   DefaultValue = EDocumentType.All)]
        EDocumentType documentType,
        [Parameter("Filter documents to be deleted to public, private or both",
                   DefaultValue = EPublicPrivate.Both)]
        EPublicPrivate visibility,
        IOutput output)
{
    int count = 0;
    List<DocumentInfo> docs = EnumDocuments(path, name, includeSubFolders,
            documentType, visibility, null);
    docs.Reverse();   // So we delete folder content before folders

    // The delete API takes parallel path/name arrays; we delete one at a time
    var paths = new string[1];
    var names = new string[1];
    if (docs.Count > 1) {
        output.InitProgress("Deleting documents", docs.Count);
    }
    foreach (var doc in docs) {
        paths[0] = doc.Folder;
        names[0] = doc.Name;
        HFM.Try("Deleting document {0}", doc.Name,
                () => _documents.DeleteDocuments(paths, names, (int)doc.DocumentType,
                        (int)doc.DocumentFileType, false));
        count++;
        // Remove a deleted folder's cache entry *before* the cancellation
        // check, so the cache stays consistent even on early exit (the final
        // LoadCache below does not refresh sub-folder entries when
        // includeSubFolders is false)
        if (doc.DocumentType == EDocumentType.Folder) {
            _documentCache.Remove(AddFolderToPath(doc.Folder, doc.Name));
        }
        if (output.IterationComplete()) {
            break;
        }
    }
    output.EndProgress();

    // Update cache
    LoadCache(path, includeSubFolders);
    _log.InfoFormat("Successfully deleted {0} documents", count);
    return count;
}
/// <summary>
/// Returns true if the user has the specified role assigned (directly
/// or indirectly through group membership).
/// </summary>
public void CheckRole(ERole role)
{
    bool hasRole = false;
    HFM.Try("Checking role permission",
            () => HsvSecurity.IsConnectedUserInRole((int)role, out hasRole));
    if (!hasRole) {
        throw new AccessDeniedException(
                string.Format("You have not been assigned the role {0}", role));
    }
}
/// Returns a bit-field representing the calculation status for a subcube
internal int GetCalcStatus(POV pov)
{
    int status = -1;
    // Fall back to the entity's default currency when no Value member is specified
    int valueId = pov.IsSpecified(EDimension.Value) ?
        pov.Value.Id : pov.Entity.DefaultCurrencyId;
    HFM.Try("Retrieving calc status for {0}", pov,
            () => _hsvData.GetCalcStatus(pov.Scenario.Id, pov.Year.Id, pov.Period.Id,
                    pov.Entity.Id, pov.Entity.ParentId, valueId, out status));
    return status;
}
// Performs a process management action against a process unit, returning the
// resulting process state
protected override EProcessState SetProcessState(POV pu, EProcessAction action,
        EProcessState targetState, string annotation, string[] paths, string[] files)
{
    short newState = 0;
    // The API takes arrays of entity/parent ids, even for a single process unit
    var entityIds = new int[] { pu.Entity.Id };
    var parentIds = new int[] { pu.Entity.ParentId };
    HFM.Try("Setting process unit state for {0}", pu,
            () => _hsvProcessFlow.ProcessManagementChangeStateForMultipleEntities2(
                    pu.Scenario.Id, pu.Year.Id, pu.Period.Id,
                    entityIds, parentIds, pu.Value.Id,
                    annotation, (int)action, false, false, (short)targetState,
                    paths, files, out newState));
    return (EProcessState)newState;
}
// Retrieves and outputs the process history for a process unit
protected override void GetHistory(POV pu, IOutput output)
{
    object oDates = null, oUsers = null, oActions = null, oStates = null,
           oAnnotations = null, oPaths = null, oFiles = null;

    HFM.Try("Retrieving process history for {0}", pu,
            () => _hsvProcessFlow.GetHistory2(pu.Scenario.Id, pu.Year.Id, pu.Period.Id,
                    pu.Entity.Id, pu.Entity.ParentId, pu.Value.Id,
                    out oDates, out oUsers, out oActions, out oStates,
                    out oAnnotations, out oPaths, out oFiles));
    OutputHistory(output, pu, oDates, oUsers, oActions, oStates,
            oAnnotations, oPaths, oFiles);
}
/// <summary>
/// Clears all data from the application, monitoring the (blocking) server
/// task and reporting progress while it runs.
/// </summary>
public void ClearAllData(SystemInfo si, IOutput output)
{
    output.InitProgress("Clear Data");
    HFM.Try("Clearing all data", () => {
        // ClearAllData blocks, so start the monitor before the call and
        // signal completion afterwards
        si.MonitorBlockingTask(output);
        _hsvData.ClearAllData();
        si.BlockingTaskComplete();
    });
    _log.Info("Application data cleared");
    output.EndProgress();
}