internal bool TryGetMeasureDimensionNodesForRole(string currentLanguage, string currentLabelRole, string roleId, Taxonomy tax, out DimensionNode topLevelNode)
{
    topLevelNode = null;

    DefinitionLink link = definitionLinks[roleId] as DefinitionLink;
    if (link == null)
    {
        // Unknown role: nothing to build.
        return false;
    }

    // The root node represents the definition link itself; its children are the
    // measure-element trees built for each hypercube in the link.
    topLevelNode = new DimensionNode(link.Title);
    topLevelNode.MyDefinitionLink = link;

    if (link.HypercubeLocatorsHrefs.Count > 0)
    {
        foreach (string hypercubeHref in link.HypercubeLocatorsHrefs)
        {
            List<DimensionNode> measureNodes;
            link.BuildMeasureElementTreeForHypercubeId(tax, hypercubeHref, false, out measureNodes);
            if (measureNodes == null)
            {
                continue;
            }

            foreach (DimensionNode measureNode in measureNodes)
            {
                topLevelNode.AddChild(measureNode);
            }
        }
    }

    return true;
}
/// <summary>
/// Returns the documentation (definition) text for <paramref name="elementID"/> from the
/// given taxonomy, loading and caching the taxonomy's definition files on first access
/// for the current language. Returns <see cref="string.Empty"/> when the taxonomy is
/// unknown or the element has no definition.
/// </summary>
/// <param name="taxonomy">Taxonomy file name or path; only the file name portion is used as the cache key.</param>
/// <param name="elementID">The element id to look up.</param>
/// <returns>The definition text, or an empty string when not found.</returns>
public string GetDefinition(string taxonomy, string elementID)
{
    taxonomy = Path.GetFileName(taxonomy);
    if (!this.Taxonomies.ContainsKey(taxonomy))
    {
        return string.Empty;
    }

    // Cache layout: language -> taxonomy file name -> (element id -> definition text).
    // TryGetValue avoids the repeated ContainsKey+indexer double lookups of the
    // original implementation.
    Dictionary<string, Dictionary<string, string>> languageCache;
    if (!definitionCache.TryGetValue(this.Language, out languageCache))
    {
        languageCache = definitionCache[this.Language] = new Dictionary<string, Dictionary<string, string>>();
    }

    Dictionary<string, string> taxonomyCache;
    if (!languageCache.TryGetValue(taxonomy, out taxonomyCache))
    {
        string basePath = Path.Combine(GetBasePath(), DefinitionPath);
        taxonomyCache = languageCache[taxonomy] = new Dictionary<string, string>();
        foreach (string definitionFile in this.Taxonomies[taxonomy].DefinitionFiles)
        {
            string err;
            string file = Path.Combine(basePath, definitionFile);
            // Best effort: a file that fails to load simply contributes no entries
            // (the original code also ignored the failure; `err` is discarded).
            Taxonomy.TryGetDocumentationInformation(this.Language, file, ref taxonomyCache, out err);
        }
    }

    string definition;
    if (taxonomyCache.TryGetValue(elementID, out definition))
    {
        return definition;
    }
    return string.Empty;
}
/// <summary>
/// Returns the authoritative-reference text for <paramref name="elementID"/> from the
/// given taxonomy, loading and caching the taxonomy's reference files on first access
/// for the current language. Returns <see cref="string.Empty"/> when the taxonomy is
/// unknown or the element has no references.
/// </summary>
/// <param name="taxonomy">Taxonomy file name or path; only the file name portion is used as the cache key.</param>
/// <param name="elementID">The element id to look up.</param>
/// <returns>The reference text, or an empty string when not found.</returns>
public string GetReferences(string taxonomy, string elementID)
{
    taxonomy = Path.GetFileName(taxonomy);
    if (!this.Taxonomies.ContainsKey(taxonomy))
    {
        return string.Empty;
    }

    // Cache layout: language -> taxonomy file name -> (element id -> reference text).
    // TryGetValue avoids the repeated ContainsKey+indexer double lookups of the
    // original implementation.
    Dictionary<string, Dictionary<string, string>> languageCache;
    if (!referenceCache.TryGetValue(this.Language, out languageCache))
    {
        languageCache = referenceCache[this.Language] = new Dictionary<string, Dictionary<string, string>>();
    }

    Dictionary<string, string> taxonomyCache;
    if (!languageCache.TryGetValue(taxonomy, out taxonomyCache))
    {
        string basePath = Path.Combine(GetBasePath(), ReferencesPath);
        taxonomyCache = languageCache[taxonomy] = new Dictionary<string, string>();
        foreach (string referenceFile in this.Taxonomies[taxonomy].ReferenceFiles)
        {
            string err;
            string file = Path.Combine(basePath, referenceFile);
            // Best effort: a file that fails to load simply contributes no entries
            // (the original code also ignored the failure; `err` is discarded).
            Taxonomy.TryGetReferenceInformation(file, ref taxonomyCache, out err);
        }
    }

    string references;
    if (taxonomyCache.TryGetValue(elementID, out references))
    {
        return references;
    }
    return string.Empty;
}
/// <summary>
/// Adds (or updates) a parent/child presentation relationship between two locators,
/// creating either locator on demand from the taxonomy's element table.
/// </summary>
/// <param name="elementId">Href/id of the child element. Must not be null.</param>
/// <param name="parentElementId">Href/id of the parent element; may be null or empty for a root-level arc.</param>
/// <param name="newLocatorRelationshipInfo">Relationship details (order, priority, etc.) to attach to the arc.</param>
/// <param name="taxonomy">Taxonomy used to resolve element ids to <see cref="Element"/> objects.</param>
/// <returns>Always true.</returns>
public bool UpdateArc(string elementId, string parentElementId, LocatorRelationshipInfo newLocatorRelationshipInfo, Taxonomy taxonomy)
{
    // BUG FIX: the original indexed `locators[parentElementId]` before checking for
    // null; a Hashtable/HybridDictionary indexer throws ArgumentNullException on a
    // null key. Only perform the lookup when an id was actually supplied.
    PresentationLocator parentLocator = null;
    if (parentElementId != null)
    {
        parentLocator = locators[parentElementId] as PresentationLocator;
    }
    if (parentLocator == null && !string.IsNullOrEmpty(parentElementId))
    {
        parentLocator = new PresentationLocator();
        parentLocator.href = parentElementId;
        parentLocator.MyElement = taxonomy.allElements[parentElementId] as Element;
        locators[parentElementId] = parentLocator;
    }

    PresentationLocator childLocator = locators[elementId] as PresentationLocator;
    if (childLocator == null)
    {
        childLocator = new PresentationLocator();
        childLocator.href = elementId;
        childLocator.MyElement = taxonomy.allElements[elementId] as Element;
        if (parentLocator != null)
        {
            childLocator.AddParent(parentLocator);
        }
        locators[elementId] = childLocator;
    }

    if (parentLocator != null)
    {
        if (parentLocator.childLocatorsByHRef == null)
        {
            parentLocator.childLocatorsByHRef = new HybridDictionary();
        }

        // Merge into an existing child entry if present; otherwise create a new one.
        ChildPresentationLocator cpl = parentLocator.childLocatorsByHRef[elementId] as ChildPresentationLocator;
        if (cpl != null)
        {
            cpl.AddRelationship(newLocatorRelationshipInfo);
        }
        else
        {
            cpl = new ChildPresentationLocator(elementId, newLocatorRelationshipInfo);
            parentLocator.childLocatorsByHRef[elementId] = cpl;
        }
    }

    return true;
}
/// <summary>
/// Loads a local 2009 public-review taxonomy file and verifies that, after parsing,
/// the dei_DocumentType element carries enumeration data.
/// NOTE(review): depends on a hard-coded local path (S:\...) — runs only where that share exists.
/// </summary>
public void TestUsGaap2008_ci_all()
{
    string fileName = @"S:\2009PublicReview\ind\ci\ssss.xsd";

    Taxonomy s = new Taxonomy();
    int errors = 0;

    if (s.Load(fileName, out errors) != true)
    {
        Assert.Fail((string)s.ErrorList[0]);
    }

    // Parse errors are intentionally not asserted here; only the enum data matters.
    s.Parse(out errors);

    Assert.IsNotNull((s.allElements["dei_DocumentType"] as Element).EnumData, "enum data should not be null");
}
/// <summary>
/// Builds the dimension-validation information for every definition link so it can
/// later be applied to markups.
/// </summary>
/// <param name="tax">The taxonomy being validated; its language/label role are defaulted here if unset.</param>
internal void BuildDimensionValidationInformation(Taxonomy tax)
{
    // Ensure the taxonomy has a usable language and label role before building.
    if (tax.CurrentLanguage == null)
    {
        tax.CurrentLanguage = tax.SupportedLanguages.Count == 0
            ? "en"
            : tax.SupportedLanguages[0] as string;
    }
    if (tax.currentLabelRole == null)
    {
        tax.currentLabelRole = PresentationLocator.preferredLabelRole;
    }

    foreach (DefinitionLink link in this.definitionLinks.Values)
    {
        if (link.ElementHypercubeRelationships != null)
        {
            // Already built for this link; skip it.
            continue;
        }

        link.BuildDimensionValidationInformation(tax, definitionLinks);

        // Track whether any link carries a dimension without a default member.
        // If every dimension has a default, markups without segment/scenario
        // information need no dimension validation at all.
        if (!HasDimensionsWithoutDefault && link.HasDimensionWithoutDefault())
        {
            HasDimensionsWithoutDefault = true;
        }
    }
}
/// <summary>
/// Resolves <paramref name="curNode"/> through its target role, if it has one:
/// looks up the same parent/child pair in the target role's dimension tree.
/// Falls back to <paramref name="curNode"/> itself when there is no target role
/// or no matching node in the target role.
/// </summary>
/// <param name="tax">Taxonomy supplying the current language and label role.</param>
/// <param name="curNode">The dimension node to resolve.</param>
/// <returns>The node from the target role, or <paramref name="curNode"/> unchanged.</returns>
public DimensionNode GetMemberUsingTargetRole(Taxonomy tax, DimensionNode curNode)
{
    string targetRole = curNode.NodeDimensionInfo.TargetRole;
    if (targetRole == null)
    {
        return curNode;
    }

    DimensionNode roleRoot;
    if (TryGetDimensionNodeForRole(tax.CurrentLanguage, tax.CurrentLabelRole, targetRole, out roleRoot))
    {
        Node match = roleRoot.GetChildNode(curNode.parent.Id, curNode.Id);
        if (match != null)
        {
            // Preserve original semantics: a non-DimensionNode match yields null.
            return match as DimensionNode;
        }
    }

    return curNode;
}
/// <summary>
/// Loads the Companies House taxonomy (with all dependent taxonomies and their
/// presentation/calculation/label/reference linkbases), then validates four instance
/// documents for the "requires element" check, expecting specific error counts.
/// </summary>
public void TestUkCompanies_Instances()
{
    Taxonomy tax = new Taxonomy();
    int errors = 0;

    if ( tax.Load( COMPANIES_HOUSE_FILE, out errors ) != true )
    {
        Assert.Fail( (string)tax.ErrorList[0]);
    }

    errors = 0;

    // this loads up all dependant taxonomies, and loads the corresponding presentation,
    // calculation, label, and reference linkbases
    // parse presentation first
    tax.CurrentLabelRole = "preferredLabel";
    tax.CurrentLanguage = "en";
    tax.Parse( out errors );
    Assert.AreEqual( 0, errors, "should not have any errors");

    // Map the instance documents' local prefixes onto the canonical taxonomy prefixes.
    Hashtable prefixXRef = new Hashtable();
    prefixXRef["ae"] = "uk-gaap-ae";
    prefixXRef["pt"] = "uk-gaap-pt";
    prefixXRef["gc"] = "uk-gcd";

    ValidateInstanceDoc( tax, DAMC_INST_FILE, 0, prefixXRef );
    ValidateInstanceDoc( tax, DAMC_INST_FILE_Missing_Item, 2, prefixXRef );
    ValidateInstanceDoc( tax, DAM_INST_FILE, 0, prefixXRef );
    ValidateInstanceDoc( tax, DAM_INST_FILE_Missing_Item, 1, prefixXRef );
}
/// <summary>
/// Loads the US GAAP taxonomy, runs full validation, and echoes every warning and
/// error to the console. This test reports rather than asserts on validation results.
/// </summary>
public void TestValidateTaxonomyRecursively()
{
    Taxonomy tx = new Taxonomy();
    int errors = 0;

    Assert.AreEqual( true, tx.Load( US_GAAP_FILE, out errors ), "Could not load US GAAP File" );
    Assert.AreEqual( 0, errors );

    Console.WriteLine("==========================");
    ValidationStatus VS = tx.Validate();
    // Fixed typo in output: "Errros" -> "Errors".
    Console.WriteLine("Number of Errors: " + tx.ValidationErrors.Count);
    Console.WriteLine("Number of Warnings: " + tx.ValidationWarnings.Count);
    Console.WriteLine("Validation Status: " + VS.ToString());

    // foreach over an empty collection is a no-op, so the Count > 0 guards and
    // manual enumerators of the original are unnecessary.
    foreach (object warning in tx.ValidationWarnings)
    {
        Console.WriteLine(" Warning > " + warning);
    }
    foreach (object validationError in tx.ValidationErrors)
    {
        Console.WriteLine(" Error > " + validationError);
    }

    Console.WriteLine("==========================");
}
/// <summary>
/// Build the element hypercube relationship info to be able to validate
/// markups
/// </summary>
/// <param name="tax">Taxonomy supplying the current language/label role and recorded as the parent of each relationship.</param>
/// <param name="definisionLinks">All definition links, keyed by role; used to resolve target roles to other links.</param>
internal void BuildDimensionValidationInformation( Taxonomy tax, Hashtable definisionLinks )
{
	// Already built (possibly by an earlier use of this taxonomy object) — do nothing.
	if (ElementHypercubeRelationships != null) return;
	ElementHypercubeRelationships = new List<ElementHypercubeRelationhipInfo>();
	// No measure elements in this link means nothing to relate.
	if (MeasureLocatorsHrefs == null || MeasureLocatorsHrefs.Count == 0 ) return;
	foreach (string measureId in MeasureLocatorsHrefs)
	{
		DefinitionLocator dloc;
		if (!this.TryGetLocator(measureId, out dloc)) continue;
		// Build the measure element's subtree; children include hypercubes and member elements.
		DimensionNode measureNode = dloc.CreateDimensionNode(tax.currentLanguage, tax.currentLabelRole,
			null, measureId, this, true, definisionLinks, null, null, false, false);
		if (measureNode.children != null)
		{
			List<string> memberList = new List<string>();
			//this list would contain the hypercubes as well ,but we will remove it...
			//later
			RecursivelyBuildMemberList(measureNode, ref memberList);
			//build list of children measures...
			foreach (DimensionNode dn in measureNode.children)
			{
				DefinitionLink targetLink = this;
				if (dn.NodeDimensionInfo.NodeType == DimensionNode.NodeType.Hypercube)
				{
					// Hypercube ids do not belong in the member list (see note above).
					memberList.Remove(dn.Id);
					if (!string.IsNullOrEmpty(dn.NodeDimensionInfo.TargetRole))
					{
						// The hypercube's dimensions live in a different role; resolve it,
						// falling back to this link if the role is not found.
						targetLink = definisionLinks[dn.NodeDimensionInfo.TargetRole] as DefinitionLink;
						if (targetLink == null)
						{
							targetLink = this;
						}
					}
					//build the hypercube with its dimension children separately..
					//as this might have got built earlier when the tax object was used
					//or if multiple element uses the same hypercube... this does not get
					//rebuilt every time the hypercube is used...
					DimensionNode hypercubeNode;
					if (targetLink.TryGetHypercubeNode(tax.currentLanguage, tax.currentLabelRole,
						definisionLinks, dn.Id, false, out hypercubeNode))
					{
						// Record one element<->hypercube relationship, capturing the arc's
						// all/notAll, closed, and segment/scenario attributes.
						ElementHypercubeRelationhipInfo info = new ElementHypercubeRelationhipInfo();
						info.IsAll = dn.NodeDimensionInfo.IsAllRelationShip;
						info.IsClosed = dn.NodeDimensionInfo.IsClosed;
						info.IsSegment = !dn.NodeDimensionInfo.IsScenario;
						info.ParentBaseSet = this;
						info.ParentTaxonomyObj = tax;
						info.HypercubeId = dn.Id;
						// NOTE(review): memberList is shared (not copied) across all infos
						// built for this measure — later Remove calls mutate it for all; verify intended.
						info.ElementIdList = memberList;
						if (hypercubeNode.children != null)
						{
							// Index the hypercube's dimensions by id for fast validation lookups.
							foreach (DimensionNode dimNode in hypercubeNode.children)
							{
								info.DimensionsById[dimNode.Id] = dimNode;
							}
						}
						ElementHypercubeRelationships.Add(info);
					}
				}
			}
		}
	}
}
/// <summary>
/// Builds the dimension-node trees for every measure element that is a direct,
/// non-prohibited parent of the given hypercube.
/// </summary>
/// <param name="tax">Taxonomy supplying language/label role (defaulted to "en"/preferred label when unset) and the definition links.</param>
/// <param name="hypercubeId">Id of the hypercube whose measure-element parents are wanted.</param>
/// <param name="buildHyperubeChildren">Whether each measure node should also build its hypercube children.</param>
/// <param name="dimNodes">Receives one node per qualifying measure element; never null.</param>
public void BuildMeasureElementTreeForHypercubeId(Taxonomy tax, string hypercubeId, bool buildHyperubeChildren, out List<DimensionNode> dimNodes)
{
    dimNodes = new List<DimensionNode>();

    string curLang = tax.currentLanguage;
    string curLabelRole = tax.currentLabelRole;
    if (curLang == null)
    {
        curLang = "en";
    }
    if (curLabelRole == null)
    {
        curLabelRole = PresentationLocator.preferredLabelRole;
    }

    //determine all the measure element parents of the hypercube element id
    foreach (string measureId in this.MeasureLocatorsHrefs)
    {
        DefinitionLocator dloc;
        if (!this.TryGetLocator(measureId, out dloc)) continue;
        if (dloc.childLocatorsByHRef == null) continue;

        // PERF FIX: the original scanned every key with string.Equals; the indexer
        // performs the same lookup directly (returns null when absent).
        bool add = false;
        ChildDefinitionLocator lri = dloc.childLocatorsByHRef[hypercubeId] as ChildDefinitionLocator;
        if (lri != null)
        {
            // The hypercube is a child of this measure; include the measure only if
            // at least one arc to the hypercube is not prohibited.
            foreach (DefinitionLocatorRelationshipInfo dri in lri.LocatorRelationshipInfos)
            {
                if (!dri.IsProhibited)
                {
                    add = true;
                    break;
                }
            }
        }

        if (add)
        {
            DefinitionLocatorRelationshipInfo parentDRI = new DefinitionLocatorRelationshipInfo(DimensionNode.NodeType.Item);
            DimensionNode dimNode = dloc.CreateDimensionNode(curLang, curLabelRole,
                null, measureId, this, true, tax.NetDefinisionInfo.DefinitionLinks, parentDRI,
                null, buildHyperubeChildren,
                IsScenarioHypercube(hypercubeId, tax.NetDefinisionInfo.DefinitionLinks));
            if (dimNode != null)
            {
                dimNodes.Add(dimNode);
            }
        }
    }
}
/// <summary>
/// Verifies that the ICI-RR taxonomy both loads and parses without reporting failure.
/// </summary>
public void TestLoadICI_RR_Taxonomy_WithPrompt()
{
    int numErrors;
    Taxonomy iciTaxonomy = new Taxonomy();

    bool loaded = iciTaxonomy.Load(ICI_RR_SCHEMAFILE, out numErrors);
    Assert.IsTrue(loaded, "Failed to load ICI-RR Taxonomy. " + numErrors + " errors were found");

    bool parsed = iciTaxonomy.Parse(out numErrors);
    Assert.IsTrue(parsed, "Failed to parse the ICI-RR Taxonomy: " + numErrors + " errors were found");
}
/// <summary>
/// Merges the role types of a dependent taxonomy into this one.
/// Locally defined role types win over incoming ones with the same key.
/// </summary>
/// <param name="tax">The dependent taxonomy whose role types are merged in.</param>
private void MergeRoleTypes( Taxonomy tax )
{
    if (tax.roleTypes == null)
    {
        return;
    }

    if (this.roleTypes == null)
    {
        // No local table yet: adopt the dependent taxonomy's table wholesale.
        roleTypes = tax.roleTypes;
        return;
    }

    foreach (KeyValuePair<string, RoleType> pair in tax.roleTypes)
    {
        if (!this.roleTypes.ContainsKey(pair.Key))
        {
            roleTypes[pair.Key] = pair.Value;
        }
    }
}
/// <summary>
/// Raises the <c>ProcessingFileChanged</c> event to report which file is currently
/// being processed during a load, giving callers progress feedback.
/// If no handler is attached, nothing happens.
/// </summary>
/// <param name="tax">The taxonomy whose load is in progress.</param>
/// <param name="e">Details about the file currently being processed.</param>
public static void OnProcessingFileChanged( Taxonomy tax, ProcessingFileChangedEventArgs e )
{
	// NOTE(review): check-then-invoke without a local copy of the delegate;
	// assumes the event is only raised/unsubscribed on one thread — TODO confirm.
	if ( ProcessingFileChanged != null )
	{
		ProcessingFileChanged( tax, e );
	}
}
/// <summary>
/// Loads relevant information from this.<see cref="currentInstance"/> and retains auxiliary files from this.<see cref="currentReportDirectory"/>.
/// Populates the filing summary's statistics, input/supplemental file lists, base
/// taxonomy names, and linkbase flags.
/// </summary>
private void BuildFilingSummary()
{
	InstanceStatistics currentInstanceStat = InstanceUtils.GetStatisticsFromInstance( this.currentInstance );
	this.currentFilingSummary.ReportFormat = this.ReportFormat;
	this.currentFilingSummary.Version = this.currentAssemblyVersion;

	//Deprecated - 2.4.0.2
	//myFilingSummary.FilingDate = FilingDate;
	//myFilingSummary.PeriodEndDate = PeriodEnding;
	//myFilingSummary.TickerSymbol = TickerSymbol;
	//myFilingSummary.AccessionNumber = AccessionNumber;
	//myFilingSummary.FiscalYearEnd = FiscalYearEnd;

	//add the filing package files to the filing summary
	//OR copy the resource into the reports directory
	string instPath = Path.GetDirectoryName( this.currentInstancePath );
	bool reportDirectoryExists = Directory.Exists( this.currentReportDirectory );
	if( Directory.Exists( instPath ) )
	{
		// Collect the lower-cased file names of all taxonomy linkbase files that
		// exist on disk, so package files can be recognized as taxonomy references.
		List<string> localTaxonomyFiles = new List<string>();
		if( this.currentTaxonomy != null && this.currentTaxonomy.LinkbaseFileInfos != null )
		{
			foreach( LinkbaseFileInfo linkInfo in currentTaxonomy.LinkbaseFileInfos )
			{
				if( Path.IsPathRooted( linkInfo.Filename ) && File.Exists( linkInfo.Filename ) )
				{
					string tmpName = Path.GetFileName( linkInfo.Filename );
					localTaxonomyFiles.Add( tmpName.ToLower() );
				}
			}
		}

		// NOTE(review): `prefix` is computed but never used below — candidate for removal.
		string prefix = Path.GetFileNameWithoutExtension( this.currentInstancePath );
		string[] packageFiles = Directory.GetFiles( instPath );
		foreach( string path in packageFiles )
		{
			string ext = Path.GetExtension( path ).ToLower();
			string file = Path.GetFileName( path );
			switch( ext )
			{
				case ".xsd":
				case ".xml":
					// Instance, taxonomy, and referenced linkbase files are recorded as
					// inputs; any other .xsd/.xml falls through to the attachment handling.
					bool isInstance = false;
					if( !string.IsNullOrEmpty( this.currentInstancePath ) )
						isInstance = this.currentInstancePath.EndsWith( file, StringComparison.CurrentCultureIgnoreCase );

					bool isTaxonomy = false;
					if( !string.IsNullOrEmpty( this.currentTaxonomyPath ) )
						isTaxonomy = this.currentTaxonomyPath.EndsWith( file, StringComparison.CurrentCultureIgnoreCase );

					bool isTaxonomyReference = localTaxonomyFiles.Contains( file.ToLower() );

					if( isInstance || isTaxonomy || isTaxonomyReference )
					{
						//add the filing package files to the filing summary
						this.currentFilingSummary.InputFiles.Add( file );
					}
					else
					{
						goto default;
					}
					break;
				default:
					if( reportDirectoryExists && FilingSummary.IsEdgarAttachmentFile( file ) )
					{
						//copy the resource into the reports directory
						string copyTo = Path.Combine( this.currentReportDirectory, file );
						FileUtilities.Copy( path, copyTo );
						this.currentFilingSummary.SupplementalFiles.Add( file );
					}
					break;
			}
		}
	}

	this.currentFilingSummary.SetStatistics( currentInstanceStat.NumberOfEntities,
		currentInstanceStat.NumberOfContexts, currentInstanceStat.NumberOfSegments,
		currentInstanceStat.NumberOfScenarios, currentInstanceStat.NumberOfUnitRefs,
		currentInstanceStat.NumberOfElements, currentInstanceStat.HasFootnotes,
		currentInstanceStat.HasTuples );

	// Identify known base (GAAP-family) taxonomies among the dependent taxonomies.
	int errors = 0;
	ArrayList dependentTaxonomies = currentTaxonomy.GetDependantTaxonomies( false, out errors );
	foreach( string taxonomyName in dependentTaxonomies )
	{
		string name = Path.GetFileNameWithoutExtension( taxonomyName );

		//check for old and new GAAP taxonomies
		//TODO: Exract method "IsIMBased"
		if( name.IndexOf( "gaap-ci" ) >= 0 || name.IndexOf( "gaap-im" ) >= 0 ||
			name.IndexOf( "usfr-ime" ) >= 0 || name.IndexOf( "usfr-fste" ) >= 0 ||
			name.IndexOf( "gaap-basi" ) >= 0 || name.IndexOf( "gaap-ins" ) >= 0 ||
			name.IndexOf( "gaap-bd" ) >= 0 || name.IndexOf( "gaap-re" ) >= 0 ||
			name.IndexOf( "usfr-ar" ) >= 0 || name.IndexOf( "usfr-mr" ) >= 0 ||
			name.IndexOf( "usfr-seccert" ) >= 0 || name.IndexOf( "usfr-mda" ) >= 0 ||
			name.IndexOf( "cistm" ) >= 0 || name.IndexOf( "mdastm" ) >= 0 ||
			name.IndexOf( "mda-" ) >= 0 || name.IndexOf( "usgaap-" ) >= 0 )
		{
			this.currentFilingSummary.BaseTaxonomies.Add( name );
			// First match wins as the full path of the base taxonomy.
			if( this.currentFilingSummary.BaseTaxonomyFullPath == null )
				this.currentFilingSummary.BaseTaxonomyFullPath = taxonomyName;
		}
	}

	this.currentFilingSummary.BaseTaxonomies.Sort();
	if( this.currentFilingSummary.BaseTaxonomies.Count == 0 ) //need to go one more level down
	{
		// NOTE(review): these loads' results (t1, error counts) are discarded; it is
		// unclear what this loop accomplishes beyond side effects of Load — TODO confirm.
		foreach( string taxonomyFilePath in dependentTaxonomies )
		{
			Taxonomy t1 = new Taxonomy();
			t1.Load( taxonomyFilePath, false );
		}
	}

	this.currentFilingSummary.HasCalculationLinkbase = currentTaxonomy.HasCalculation;
	this.currentFilingSummary.HasPresentationLinkbase = currentTaxonomy.HasPresentation;
}
/// <summary>
/// <para>Performs the "building of reports" based on the <paramref name="instancePath"/> and <paramref name="taxonomy"/> provided.</para>
/// <para>As a result, <paramref name="filingSummary"/>.<see>MyReports</see> (<see cref="FilingSummary"/>) will be populated with instances of <see cref="ReportHeader"/>,</para>
/// <para>and <paramref name="filingSummary"/>.<see>MyReports</see> will be XML serialized to <paramref name="filingSummaryPath"/></para>
/// <para>If an error occurs, <paramref name="error" /> will be populated.</para>
/// </summary>
/// <param name="instancePath">The path to the instance document.</param>
/// <param name="taxonomy">The loaded and parsed taxonomy object.</param>
/// <param name="filingSummaryPath">The path where the generated <see cref="FilingSummary"/> object should be saved.</param>
/// <param name="reportDirectory">The path where the generated content is saved.</param>
/// <param name="filingSummary">The <see cref="FilingSummary"/> object to populate.</param>
/// <param name="error">The error message for any critical errors which might occur.</param>
/// <returns>True on success or false for fail.</returns>
public bool BuildReports( string instancePath, Taxonomy taxonomy, string filingSummaryPath, string reportDirectory, out FilingSummary filingSummary, out string error )
{
	error = string.Empty;
	filingSummary = null;
	DateTime dtStart = DateTime.Now;
	try
	{
		this.currentFilingSummary = new FilingSummary();
		this.currentInstancePath = instancePath;
		this.currentTaxonomy = taxonomy;
		this.currentReportDirectory = reportDirectory;
		if( !this.ValidateSettings( out error ) )
			return false;

		//create the reports directory so that defnref can be generated
		if( !Directory.Exists( this.currentReportDirectory ) )
			Directory.CreateDirectory( this.currentReportDirectory );

		if( !string.IsNullOrEmpty( this.CurrencyMappingFile ) )
			this.LoadCurrencies();

		//set up this symbol for reuse throughout
		InstanceUtils.USDCurrencySymbol = InstanceUtils.GetCurrencySymbolFromCode( InstanceUtils.USDCurrencyCode );

		if( string.IsNullOrEmpty( this.preferredLanguage ) )
			this.preferredLanguage = this.GetPreferredLanguage();

		bool isIMBased = this.CheckIsIMBased();

		//DateTime startInstance = DateTime.Now;
		ArrayList errors = null;
		if( !InstanceUtils.TryLoadInstanceDocument( this.currentInstancePath, out this.currentInstance, out errors ) )
		{
			// Join all parser messages, then keep only the text before the first
			// stack-trace-like " at" marker for a readable error message.
			string[] arrErrors = new string[ errors.Count ];
			for( int i = 0; i < errors.Count; i++ )
			{
				arrErrors[ i ] = ( (ParserMessage)errors[ i ] ).Message;
			}

			string instanceDocErrors = string.Join( "\r\n ", arrErrors );
			Regex splitPoint = new Regex( @"\S\s+at" );
			arrErrors = splitPoint.Split( instanceDocErrors );
			instanceDocErrors = arrErrors[ 0 ];

			error = "Unable to load the instance document:\r\n " + instanceDocErrors;
			return false;
		}

		ArrayList taxonomies = new ArrayList();
		taxonomies.Add( this.currentTaxonomy );
		this.currentInstance.FixPrefixInInstanceDocument( taxonomies );

		//this.currentFilingSummary.InstanceLoadTime = DateTime.Now - startInstance;
		//this.currentFilingSummary.FactCount = this.currentInstance.markups.Count;

		this.PopulateMarkupDictionaries();
		this.BuildFilingSummary();
		this.FireBuildReportsProcessing();
		this.currentFilingSummary.MyReports.Clear();

		// One report per top-level presentation node, excluding configured reports.
		ArrayList topNodes = currentTaxonomy.GetNodesByPresentation( false, this.ExcludedReports );
		foreach( Node topNode in topNodes )
		{
			InstanceReport report = null;
			try
			{
				if( this.BuildReport( topNode, out report ) )
				{
					if( report.IsEmbedReport || report.HasEmbeddedReports )
					{
						this.roleAxes[ report.RoleURI ] = report.AxisByPresentation;
						this.roleAxisDefaults[ report.RoleURI ] = report.AxisMemberDefaults;
						this.roleAxisMembers[ report.RoleURI ] = report.AxisMembersByPresentation;
					}

					ReportHeader header = this.currentFilingSummary.AddReport( report );
					this.ApplyRulesToReport( report );

					// Unsubscribe-then-subscribe guards against double subscription.
					report.OnRuleProcessing -= this.OnRuleProcessing;
					report.OnRuleProcessing += this.OnRuleProcessing;
					report.OnRuleProcessed -= this.OnRuleProcessed;
					report.OnRuleProcessed += this.OnRuleProcessed;

					report.UnitDictionary = this.unitDictionary;
					string fullPath = Path.Combine( this.currentReportDirectory, header.XmlFileName );
					report.BuildXMLDocument( fullPath, true, isIMBased, this.currentFilingSummary );
					report.UnitDictionary = null;

					report.OnRuleProcessing -= this.OnRuleProcessing;
					report.OnRuleProcessed -= this.OnRuleProcessed;

					if( header.HasEmbeddedReports )
						this.reportsWithEmbeds.Add( header );
				}
			}
			finally
			{
				if( report != null )
					report.Dispose();
			}
		}

		//Build Missing Elements (Uncategorized) Report before we flush the markups
		InstanceReport missingReport;
		if( this.BuildMissingDataReport( out missingReport ) )
		{
			ReportHeader uncatHeader = this.currentFilingSummary.AddReport( missingReport );
			uncatHeader.XmlFileName = InstanceUtils.TOP_LEVEL_REPORT_INDICATOR + _missingReportIndex + ".xml";
		}

		//Free up some resources
		this.currentInstance = null;
		if( this.internalReports.Count == 0 )
		{
			this.currentTaxonomy.Close();
			this.currentTaxonomy = null;
		}

		//clear the dictionaries after checking internalReports
		this.ClearMarkupDictionaries();

		#region Clean up Columns

		//if the company has filed earning release, do not touch the reports.
		//Based on request from SEC, do not remove any reports if the base taxonomy is the new GAAP taxonomy (2208)
		if( !this.HasEarningRelease() )
		{
			//DO NOT REMOVE - These are used in rule processing
			isGAAP2005 = this.TaxonomyIsGAAP2005();
			isNewGAAP = ( !isGAAP2005 );

			ProcessFlowThroughColumnsReports();
		}

		#endregion

		#region Build All Reports

		//Build book
		ReportHeader r1 = new ReportHeader();
		r1.LongName = _allReports;
		r1.ShortName = _allReports;
		r1.ReportType = ReportHeaderType.Book;
		// The book becomes the default only when no other report exists yet.
		r1.IsDefault = this.currentFilingSummary.MyReports.Count == 0;
		this.currentFilingSummary.AddReports( r1 );

		#endregion

		#region Process Embeeded Reports

		bool isRuleEnabled = this.FireRuleProcessing( RulesEngineUtils.EMBED_REPORTS_RULE );
		if( isRuleEnabled && this.reportsWithEmbeds.Count > 0 )
		{
			// Reports consumed by embedding are removed from the summary and their
			// generated XML files deleted from disk.
			List<ReportHeader> embedReports = this.ProcessEmbeddedReports();
			if( embedReports.Count > 0 )
			{
				foreach( ReportHeader embedReport in embedReports )
				{
					if( this.currentFilingSummary.MyReports.Contains( embedReport ) )
					{
						this.currentFilingSummary.MyReports.Remove( embedReport );
						string reportNameToDelete = Path.Combine( this.currentReportDirectory, embedReport.XmlFileName );
						if( File.Exists( reportNameToDelete ) )
							File.Delete( reportNameToDelete );
					}
				}
			}

			this.FireRuleProcessed( RulesEngineUtils.EMBED_REPORTS_RULE );
		}

		#endregion

		#region Generate Excel Workbook

		isRuleEnabled = this.FireRuleProcessing( RulesEngineUtils.GENERATE_EXCEL_RULE );
		if( isRuleEnabled )
		{
			string excelReportName = "Financial_Report";
			bool processed = ExcelUtility.GenerateExcelWorkbook( this.currentFilingSummary, this.currentReportDirectory, excelReportName );
			this.FireRuleProcessed( RulesEngineUtils.GENERATE_EXCEL_RULE );
			if( !processed )
			{
				error = "Failed to generate Excel Workbook for report: " + this.currentReportDirectory + " " + excelReportName;
				return false;
			}
		}

		#endregion

		#region Generate Output Formats

		if( ( this.ReportFormat & ReportFormat.Html ) == ReportFormat.Html )
		{
			if( !this.GenerateHtmlFiles() )
			{
				//The error was logged to the tracer
			}
		}

		if( ( this.ReportFormat & ReportFormat.Xml ) == ReportFormat.Xml )
		{
			//this format already exists
		}
		else
		{
			// this.currentFilingSummary.TraceInformation( "Information: Report Formart does not include 'Xml'. Removing references and files." );
			// XML output not requested: delete the intermediate XML files (which were
			// needed to produce HTML) and clear their references.
			foreach( ReportHeader rh in this.currentFilingSummary.MyReports )
			{
				if( !string.IsNullOrEmpty( rh.HtmlFileName ) )
				{
					string deleteFile = Path.Combine( this.currentReportDirectory, rh.XmlFileName );
					if( File.Exists( deleteFile ) )
						File.Delete( deleteFile );

					rh.XmlFileName = null;
				}
			}
		}

		#endregion

		#region Check and fix default report

		//Recheck the default report just in case it was deleted
		bool foundDefault = false;
		foreach( ReportHeader header in this.currentFilingSummary.MyReports )
		{
			if( header.IsDefault )
			{
				foundDefault = true;
				break;
			}
		}

		if( !foundDefault )
		{
			// Prefer the balance sheet; the book report header also qualifies.
			foreach( ReportHeader header in this.currentFilingSummary.MyReports )
			{
				if( header.IsBalanceSheet() || header.ReportType == ReportHeaderType.Book )
				{
					header.IsDefault = true;
					break;
				}
			}
		}

		#endregion

		return true;
	}
	catch( Exception ex )
	{
		error = string.Format( "Exception thrown in BuildReports: {0}", ex.Message );
		return false;
	}
	finally
	{
		// Always fires events, stamps the elapsed time, and serializes the summary —
		// even on the failure paths above.
		this.FireBuildReportsProcessed();

		this.currentFilingSummary.ProcessingTime = DateTime.Now - dtStart;
		this.currentFilingSummary.SaveAsXml( filingSummaryPath );
		filingSummary = this.currentFilingSummary;
		this.currentFilingSummary = null;

		if( this.currentTaxonomy != null )
			this.currentTaxonomy.Close();

		this.currentTaxonomy = null;
	}
}
/// <summary>
/// Adds (or updates) a parent/child presentation relationship between two locators,
/// creating either locator on demand from the taxonomy's element table.
/// </summary>
/// <param name="elementId">Href/id of the child element. Must not be null.</param>
/// <param name="parentElementId">Href/id of the parent element; may be null or empty for a root-level arc.</param>
/// <param name="newLocatorRelationshipInfo">Relationship details (order, priority, etc.) to attach to the arc.</param>
/// <param name="taxonomy">Taxonomy used to resolve element ids to <see cref="Element"/> objects.</param>
/// <returns>Always true.</returns>
public bool UpdateArc(string elementId, string parentElementId, LocatorRelationshipInfo newLocatorRelationshipInfo, Taxonomy taxonomy)
{
    // BUG FIX: the original indexed `locators[parentElementId]` before checking for
    // null; a Hashtable/HybridDictionary indexer throws ArgumentNullException on a
    // null key. Only perform the lookup when an id was actually supplied.
    PresentationLocator parentLocator = null;
    if (parentElementId != null)
    {
        parentLocator = locators[parentElementId] as PresentationLocator;
    }
    if (parentLocator == null && !string.IsNullOrEmpty(parentElementId))
    {
        parentLocator = new PresentationLocator();
        parentLocator.href = parentElementId;
        parentLocator.MyElement = taxonomy.allElements[parentElementId] as Element;
        locators[parentElementId] = parentLocator;
    }

    PresentationLocator childLocator = locators[elementId] as PresentationLocator;
    if (childLocator == null)
    {
        childLocator = new PresentationLocator();
        childLocator.href = elementId;
        childLocator.MyElement = taxonomy.allElements[elementId] as Element;
        if (parentLocator != null)
        {
            childLocator.AddParent(parentLocator);
        }
        locators[elementId] = childLocator;
    }

    if (parentLocator != null)
    {
        if (parentLocator.childLocatorsByHRef == null)
        {
            parentLocator.childLocatorsByHRef = new HybridDictionary();
        }

        // Merge into an existing child entry if present; otherwise create a new one.
        ChildPresentationLocator cpl = parentLocator.childLocatorsByHRef[elementId] as ChildPresentationLocator;
        if (cpl != null)
        {
            cpl.AddRelationship(newLocatorRelationshipInfo);
        }
        else
        {
            cpl = new ChildPresentationLocator(elementId, newLocatorRelationshipInfo);
            parentLocator.childLocatorsByHRef[elementId] = cpl;
        }
    }

    return true;
}
/// <summary>
/// Loads and parses a taxonomy, collecting any parse errors into <paramref name="errorMsg"/>.
/// Calculation-linkbase errors are tolerated: the method still returns true when every
/// reported error is a calc error.
/// </summary>
/// <param name="taxonomyPath">Path or URL of the taxonomy schema to load.</param>
/// <param name="remoteFileCachePolicy">Cache policy applied to remote file retrieval.</param>
/// <param name="currentTaxonomy">Receives the loaded taxonomy object (created even on failure).</param>
/// <param name="numberErrors">Receives the count of load/parse errors.</param>
/// <param name="errorMsg">Receives an aggregated, human-readable error message (empty on success).</param>
/// <returns>True when the taxonomy is usable (no errors, or only calc errors); false otherwise.</returns>
public static bool TryLoadTaxonomy( string taxonomyPath, RequestCacheLevel remoteFileCachePolicy, //XmlCatalogResolver xmlCatalog,
	out Taxonomy currentTaxonomy, out int numberErrors, out string errorMsg)
{
	errorMsg = string.Empty;
	currentTaxonomy = new Taxonomy();
	currentTaxonomy.PromptUser = false;
	currentTaxonomy.CachePolicy = remoteFileCachePolicy;
	//currentTaxonomy.XmlCatalog = xmlCatalog;

	try
	{
		numberErrors = currentTaxonomy.Load(taxonomyPath, false);
		if (numberErrors == 0)
		{
			currentTaxonomy.Parse(out numberErrors);
		}
	}
	catch (XPathException)
	{
		// Thrown when a dependent taxonomy file cannot be resolved during parsing.
		numberErrors = 1;
		errorMsg = "Error parsing the taxonomy: Unable to find one or more of the dependent taxonomy files for taxonomy " + taxonomyPath;
		return false;
	}

	// ignore calc linkbase errors - don'care
	if (numberErrors != 0 && currentTaxonomy.ErrorList.Count > 0)
	{
		Console.WriteLine( " Pres Errors: " + currentTaxonomy.NumPresErrors );
		Console.WriteLine( " Calc Errors: " + currentTaxonomy.NumCalcErrors );
		Console.WriteLine( " Label Errors: " + currentTaxonomy.NumLabelErrors );
		Console.WriteLine( " Reference Errors: " + currentTaxonomy.NumReferenceErrors );

		currentTaxonomy.ErrorList.Sort();
		try
		{
			foreach ( ParserMessage pm in currentTaxonomy.ErrorList )
			{
				if ( pm.Level != TraceLevel.Error )
				{
					break; // all the errors should be first after sort
				}

				errorMsg += pm.Message + Environment.NewLine;
				Console.WriteLine( pm.Level.ToString() + ": " + pm.Message );
			}

			errorMsg = "Error parsing the taxonomy: "+ errorMsg.Trim();
		}
		//Do nothing. Error wasn't written to the event log.
		catch
		{ }

		// don't care about calc errors - if it's anything else, bomb out
		// NOTE(review): this assumes every error is a calc error when the counts
		// match — a mix with the same total would slip through; verify intended.
		if ( numberErrors != currentTaxonomy.NumCalcErrors )
		{
			return false;
		}
	}

	return true;
}
/// <summary>
/// Crawls <paramref name="currentNode"/> recursively checking for <see cref="Node"/>s where <paramref name="currentNode"/>.<see>MyElement</see>.<see>IsDimensionItem</see>"/> is true.
/// </summary>
/// <param name="currentTaxonomy">The <see cref="Taxonomy"/> containing <paramref name="currentNode"/>, used to look up the default member of an axis.</param>
/// <param name="currentNode">The current <see cref="Node"/>. When called, this should be a top-level node from the presentation taxonomy.</param>
/// <param name="axisName">The axis for any dimensions found. Null or empty until a dimension is found.</param>
/// <param name="axisByPresentation">A list of axes to be populated.</param>
/// <param name="axisMembersByPresentation">A list of dimensions to be populated, grouped and key by axis.</param>
private static void GetDimensions( Taxonomy currentTaxonomy, Node currentNode, string axisName,
	ref List<string> axisByPresentation, ref Dictionary<string, List<Segment>> axisMembersByPresentation )
{
	if( string.IsNullOrEmpty( axisName ) )
	{
		// Still searching for an axis: once a dimension item is found, recurse into
		// it with its own id as the axis name; otherwise keep descending.
		if( currentNode.MyElement.IsDimensionItem() )
		{
			GetDimensions( currentTaxonomy, currentNode, currentNode.Id, ref axisByPresentation, ref axisMembersByPresentation );
		}
		else if( currentNode.HasChildren )
		{
			foreach( Node childNode in currentNode.Children )
			{
				GetDimensions( currentTaxonomy, childNode, null, ref axisByPresentation, ref axisMembersByPresentation );
			}
		}
	}
	else
	{
		//this is a dimension

		// - add it to the list
		if( !axisByPresentation.Contains( axisName ) )
			axisByPresentation.Add( axisName );

		// - collect the members in order
		if( !axisMembersByPresentation.ContainsKey( axisName ) )
			axisMembersByPresentation[ axisName ] = new List<Segment>();

		// The axis's default member (if any) is flagged on the matching segment below.
		string defaultMember = null;
		if( currentTaxonomy != null && currentTaxonomy.NetDefinisionInfo != null )
			currentTaxonomy.NetDefinisionInfo.TryGetDefaultMember( axisName, out defaultMember );

		if( currentNode.HasChildren )
		{
			foreach( Node childNode in currentNode.Children )
			{
				// One segment per member, in presentation order.
				Segment newSeg = new Segment
				{
					DimensionInfo = new ContextDimensionInfo
					{
						dimensionId = axisName,
						Id = childNode.Id
					},
					ValueName = childNode.Label,
					ValueType = axisName
				};

				//always store the default member
				if( string.Equals( defaultMember, childNode.Id ) )
					newSeg.IsDefaultForEntity = true;

				axisMembersByPresentation[ axisName ].Add( newSeg );

				// Nested members stay on the same axis.
				if( childNode.HasChildren )
					GetDimensions( currentTaxonomy, childNode, axisName, ref axisByPresentation, ref axisMembersByPresentation );
			}
		}
	}
}
private void ValidateInstanceDoc( Taxonomy tax, string fileName, int countErrors, Hashtable prefixXRef ) { Instance ins = new Instance(); ArrayList errs; if( !ins.TryLoadInstanceDoc( fileName, out errs )) { Assert.Fail( "Failed to load instance document" + fileName); } foreach( MarkupProperty mp in ins.markups ) { if ( prefixXRef[mp.elementPrefix] != null ) { string realPrefix = prefixXRef[mp.elementPrefix] as string; mp.elementPrefix = realPrefix; mp.elementId = string.Format(DocumentBase.ID_FORMAT, mp.elementPrefix, mp.elementName); } } string[] validationErrors; tax.ValidateInstanceInformationForRequiresElementCheck( ins, out validationErrors ); Assert.IsNotNull( validationErrors , "Validation errors object should not be null"); foreach( string str in validationErrors ) { Console.WriteLine( str ); } Assert.AreEqual( countErrors, validationErrors.Length,"Failed to ValidateInstanceInformationForRequiresElementCheck"); }
private void MergeLinkbaseFileInfos(Taxonomy tax) { this.linkbaseFileInfos.AddRange(tax.linkbaseFileInfos); }
private void MergeRoleRefsFromTaxonomy( Taxonomy depTax ) { foreach ( KeyValuePair<string, RoleRef> kvp in depTax.roleRefs ) { RoleRef rr; if ( this.roleRefs.TryGetValue( kvp.Key, out rr ) ) { rr.MergeFileReferences( kvp.Value.GetFileReferences() ); } else { roleRefs[kvp.Key] = kvp.Value; } } }
/// <summary>
/// Clones all the information the taxonomy needs so that this taxonomy can be used as a
/// dependent taxonomy and imported into another. When a taxonomy is imported, its pointers
/// get mixed with the pointers in the main taxonomy and its information gets modified;
/// cloning first protects the original. Information that merging never modifies is copied
/// shallowly.
/// </summary>
/// <returns>The cloned <see cref="Taxonomy"/>.</returns>
public Taxonomy CopyTaxonomyForMerging()
{
    // NOTE(review): lock(this) is an anti-pattern (external code can take the same monitor),
    // but it is kept because other members of this class may synchronize on the instance.
    lock (this)
    {
        Taxonomy clone = new Taxonomy();

        clone.infos = new List<TaxonomyItem>(this.infos.Count);
        for (int i = 0; i < this.infos.Count; i++)
        {
            clone.infos.Add(this.infos[i].CloneTaxonomyItem());
        }

        clone.HasPresentation = this.HasPresentation;
        // BUG FIX: previously assigned this.HasPresentation to clone.HasCalculation
        // (copy-paste error), so a clone could claim calculation data it did not have.
        clone.HasCalculation = this.HasCalculation;
        clone.nsPrefix = this.nsPrefix;
        clone.targetNamespace = this.targetNamespace;
        clone.supportedLanguages.AddRange(this.supportedLanguages);
        if (this.allFiles != null)
        {
            clone.allFiles.AddRange(this.allFiles);
        }
        clone.labelRoles.AddRange(this.labelRoles);
        clone.currentLanguage = this.currentLanguage;
        clone.currentLabelRole = this.currentLabelRole;
        clone.aucentExtension = this.aucentExtension;
        clone.PromptUser = this.PromptUser;
        clone.schemaFilename = this.schemaFilename;
        clone.schemaFile = this.schemaFile;
        clone.schemaPath = this.schemaPath;
        clone.schemaPathURI = this.schemaPathURI;

        #region Copy Label
        if (this.labelHrefHash != null)
        {
            clone.tmplabelTable = new Hashtable();
            foreach (LabelLocator ll in this.labelHrefHash.Values)
            {
                clone.tmplabelTable[ll.Label] = ll.CreateCopyForMerging();
            }
        }
        #endregion

        #region Copy Calculation
        if (this.calculationInfo != null)
        {
            clone.calculationInfo = new Hashtable();
            foreach (DictionaryEntry de in this.calculationInfo)
            {
                PresentationLink pl = de.Value as PresentationLink;
                clone.calculationInfo[de.Key] = pl.CreateCopyForMerging();
            }
        }
        #endregion

        #region Copy Presentation
        if (this.presentationInfo != null)
        {
            clone.presentationInfo = new Hashtable();
            foreach (DictionaryEntry de in this.presentationInfo)
            {
                PresentationLink pl = de.Value as PresentationLink;
                clone.presentationInfo[de.Key] = pl.CreateCopyForMerging();
            }
        }
        #endregion

        #region Copy Definition
        if (this.netDefinisionInfo != null)
        {
            clone.netDefinisionInfo = netDefinisionInfo.CreateCopyForMerging();
        }
        #endregion

        #region Copy Elements
        // Since merging modifies the taxonomy item index, the elements must be deep-cloned.
        clone.allElements = new Hashtable();
        foreach (DictionaryEntry de in this.allElements)
        {
            clone.allElements[de.Key] = (de.Value as Element).CreateCopyForMerging();
        }
        // Fix the parent (tuple) relationships so they point at the cloned elements.
        foreach (Element ele in clone.allElements.Values)
        {
            ele.ResetTupleRelationship(clone.allElements);
        }
        foreach (DictionaryEntry de in customDataTypesHash)
        {
            clone.customDataTypesHash[de.Key] = de.Value;
        }
        foreach (KeyValuePair<string, RoleRef> kvp in this.roleRefs)
        {
            clone.roleRefs[kvp.Key] = kvp.Value;
        }
        foreach (KeyValuePair<string, RoleType> kvp in this.roleTypes)
        {
            clone.roleTypes[kvp.Key] = kvp.Value;
        }
        if (this.enumTable != null)
        {
            clone.enumTable = new Hashtable();
            foreach (DictionaryEntry de in enumTable)
            {
                clone.enumTable[de.Key] = de.Value;
            }
        }
        if (this.extendedDataMappings != null)
        {
            clone.extendedDataMappings = new Hashtable();
            foreach (DictionaryEntry de in extendedDataMappings)
            {
                clone.extendedDataMappings[de.Key] = de.Value;
            }
        }
        if (this.referenceTable != null)
        {
            clone.referenceTable = new Hashtable();
            foreach (DictionaryEntry de in referenceTable)
            {
                clone.referenceTable[de.Key] = de.Value;
            }
        }
        if (this.DirectDependantTaxonomies != null)
        {
            clone.DirectDependantTaxonomies = new List<string>();
            clone.DirectDependantTaxonomies.AddRange(this.DirectDependantTaxonomies);
        }
        if (this.presentationFile != null)
        {
            List<string> tmp = new List<string>(this.presentationFile);
            clone.presentationFile = tmp.ToArray();
        }
        if (this.calculationFile != null)
        {
            List<string> tmp = new List<string>(this.calculationFile);
            clone.calculationFile = tmp.ToArray();
        }
        if (this.labelFile != null)
        {
            List<string> tmp = new List<string>(this.labelFile);
            clone.labelFile = tmp.ToArray();
        }
        if (this.referenceFile != null)
        {
            List<string> tmp = new List<string>(this.referenceFile);
            clone.referenceFile = tmp.ToArray();
        }
        if (this.definitionFile != null)
        {
            List<string> tmp = new List<string>(this.definitionFile);
            clone.definitionFile = tmp.ToArray();
        }
        clone.dependantTaxonomyFilenames = new List<string>(this.dependantTaxonomyFilenames);
        #endregion

        // Shallow copy: this information is not modified by merging.
        #region shallow copy
        clone.theDocument = this.theDocument;
        clone.theManager = this.theManager;
        #endregion

        #region things not copied
        // Deliberately not cloned: error counters (numCalcErrors, numPresErrors,
        // numLabelErrors, numDefErrors, numRefErrors, numWarnings, numElementErrors),
        // OwnerHandle, errorList, dependantTaxonomies, validationErrors, validationWarnings.
        #endregion

        // Since this clone is going to be merged into another taxonomy,
        // it must be marked as an inner taxonomy.
        clone.innerTaxonomy = true;
        clone.isCopiedForMerging = true;
        return clone;
    }
}
/// <summary>
/// Determines which of the selected roles need target-role information added, to ensure an
/// entry point created with the new US GAAP taxonomy remains usable with respect to the
/// common dimensions.
/// </summary>
/// <param name="baseTaxonomyList">Taxonomies to inspect.</param>
/// <param name="selectedURIs">Role URIs the user selected.</param>
/// <param name="targetExts">Receives the target-dimension records that need to be written.</param>
/// <returns>True when at least one target-role record was collected.</returns>
public static bool DoesAnyOfTheSelectedRolesNeedTargetRole(Taxonomy[] baseTaxonomyList, List<string> selectedURIs, out List<Dimension.TargetDimensionInfo> targetExts )
{
    targetExts = new List<Dimension.TargetDimensionInfo>();
    Dictionary<string, DefinitionLink> rolesByCommonDimension = new Dictionary<string, DefinitionLink>();
    List<string> commonRoles = new List<string>();

    // Pass 1: collect the selected "common dimension" roles from each taxonomy.
    foreach (Taxonomy tax in baseTaxonomyList)
    {
        if (tax.allElements == null || tax.allElements.Count == 0) continue;
        if (tax.netDefinisionInfo == null) continue;
        // Make sure language/label role are usable before asking for display nodes.
        if (tax.CurrentLanguage == null)
        {
            if (tax.supportedLanguages.Count == 0) continue;
            tax.CurrentLanguage = tax.supportedLanguages[0] as string;
        }
        if (tax.currentLabelRole == null)
        {
            tax.currentLabelRole = PresentationLocator.preferredLabelRole;
        }
        Dictionary<string, DimensionNode> commonDimensionNodes;
        if (!tax.netDefinisionInfo.TryGetDimensionNodesForDisplay(tax.currentLanguage, tax.currentLabelRole,
            tax.presentationInfo, true, false, tax.roleTypes, out commonDimensionNodes))
        {
            continue;
        }
        foreach (KeyValuePair<string, DimensionNode> kvp in commonDimensionNodes)
        {
            if (!selectedURIs.Contains(kvp.Key)) continue; //this common dimension is not used...
            commonRoles.Add(kvp.Key);
        }
        if (commonRoles.Count == 0) continue; //no common dimension....
        tax.netDefinisionInfo.BuildSelectedCommonDimensionDictionary(commonRoles, commonDimensionNodes, ref rolesByCommonDimension);
    }

    // Pass 2: scan every taxonomy's full dimension set for roles needing a target role.
    foreach (Taxonomy tax in baseTaxonomyList)
    {
        // BUG FIX: mirror pass 1's guard — taxonomies without definition info previously
        // caused a NullReferenceException here.
        if (tax.netDefinisionInfo == null) continue;
        Dictionary<string, DimensionNode> allDimensionNodes;
        if (!tax.netDefinisionInfo.TryGetDimensionNodesForDisplay(tax.currentLanguage, tax.currentLabelRole,
            tax.presentationInfo, true, true, tax.roleTypes, out allDimensionNodes))
        {
            continue;
        }
        tax.netDefinisionInfo.DoesAnyOfTheSelectedRolesNeedTargetRole(selectedURIs, commonRoles,
            rolesByCommonDimension, allDimensionNodes, ref targetExts, tax);
    }
    return targetExts.Count > 0;
}
internal static void Serialize( Taxonomy currentTaxonomy, string fileName, out long FileSize ) { FileStream fs = new FileStream(fileName, FileMode.Create); BinaryFormatter formatter = new BinaryFormatter(); try { formatter.Serialize(fs, currentTaxonomy); FileSize = fs.Length; } catch (SerializationException e) { Console.WriteLine("Failed to serialize: " + e.Message); System.Diagnostics.Debug.WriteLine("Failed to serialize: " + e.Message); throw; } finally { fs.Close(); } }
/// <summary>
/// Loads, parses, serializes and walks the 2007-12-31 US-GAAP CI stm/dis/all taxonomy,
/// reporting the elapsed time of each phase and the serialized file size.
/// </summary>
public void Test_us_gaap_ci_stm_dis_all_2007_12_31()
{
    string fileName = TestCommon.FolderRoot + @"USFRTF-2007-11-21-prerelease\ind\ci" + System.IO.Path.DirectorySeparatorChar + "us-gaap-ci-stm-dis-all-2007-12-31.xsd";
    Taxonomy s = new Taxonomy();
    int errors = 0;
    DateTime startLoad = DateTime.Now;
    DateTime startLoadParse = DateTime.Now;
    if (s.Load(fileName, out errors) != true)
    {
        Assert.Fail((string)s.ErrorList[0]);
    }
    DateTime endLoad = DateTime.Now;
    DateTime startParse = DateTime.Now;
    s.Parse(out errors);
    DateTime endParse = DateTime.Now;
    DateTime endLoadParse = DateTime.Now;
    s.CurrentLanguage = "en";
    s.CurrentLabelRole = "preferredLabel";
    // `out` arguments need no initializer; previously `long FileSize = new long();`.
    long FileSize;
    DateTime startSerialize = DateTime.Now;
    Taxonomy.Serialize(s, @"C:\Aucent\TaxonomySerialized.bin", out FileSize);
    DateTime endSerialize = DateTime.Now;
    DateTime startGetNodes = DateTime.Now;
    // Result is unused; the call is timed to measure presentation-tree construction.
    ArrayList nodes = s.GetNodesByPresentation(true);
    DateTime endGetNodes = DateTime.Now;
    Console.WriteLine("Time to Load: {0}", endLoad - startLoad);
    Console.WriteLine("Time to Parse: {0}", endParse - startParse);
    Console.WriteLine("Time to Load/Parse: {0}", endLoadParse - startLoadParse);
    Console.WriteLine("Time to Serialize: {0}", endSerialize - startSerialize);
    Console.WriteLine("Time to GetNodes: {0}", endGetNodes - startGetNodes);
    Console.WriteLine("Serialized Filesize: {0}", FileSize);
}
private static void Deserialize( string fileName, out Taxonomy currentTaxonomy ) { FileStream fs = new FileStream(fileName, FileMode.Open); try { BinaryFormatter formatter = new BinaryFormatter(); currentTaxonomy = (Taxonomy) formatter.Deserialize(fs); } catch (SerializationException e) { Console.WriteLine("Failed to serialize: " + e.Message); System.Diagnostics.Debug.WriteLine("Failed to deserialize: " + e.Message); throw; } finally { fs.Close(); } }
public void TestLoadICI_RR_Taxonomy() { Taxonomy iciTaxonomy = new Taxonomy(); int numErrors = iciTaxonomy.Load(ICI_RR_SCHEMAFILE, false); Assert.AreEqual(0, numErrors, "Failed to load ICI-RR Taxonomy. " + numErrors + " errors were found"); Assert.IsTrue(iciTaxonomy.Parse(out numErrors), "Failed to parse the ICI-RR Taxonomy: " + numErrors + " errors were found"); }
internal bool DoesAnyOfTheSelectedRolesNeedTargetRole(List <string> selectedURIs, List <string> commonRoles, Dictionary <string, DefinitionLink> rolesByCommonDimension, Dictionary <string, DimensionNode> allDimensionNodes, ref List <Dimension.TargetDimensionInfo> targetExts, Taxonomy taxonomy) { foreach (string str in selectedURIs) { //if (commonRoles.Contains(str)) continue; //it is a common role.. no need to check for target role logic.. DimensionNode titleNode; if (!allDimensionNodes.TryGetValue(str, out titleNode)) { continue; //role doesnot have any dimension or is not part of this taxonomy } if (titleNode.children == null) { continue; } foreach (DimensionNode hypercubeNode in titleNode.children) { if (hypercubeNode.children == null) { continue; } foreach (DimensionNode dn in hypercubeNode.children) { if (dn.children == null) { continue; } //check if this is a common dimension node... DefinitionLink targetRole; if (!rolesByCommonDimension.TryGetValue(dn.Id, out targetRole)) { continue; } if (!targetRole.Role.Equals(str)) { foreach (DimensionNode dimDom in dn.children) { if (dimDom.children == null || dimDom.children.Count == 0) { //found a common dimension that does not have any children defined... TargetDimensionInfo tdi = new TargetDimensionInfo(); tdi.DimensionNode = dn; tdi.MemberNode = dimDom; tdi.TargetRole = targetRole; tdi.Taxonomy = taxonomy; targetExts.Add(tdi); } } } } } } return(targetExts.Count > 0); }
public bool UpdateOptionalArc(string elementId, string parentElementId, DefinitionLocatorRelationshipInfo newLocatorRelationshipInfo, Taxonomy taxonomy) { DefinitionLocator parentLocator = locators[parentElementId] as DefinitionLocator; newLocatorRelationshipInfo.IsProhibited = false; newLocatorRelationshipInfo.OrigOrder = newLocatorRelationshipInfo.Order; if (parentLocator == null && !string.IsNullOrEmpty(parentElementId)) { parentLocator = new DefinitionLocator(); parentLocator.href = parentElementId; parentLocator.MyElement = taxonomy.allElements[parentElementId] as Element; locators[parentElementId] = parentLocator; } DefinitionLocator childLocator = locators[elementId] as DefinitionLocator; if (childLocator == null) { childLocator = new DefinitionLocator(); childLocator.href = elementId; childLocator.MyElement = taxonomy.allElements[elementId] as Element; if (parentLocator != null) { childLocator.AddParent(parentLocator); } locators[elementId] = childLocator; } if (parentLocator != null) { if (parentLocator.childLocatorsByHRef == null) { parentLocator.childLocatorsByHRef = new HybridDictionary(); } ChildDefinitionLocator cpl = parentLocator.childLocatorsByHRef[elementId] as ChildDefinitionLocator; if (cpl != null) { cpl.AddRelationship(newLocatorRelationshipInfo); } else { cpl = new ChildDefinitionLocator(elementId, newLocatorRelationshipInfo); parentLocator.childLocatorsByHRef[elementId] = cpl; } } return true; }
private void MergeDiscoveredTaxonomy(string fn, Taxonomy depTax, out int numErrors ) { numErrors = 0; if (this.dependantTaxonomyFilenames.Contains(fn)) return; dependantTaxonomyFilenames.Add(fn); dependantTaxonomies.Add(depTax); this.infos.Add(new TaxonomyItem(depTax.GetTargetNamespace(), depTax.schemaFile, depTax.GetNSPrefix(), depTax.IsAucentExtension, depTax.DefinesCustomTypes)); ArrayList tmp; //merge the presentation, calculation, definition linkbases... if (!this.innerTaxonomy) { presentationInfo = MergePresentations(presentationInfo, depTax.presentationInfo, out tmp); depTax.presentationInfo = null; if (tmp.Count > 0) { numErrors += tmp.Count; errorList.AddRange(tmp); } calculationInfo = MergePresentations(calculationInfo, depTax.calculationInfo, out tmp); if (tmp.Count > 0) { numErrors += tmp.Count; errorList.AddRange(tmp); } if (this.netDefinisionInfo == null) { this.netDefinisionInfo = depTax.netDefinisionInfo; } else { netDefinisionInfo.MergeDimensionLinks(depTax.netDefinisionInfo, out tmp); if (tmp.Count > 0) { numErrors += tmp.Count; errorList.AddRange(tmp); } } } }
/// <summary>
/// Loads the US GAAP taxonomy from a web location and verifies that all 10 schema and
/// linkbase files are copied into the local application-data folder.
/// </summary>
public void TestSaveToLocalApplicationData()
{
    Taxonomy tx = new Taxonomy();
    int errors = 0;
    DateTime start = DateTime.Now;
    // we only save to local app data if it's a web file location...
    Assert.AreEqual( true, tx.Load( US_GAAP_WEB_FILE, out errors ), "Could not load US GAAP File" );
    Assert.AreEqual( 0, errors );
    tx.Parse(out errors);
    DateTime end = DateTime.Now;
    Console.WriteLine( "Parse Time: {0}", end-start );

    // Expecting 10 files to copy over (was 10 copy-pasted assertion blocks).
    string[] expectedFiles =
    {
        "usfr-pte-2005-02-28.xsd",
        "usfr-pte-2005-02-28-label.xml",
        "usfr-pte-2005-02-28-presentation.xml",
        "usfr-pte-2005-02-28-reference.xml",
        "usfr-ptr-2005-02-28.xsd",
        "usfr-ptr-2005-02-28-calculation.xml",
        "usfr-ptr-2005-02-28-presentation.xml",
        "us-gaap-ci-2005-02-28.xsd",
        "us-gaap-ci-2005-02-28-calculation.xml",
        "us-gaap-ci-2005-02-28-presentation.xml"
    };
    foreach (string expectedFile in expectedFiles)
    {
        FileInfo fi = new FileInfo( AucentGeneral.RivetApplicationDataDragonTagPath + System.IO.Path.DirectorySeparatorChar + expectedFile );
        Assert.IsTrue( fi.Exists, "File not found: " + fi.Name );
    }
}
private int MergeElements( Taxonomy childTaxonomy , Dictionary<string, int> taxItemIdDt ) { //sometimes the same element might have more than one key... //as the id does not match prefix_name //id is plural and name is singular in the example below... //<element name="OtherReceivablesBank" id="krfr-pte_OtherReceivablesBanks" type="xbrli:monetaryItemType" substitutionGroup="xbrli:item" nillable="true" xbrli:balance="debit" xbrli:periodType="instant" /> Dictionary<string, Element> idsChecked = new Dictionary<string, Element>(); foreach( DictionaryEntry de in childTaxonomy.allElements) { if (!this.allElements.ContainsKey(de.Key)) { Element ele = de.Value as Element; if (idsChecked.ContainsKey(ele.Id)) { Element otherEle; if (idsChecked.TryGetValue(ele.Id, out otherEle)) { ele.TaxonomyInfoId = otherEle.TaxonomyInfoId; } } else { ele.TaxonomyInfoId = taxItemIdDt[childTaxonomy.infos[ele.TaxonomyInfoId].WebLocation.ToLower()]; idsChecked[ele.Id] = ele; } this.allElements[de.Key] = ele; } } return 0; }
/// <summary>
/// Resolves and loads the dependent taxonomy referenced by an import/include, probing the
/// schema folder, the schemaLocation hint, the namespace and several combinations of them,
/// then merges the loaded taxonomy (and its own dependents) into this one's bookkeeping.
/// </summary>
/// <param name="filename">File name (possibly relative) of the dependent schema.</param>
/// <param name="hintpath">Optional schemaLocation hint; may be relative or absolute.</param>
/// <param name="ns">Target namespace of the dependent schema, used as a fallback base path.</param>
/// <param name="myErrors">Accumulates the number of load errors encountered here.</param>
private void AddDependantTaxonomy(string filename, string hintpath, string ns, ref int myErrors)
{
    // compare against ourself — a schema importing itself is fatal
    if (string.Compare(filename, schemaFilename, true) == 0)
    {
        throw new AucentFatalException("XBRLParser.Error.RecursiveTaxonomies");
    }
    string fileNameWithoutPath = Path.GetFileName(filename);
    // NOTE(review): `index` is captured here, but allFiles can be mutated further down
    // (t.allFiles shares this list during t.Load) before allFiles.Insert(~index, ...) runs;
    // confirm the insertion point is still valid on that path.
    int index = allFiles.BinarySearch(fileNameWithoutPath);
    if (index >= 0 || IsDependentTaxonomyLoaded(fileNameWithoutPath))
    {
        Console.WriteLine("Taxonomy.GetDependantTaxonomies (" + schemaFile + "): Skip: " + fileNameWithoutPath);
        return; //nothing to add as the file is already loaded.
    }
    if (TaxonomyCacheManager != null)
    {
        Taxonomy depTax = TaxonomyCacheManager.GetTaxonomyByFileName(fileNameWithoutPath);
        if (depTax != null)
        {
            allFiles.Insert(~index, fileNameWithoutPath);
            //we found the dependant taxonomy in the cache .. we need to copy it to
            //make sure that we are not messing with the original
            Taxonomy copyToAdd = depTax.CopyTaxonomyForMerging();
            if (copyToAdd.allFiles != null)
            {
                this.allFiles.AddRange(copyToAdd.allFiles);
                this.allFiles.Sort();
            }
            dependantTaxonomies.Add(copyToAdd);
            dependantTaxonomyFilenames.Add(copyToAdd.schemaFilename);
            return;
        }
    }
    string fn = filename;
    // Normalise separators so the concatenations below build forward-slash paths.
    string schemaFolder = schemaPath.Replace("\\", "/");
    if (!schemaFolder.EndsWith("/"))
    {
        schemaFolder = schemaFolder + "/";
    }
    if (hintpath != null)
    {
        hintpath = hintpath.Replace("\\", "/");
    }
    //hint could be relative.. in which case we need to combine it with the
    //schema location of the taxonomy...
    if (hintpath == null || hintpath.Length == 0 || hintpath.StartsWith("..") || hintpath.StartsWith("/"))
    {
        if (hintpath != null && hintpath.Length > 0)
        {
            fn = schemaFolder + hintpath + "/" + filename;
        }
        else
        {
            fn = schemaFolder + filename;
        }
    }
    else if (hintpath != null)
    {
        if (hintpath.EndsWith("/"))
        {
            fn = hintpath + filename;
        }
        else
        {
            fn = hintpath + "/" + filename;
        }
    }
    bool local = false;
    DateTime lastModified = DateTime.MinValue;
    bool URLExists;
    // Remember every candidate path already probed so it is not probed twice.
    Dictionary<string, bool> filesAlreadyChecked = new Dictionary<string, bool>();
    bool isValid = ValidateFileExistance(fn, false, out local, out fn, out lastModified, out URLExists);
    filesAlreadyChecked[fn] = true;
    if (!isValid && Directory.Exists(schemaFolder))
    {
        fn = schemaFolder + "/" + filename;
        isValid = ValidateFileExistance(fn, false, out local, out fn, out lastModified, out URLExists);
        filesAlreadyChecked[fn] = true;
        if (!isValid)
        {
            fn = schemaFolder + hintpath + "/" + filename;
            isValid = ValidateFileExistance(fn, false, out local, out fn, out lastModified, out URLExists);
            filesAlreadyChecked[fn] = true;
        }
    }
    // NOTE(review): `continue` below retries the loop when a validated path fails to load,
    // but isValid stays true so the same fn is retried — confirm this cannot spin forever.
    while (true)
    {
        #region Check other locations if we cannot find the file in the location it is supposed to be in
        //check the following locations
        //ns + filename
        //hintpath + file name
        //parent location + hint path + filename
        //parent location + filename
        //current schema path + hint path + file name
        //current schema path + file name
        //ns + hint path + filename
        //just file name
        //isolated storage
        bool localPromptUser = false;
        if (!isValid)
        {
            //hintpath + file name
            fn = Aucent.MAX.AXE.Common.Utilities.AucentGeneral.AppendFileNameToSchemaPath(hintpath, filename);
            if (!filesAlreadyChecked.ContainsKey(fn))
            {
                isValid = ValidateFileExistance(fn, false, out local, out fn, out lastModified, out URLExists);
                filesAlreadyChecked[fn] = true;
            }
        }
        if (!isValid)
        {
            //parent location + hint path + filename
            fn = Aucent.MAX.AXE.Common.Utilities.AucentGeneral.AppendFileNameToSchemaPath(schemaPath + hintpath, filename);
            if (!filesAlreadyChecked.ContainsKey(fn))
            {
                isValid = ValidateFileExistance(fn, false, out local, out fn, out lastModified, out URLExists);
                filesAlreadyChecked[fn] = true;
            }
        }
        if (!isValid)
        {
            //parent location + filename
            fn = Aucent.MAX.AXE.Common.Utilities.AucentGeneral.AppendFileNameToSchemaPath(schemaPath, filename);
            if (!filesAlreadyChecked.ContainsKey(fn))
            {
                isValid = ValidateFileExistance(fn, false, out local, out fn, out lastModified, out URLExists);
                filesAlreadyChecked[fn] = true;
            }
        }
        if (!isValid)
        {
            //ns + filename
            fn = Aucent.MAX.AXE.Common.Utilities.AucentGeneral.AppendFileNameToSchemaPath(ns, filename);
            // NOTE(review): the inner !isValid repeats the outer guard and is redundant.
            if (!isValid && !filesAlreadyChecked.ContainsKey(fn))
            {
                isValid = ValidateFileExistance(fn, false, out local, out fn, out lastModified, out URLExists);
                filesAlreadyChecked[fn] = true;
            }
        }
        if (!isValid)
        {
            //current schema path + hint path + file name
            string path2 = string.Empty;
            if (local)
                path2 = schemaPath + Path.DirectorySeparatorChar + hintpath;
            else
                path2 = schemaPath + "/" + hintpath;
            fn = Aucent.MAX.AXE.Common.Utilities.AucentGeneral.AppendFileNameToSchemaPath(path2, filename);
            if (!filesAlreadyChecked.ContainsKey(fn))
            {
                isValid = ValidateFileExistance(fn, false, out local, out fn, out lastModified, out URLExists);
                filesAlreadyChecked[fn] = true;
            }
        }
        if (!isValid)
        {
            //current schema path + file name (same candidate as "parent location + filename" above;
            //kept as-is, the filesAlreadyChecked guard makes it a no-op)
            fn = Aucent.MAX.AXE.Common.Utilities.AucentGeneral.AppendFileNameToSchemaPath(schemaPath, filename);
            if (!filesAlreadyChecked.ContainsKey(fn))
            {
                isValid = ValidateFileExistance(fn, false, out local, out fn, out lastModified, out URLExists);
                filesAlreadyChecked[fn] = true;
            }
        }
        if (!isValid)
        {
            //ns + hint path + filename
            string path2 = string.Empty;
            if (local)
                path2 = ns + Path.DirectorySeparatorChar + hintpath;
            else
                path2 = ns + "/" + hintpath;
            fn = Aucent.MAX.AXE.Common.Utilities.AucentGeneral.AppendFileNameToSchemaPath(path2, filename);
            if (!filesAlreadyChecked.ContainsKey(fn))
            {
                isValid = ValidateFileExistance(fn, false, out local, out fn, out lastModified, out URLExists);
                filesAlreadyChecked[fn] = true;
            }
        }
        if (!isValid)
        {
            //just file name
            fn = filename;
            if (!filesAlreadyChecked.ContainsKey(fn))
            {
                isValid = ValidateFileExistance(fn, false, out local, out fn, out lastModified, out URLExists);
                filesAlreadyChecked[fn] = true;
            }
        }
        if (!isValid)
        {
            // Every candidate failed: fall back to the bare name and let Load prompt the user.
            localPromptUser = true;
            // don't bother to test this one - we're done trying
            fn = filename;
        }
        Console.WriteLine("Taxonomy.GetDependantTaxonomies (" + schemaFile + "): Load: " + fn);
        #endregion
        Taxonomy t = new Taxonomy(true);
        t.PromptUser = this.PromptUser;
        t.OwnerHandle = OwnerHandle;
        //set the loading tax to the current tax or its loading tax object...
        t.loadingTaxonomy = GetLoadingTaxonomy();
        // continue adding all files — note this SHARES the list, it is not a copy
        t.allFiles = allFiles;
        if (this.TopLevelTaxonomy != null)
        {
            t.TopLevelTaxonomy = this.TopLevelTaxonomy;
        }
        else
        {
            t.TopLevelTaxonomy = this;
        }
        try
        {
            if (t.Load(fn, localPromptUser && t.PromptUser) != 0)
            {
                if (!localPromptUser)
                {
                    //we found a valid file that does not exist....
                    //lets see if we can find a different file this time
                    continue;
                }
                else
                {
                    Common.WriteError("XBRLParser.Error.CantLoadFilename", errorList, fn, " to find the file");
                    ++myErrors;
                    // Surface the most recent recorded error (string or ParserMessage) as
                    // the fatal-exception text.
                    string errorMsg = null;
                    if (errorList != null && errorList.Count > 0)
                    {
                        errorMsg = errorList[errorList.Count - 1] as string;
                        if (errorMsg == null && errorList[errorList.Count - 1] is ParserMessage)
                        {
                            ParserMessage msg = errorList[errorList.Count - 1] as ParserMessage;
                            if (msg != null)
                            {
                                errorMsg = msg.Message;
                            }
                        }
                    }
                    else
                    {
                        errorMsg = string.Format("Failed to load file {0}.", fn);
                    }
                    throw new AucentFatalException(errorMsg);
                }
            }
        }
        catch (XmlException xe)
        {
            Common.WriteError("XBRLParser.Error.CantLoadFilename", errorList, fn, xe.Message);
            ++myErrors;
            return;
        }
        if (BuildTaxonomyRelationship)
        {
            if (DirectDependantTaxonomies == null)
                DirectDependantTaxonomies = new List<string>();
            if (!DirectDependantTaxonomies.Contains(fn))
            {
                DirectDependantTaxonomies.Add(fn);
            }
        }
        allFiles.Insert(~index, fileNameWithoutPath);
        dependantTaxonomyFilenames.Add(fn);
        dependantTaxonomies.Add(t);
        // Recursively pull in the dependent taxonomy's own dependents.
        int errors = t.LoadDependantTaxonomies(t.schemaPath);
        dependantTaxonomies.AddRange(t.dependantTaxonomies);
        dependantTaxonomyFilenames.AddRange(t.dependantTaxonomyFilenames);
        if (errors > 0)
        {
            // NOTE(review): this adds the whole error ArrayList as ONE element of
            // errorList; AddRange may have been intended — confirm before changing.
            errorList.Add(t.errorList);
            myErrors += errors;
        }
        break;
    }
}
public void TestSaveToLocalApplicationData() { string fileName = AucentGeneral.RivetApplicationDataDragonTagPath + System.IO.Path.DirectorySeparatorChar + "us-gaap-ci-2005-02-28.xsd"; if (File.Exists(fileName)) { File.Delete(fileName); } fileName = AucentGeneral.RivetApplicationDataDragonTagPath + System.IO.Path.DirectorySeparatorChar + "usfr-ptr-2005-02-28.xsd"; if (File.Exists(fileName)) { File.Delete(fileName); } Taxonomy tx = new Taxonomy(); int errors = 0; DateTime start = DateTime.Now; Assert.AreEqual( true, tx.Load( "http://www.xbrl.org/us/fr/gaap/ci/2005-02-28/us-gaap-ci-2005-02-28.xsd", out errors ), "Could not load US GAAP File" ); Assert.AreEqual( 0, errors ); tx.Parse(out errors); DateTime end = DateTime.Now; Console.WriteLine( "Parse Time: {0}", end-start ); fileName = AucentGeneral.RivetApplicationDataDragonTagPath + System.IO.Path.DirectorySeparatorChar + "us-gaap-ci-2005-02-28.xsd"; Assert.IsTrue(File.Exists(fileName)); fileName = AucentGeneral.RivetApplicationDataDragonTagPath + System.IO.Path.DirectorySeparatorChar + "usfr-ptr-2005-02-28.xsd"; Assert.IsTrue(File.Exists(fileName)); }
public void TestCheckIsTextBlock() { //The original type is wrong, but the element name is right Element child1 = Element.CreateElement( DataTypeCode.String, "us-gaap_MortgageLoansOnRealEstateWriteDownOrReserveDisclosureTextBlock", true, PeriodType.duration ); child1.OrigElementType = "xbrli:stringItemType"; bool test1 = child1.IsTextBlock(); Assert.IsTrue( test1 ); //The original type is right child1 = Element.CreateElement( DataTypeCode.String, "child1", true, PeriodType.duration ); child1.OrigElementType = TEXT_BLOCK_ITEM_TYPE; bool test2 = child1.IsTextBlock(); Assert.IsTrue( test2 ); //Close, but not the same - should be false child1 = Element.CreateElement( DataTypeCode.String, "us-gaap_MortgageLoansOnRealEstateWriteDownOrReserveDisclosureTextBloc", true, PeriodType.duration ); child1.OrigElementType = "xbrli:stringItemType"; bool test3 = child1.IsTextBlock(); Assert.IsFalse( test3 ); //Not even close... child1 = Element.CreateElement( DataTypeCode.String, "us-gaap", true, PeriodType.duration ); child1.OrigElementType = "xbrli:positiveIntegerItemType"; bool test4 = child1.IsTextBlock(); Assert.IsFalse( test4 ); //Check a real element string fileName = "http://xbrl.us/us-gaap/1.0/ind/ci/us-gaap-ci-stm-2008-03-31.xsd"; int errors = 0; Taxonomy s = new Taxonomy(); if( s.Load( fileName, out errors ) != true ) { Assert.Fail( (string)s.ErrorList[ 0 ] ); } s.Parse( out errors ); Element child2 = s.allElements[ "us-gaap_MortgageLoansOnRealEstateWriteDownOrReserveDisclosureTextBlock" ] as Element; bool test5 = child2.IsTextBlock(); Assert.IsTrue( test5 ); child2 = s.allElements[ "us-gaap_AccountingChangesAndErrorCorrectionsTextBlock" ] as Element; bool test6 = child2.IsTextBlock(); Assert.IsTrue( test6 ); child2 = s.allElements["us-gaap_BankruptcyClaimsNumberClaimsFiled"] as Element; object parsed = new object(); string err; Assert.IsTrue(child2.TryValidateElement("1,000", ref parsed, out err), "Should be valid"); }
private string CreateMergedTaxonomyFileNameFromInstanceDocumentMultiple(Instance instanceDoc, bool promptUser, string baseHref) { StringBuilder taxonomyNameSpace = new StringBuilder(); StringBuilder prefix = new StringBuilder(); StringBuilder importStatement = new StringBuilder(); string linkbaseStatement = instanceDoc.GetEmbeddedLinkbaseInfo(); foreach (string loc in instanceDoc.schemaRefs) { string location = ApplyBaseHRef(baseHref, loc); Taxonomy t = new Taxonomy(); if (t.Load(location, promptUser) > 0) return string.Empty; t.GetNSPrefix(); prefix.Append(t.nsPrefix); taxonomyNameSpace.Append(t.TargetNamespace); string href = ApplyBaseHRef(baseHref, t.schemaFile); //CEE 2009-05-28: Clean up the & within "W R Grace & Company" //Remove 1 level of encoding, and then reapply it. href = href.Replace( "&", "&" ).Replace( "&", "&" ); importStatement.Append(string.Format(@"<import namespace=""{0}"" schemaLocation=""{1}"" />", t.TargetNamespace, href ) ); importStatement.Append(Environment.NewLine); } string fileName = "Temp_" + DateTime.Now.Ticks.ToString() + "_Merged.xsd"; fileName = Path.Combine(System.Environment.CurrentDirectory, fileName); string taxonomyFileInfo = string.Format(Taxonomy.WrapperTaxonomyTemplate, taxonomyNameSpace.ToString(), prefix.ToString(), importStatement.ToString(), linkbaseStatement); StreamWriter sw = new StreamWriter(fileName, false); sw.Write(taxonomyFileInfo); sw.Close(); return fileName; }