public void ShouldRemoveAndContainsReturnFalse()
{
    // Arrange: a set holding a single entry.
    var set = new StringSet();
    set.Add("Test");
    Assert.IsTrue(set.Contains("Test"));

    // Act: remove that entry.
    set.Remove("Test");

    // Assert: membership check must now fail.
    Assert.IsFalse(set.Contains("Test"));
}
} //method

// Ensures every named output column is unique: the first occurrence of a name
// is kept as-is; later duplicates receive a generated default alias.
protected void CheckAliases(TranslationContext context, IEnumerable<Expression> outExpressions)
{
    var sqlExprs = outExpressions.OfType<SqlExpression>().ToList();
    var seenNames = new StringSet();
    foreach (var expr in sqlExprs)
    {
        // Only column expressions carry an output name here.
        var column = expr as ColumnExpression;
        if (column == null)
            continue;
        var name = column.Alias ?? column.Name;
        if (name == null)
            continue;
        var isDuplicate = seenNames.Contains(name);
        seenNames.Add(name);
        if (isDuplicate)
            expr.Alias = CreateDefaultAlias(expr, seenNames);
    } //foreach expr
} //method
public override DbTable GetTableConstraints()
{
    // The schema collection gives us only foreign keys; SQLite 'supports' PKs
    // via auto-created indexes named 'sqlite_autoindex_*'. Scan the index
    // columns for such names and synthesize PRIMARY KEY constraint rows.
    var constraints = GetSchemaCollection("ForeignKeys");
    var indexColumns = GetIndexColumns();
    var tablesWithPk = new StringSet(); // one PK row per table
    foreach (DbRow indexRow in indexColumns.Rows)
    {
        var indexName = indexRow.GetAsString("INDEX_NAME");
        if (!IsPrimaryKeyIndex(indexName))
            continue;
        var tableName = indexRow.GetAsString("TABLE_NAME");
        if (tablesWithPk.Contains(tableName))
            continue; // already emitted a PK row for this table
        tablesWithPk.Add(tableName);
        // This is the auto-index backing the PK; emit a constraint row for it.
        var constraintRow = constraints.AddRow();
        constraintRow["TABLE_NAME"] = tableName;
        constraintRow["CONSTRAINT_NAME"] = indexRow.GetAsString("INDEX_NAME");
        constraintRow["CONSTRAINT_TYPE"] = "PRIMARY KEY";
    }
    return constraints;
}
//TODO: This needs more work. Currently it reports all individual symbols most of the time, in a message like
// "Syntax error, expected: + - < > = ..."; the better method is to group operator symbols under one alias "operator".
// The reason is that code picks expected key list at current(!) state only,
// slightly tweaking it for non-terminals, without exploring Reduce roots
// It is quite difficult to discover grouping non-terminals like "operator" in current structure.
// One possible solution would be to introduce "ExtendedLookaheads" in ParserState which would include
// all NonTerminals that might follow the current position. This list would be calculated at start up,
// in addition to normal lookaheads.
#endregion

// Builds a sorted list of display names for the symbols the parser can accept
// in its current state, collapsing aliased non-terminals over their Firsts keys.
private StringList GetCurrentExpectedSymbols() {
    BnfTermList inputElements = new BnfTermList();
    StringSet inputKeys = new StringSet();
    // Start from all action keys available in the current parser state.
    inputKeys.AddRange(_currentState.Actions.Keys);
    //First check all NonTerminals
    foreach (NonTerminal nt in Data.NonTerminals) {
        if (!inputKeys.Contains(nt.Key)) {
            continue;
        }
        //nt is one of our available inputs; check if it has an alias. If not, don't add it to element list;
        // and we have already all its "Firsts" keys in the list.
        // If yes, add nt to element list and remove
        // all its "Firsts" symbols from the list. These removed symbols will be represented by single nt alias.
        if (string.IsNullOrEmpty(nt.DisplayName)) {
            inputKeys.Remove(nt.Key);
        }
        else {
            inputElements.Add(nt);
            foreach (string first in nt.Firsts) {
                inputKeys.Remove(first);
            }
        }
    }
    //Now terminals - any terminal whose key survived the collapsing above is expected as-is.
    foreach (Terminal term in Data.Terminals) {
        if (inputKeys.Contains(term.Key)) {
            inputElements.Add(term);
        }
    }
    // Convert to display names (falling back to raw names) and sort for stable output.
    StringList result = new StringList();
    foreach (BnfTerm term in inputElements) {
        result.Add(string.IsNullOrEmpty(term.DisplayName)? term.Name : term.DisplayName);
    }
    result.Sort();
    return(result);
}
/// <summary>
/// Determines if this node has the specified feature.
/// </summary>
/// <param name="URI">Feature to look for.</param>
/// <returns>The node has this feature if true.</returns>
public bool HasFeature(string URI)
{
    // A missing feature list means no features are supported.
    return (Features != null) && Features.Contains(URI);
}
/// <summary>
/// Determines whether metadata for the given schema should be loaded.
/// </summary>
/// <param name="schema">The schema name to check.</param>
/// <returns>True if the schema should be included.</returns>
protected virtual bool IncludeSchema(string schema) {
    if (!SupportsSchemas()) {
        return(true);
    }
    // An empty subset means 'no filter' - include every schema. Previously an
    // empty subset excluded everything; this matches the sibling override's
    // behavior, which checks _schemasSubSet.Count == 0 before filtering.
    if (_schemasSubSet.Count == 0) {
        return(true);
    }
    return(_schemasSubSet.Contains(schema));
}
// Runs all registered Include expressions that target the given entity type
// against the supplied records, loading related entities. Records already
// processed (tracked by primary key) are skipped, and a per-type run counter
// caps recursion for nested includes.
private void RunIncludeQueries(Type entityType, IList <EntityRecord> records) {
    if (records.Count == 0) {
        return;
    }
    // Only includes whose lambda parameter matches this entity type apply.
    var matchingIncludes = _includes.Where(f => f.Parameters[0].Type == entityType).ToList();
    if (matchingIncludes.Count == 0) {
        return;
    }
    //filter records - drop records whose PK was already handled in a prior run
    if (_processedRecordKeys.Count > 0) {
        records = records.Where(r => !_processedRecordKeys.Contains(r.PrimaryKey.AsString())).ToList();
        if (records.Count == 0) {
            return;
        }
    }
    // Guard against unbounded nesting of include runs for the same type.
    var runCount = IncrementRunCount(entityType);
    if (runCount > MaxNestedRunsPerEntityType) {
        return;
    }
    IList <EntityRecord> results;
    foreach (var include in matchingIncludes) {
        var body = include.Body;
        switch (body.NodeType) {
        case ExpressionType.MemberAccess:
            // ex: session.EntitySet<IBookReview>().Where(...).Include(r => r.Book).ToList();
            // include: (r=>r.Book)
            var ma = body as MemberExpression; // r.Book
            results = RunIncludeForMember(entityType, records, ma);
            // add pk-s to processed records
            _processedRecordKeys.UnionWith(results.Select(r => r.PrimaryKey.AsString()));
            break;
        case ExpressionType.New:
            // ex: Include(r => new { r.Book, r.User }) - run each member access.
            // NOTE(review): results of these member runs are not added to
            // _processedRecordKeys, unlike the MemberAccess case - confirm intended.
            var newExpr = (NewExpression)body;
            foreach (var arg in newExpr.Arguments) {
                var marg = arg as MemberExpression;
                RunIncludeForMember(entityType, records, marg);
            }
            break;
        default:
            Util.Check(false, "Invalid Include expression: {0}.", body);
            break;
        } //switch body.NodeType
    } //foreach include
} //method
// Walks the tracked presences from most recent to oldest and returns the JID
// of the first sender advertising the requested feature, or null if none does.
public JID GetFeatureJID(CapsManager caps, string featureURI)
{
    var node = m_all.Last;
    while (node != null)
    {
        StringSet features = GetFeatures(caps, node.Value);
        if ((features != null) && features.Contains(featureURI))
            return node.Value.From;
        node = node.Previous;
    }
    return null;
}
public void ShouldEnumerate()
{
    // Add the same value repeatedly; a set should still be enumerable.
    var set = new StringSet();
    set.Add("Test");
    set.Add("Test");
    set.Add("Test");

    // Every enumerated element must be reported as contained.
    foreach (var item in set)
    {
        Assert.IsTrue(set.Contains(item));
    }
}
// Decides whether metadata for the given schema should be loaded.
// System schemas are always skipped; an empty subset means 'no filter'.
protected virtual bool IncludeSchema(string schema)
{
    if (Driver.IsSystemSchema(schema))
        return false;
    // No schema support, or no explicit filter configured: include everything.
    var unfiltered = !SupportsSchemas() || _schemasSubSet.Count == 0;
    if (unfiltered)
        return true;
    return _schemasSubSet.Contains(schema);
}
// Creates the adapter registry. When validAdapterNames is empty every adapter
// name is accepted; otherwise only the listed names pass validation.
public Adapters(DataGroups dataGroups, params string[] validAdapterNames)
{
    this.dataGroups = dataGroups;
    adapters = new StringHash <Adapter <T> >(true);
    validAdapters = new StringSet(true);
    if (validAdapterNames.Length > 0)
    {
        // Restrict to the supplied names; the predicate closes over the set.
        validAdapters.AddRange(validAdapterNames);
        isValidAdapterName = name => name.IsNotEmpty() && validAdapters.Contains(name);
    }
    else
    {
        // No names supplied: accept any adapter name.
        isValidAdapterName = _ => true;
    }
}
/// <summary>
/// Is the given feature URI supported by this entity?
/// </summary>
/// <param name="featureURI">The URI to check</param>
/// <returns>True if the feature is advertised by this entity.</returns>
public bool HasFeature(string featureURI)
{
    // Prefer the cached feature set when one has been built.
    if (m_features != null)
        return m_features.Contains(featureURI);
    // Otherwise scan the child disco-feature elements directly.
    foreach (DiscoFeature feature in GetElements <DiscoFeature>())
    {
        if (feature.Var == featureURI)
            return true;
    }
    return false;
}
// Builds the ACPI namespace by parsing the DSDT and all SSDTs referenced from
// the RSDT, then returns the discovered ACPI devices. Mutates the static
// acpiNamespace/reservedObjects/ssdt fields as a side effect.
public static AcpiDevice[] LoadDevices() {
    OperationRegionAccessor operationRegionAccessor = new OperationRegionAccessor();
    acpiNamespace = new AcpiNamespace();
    reservedObjects = new ReservedObjects(acpiNamespace);
    reservedObjects.CreateReservedObjects();
    // The DSDT (if present) must be loaded first; SSDTs extend it.
    if (dsdt != null) {
        if (ParseAndLoadRegion(dsdt.Region, operationRegionAccessor) == AmlParser.ParseSuccess.Failure) {
            throw new Exception("AML parser failure while parsing DSDT");
        }
    }
    // From the spec: "SSDTs are a continuation of the DSDT. Multiple SSDTs
    // can be used as part of a platform description. After the DSDT is loaded
    // into the ACPI Namespace, each secondary description table listed in the
    // RSDT/XSDT with a unique OEM Table ID is loaded." - section 2.1, General
    // ACPI Terminology
    StringSet visitedOemTableIds = new StringSet();
    for (int i = 0; i < rsdt.EntryCount; i++) {
        SystemTableHeader header = rsdt.GetTableHeader(i);
        VerboseOut.Print(" {0:x8}\n", __arglist(header.Signature));
        string oemTableId = header.OemTableId;
        // Only load SSDTs, and only once per OEM Table ID (per the spec above).
        if (!visitedOemTableIds.Contains(oemTableId) && header.Signature == Ssdt.Signature) {
            visitedOemTableIds.Add(oemTableId);
            ssdt = Ssdt.Create(header);
            if (ParseAndLoadRegion(ssdt.Region, operationRegionAccessor) == AmlParser.ParseSuccess.Failure) {
                throw new Exception("AML parser failure while parsing SSDT " + oemTableId);
            }
        }
    }
#if DUMP_ACPI_NAMESPACE
    DebugStub.WriteLine("Dumping ACPI namespace tree...");
    acpiNamespace.DumpTree();
#endif
    return(GetDeviceInfo(operationRegionAccessor));
}
// Exercises the persistent StringSet end to end: bulk add, enumeration order,
// single add/delete, duplicate handling, membership checks, and Clear counts.
public void TestStringSet() {
    using (StringSet set = new StringSet("test", true)) {
        set.Clear();
        // Seed with 1000 random strings; sorted so Enumerate() can be compared directly.
        List <string> strings = new List <string>();
        Random r = new Random();
        for (int i = 0; i < 1000; i++) {
            strings.Add(RandomString(64));
        }
        strings.Sort();
        // AddMany reports how many were newly added; a second call adds nothing.
        Assert.AreEqual(strings.Count, set.AddMany(strings));
        Assert.AreEqual(0, set.AddMany(strings));
        // Enumeration returns the full sorted contents.
        string[] existing = set.Enumerate().ToArray();
        Assert.AreEqual(strings, existing);
        // Single Add returns true only for new entries.
        Assert.IsFalse(set.Contains("test"));
        Assert.IsFalse(set.Contains("test2"));
        Assert.IsTrue(set.Add("test"));
        Assert.IsTrue(set.Add("test2"));
        Assert.AreEqual(strings.Count + 2, set.GetCount());
        Assert.IsFalse(set.Add("test"));
        Assert.IsFalse(set.Add("test2"));
        Assert.IsTrue(set.Contains("test"));
        Assert.IsTrue(set.Contains("test2"));
        Assert.IsFalse(set.Contains("nothing"));
        // DeleteMany removes both and reports the count.
        Assert.AreEqual(2, set.DeleteMany(new string[] { "test", "test2" }));
        Assert.AreEqual(strings.Count, set.GetCount());
        Assert.IsFalse(set.Contains("test"));
        Assert.IsFalse(set.Contains("test2"));
        // Single Delete returns true once, false for an absent key.
        Assert.IsFalse(set.Contains("test3"));
        Assert.IsTrue(set.Add("test3"));
        Assert.AreEqual(strings.Count + 1, set.GetCount());
        Assert.IsTrue(set.Delete("test3"));
        Assert.IsFalse(set.Delete("test3"));
        Assert.AreEqual(strings.Count, set.GetCount());
        // Clear returns the number of entries removed and empties the set.
        Assert.AreEqual(strings.Count, set.Clear());
        Assert.AreEqual(0, set.GetCount());
        Assert.AreEqual(new string[0], set.Enumerate());
    }
}
// Parses a file from either a physical or a virtual path, guarding against
// circular <!-- #include --> chains via _circularReferenceChecker. The file key
// is registered before parsing and always removed in the finally block.
private void ParseFile(string physicalPath, VirtualPath virtualPath) {
    // Determine the file used for the circular references checker. Normally,
    // we use the virtualPath, but we use the physical path if it specified,
    // as is the case for <!-- #include file="foo.inc" -->
    string fileToReferenceCheck = physicalPath != null ? physicalPath : virtualPath.VirtualPathString;
    // Check for circular references of include files
    if (_circularReferenceChecker.Contains(fileToReferenceCheck)) {
        throw new HttpException(SR.GetString(SR.Circular_include));
    }
    // Add the current file to the circular references checker.
    _circularReferenceChecker.Add(fileToReferenceCheck);
    try {
        // Open a TextReader either from the physical or virtual path
        TextReader reader;
        if (physicalPath != null) {
            using (reader = Util.ReaderFromFile(physicalPath, virtualPath)) {
                ParseReader(reader);
            }
        }
        else {
            // No physical path: open the virtual file's stream and wrap it.
            using (Stream stream = virtualPath.OpenFile()) {
                reader = Util.ReaderFromStream(stream, virtualPath);
                ParseReader(reader);
            }
        }
    }
    finally {
        // Remove the current file from the circular references checker
        _circularReferenceChecker.Remove(fileToReferenceCheck);
    }
}
// Returns a sorted list of display names for the symbols the parser can accept
// in its current state, collapsing aliased non-terminals over their Firsts keys.
private StringList GetCurrentExpectedSymbols() {
    var expectedTerms = new BnfTermList();
    var keys = new StringSet();
    // Start from all action keys available in the current parser state.
    keys.AddRange(_currentState.Actions.Keys);
    // Collapse aliased non-terminals: report the alias, hide its Firsts keys.
    foreach (NonTerminal nt in Data.NonTerminals) {
        if (!keys.Contains(nt.Key)) {
            continue;
        }
        if (string.IsNullOrEmpty(nt.DisplayName)) {
            // No alias: drop the non-terminal itself; its Firsts stay listed.
            keys.Remove(nt.Key);
        } else {
            // Aliased: one entry represents all of its Firsts symbols.
            expectedTerms.Add(nt);
            foreach (string first in nt.Firsts) {
                keys.Remove(first);
            }
        }
    }
    // Any terminal whose key survived the collapsing above is expected as-is.
    foreach (Terminal term in Data.Terminals) {
        if (keys.Contains(term.Key)) {
            expectedTerms.Add(term);
        }
    }
    // Convert to display names (falling back to raw names) and sort.
    var result = new StringList();
    foreach (BnfTerm term in expectedTerms) {
        result.Add(string.IsNullOrEmpty(term.DisplayName) ? term.Name : term.DisplayName);
    }
    result.Sort();
    return result;
}
// Recursively walks a code directory, registering folder-level build providers
// for the directory type and a per-file build provider for each supported file.
// Excluded top-level subdirectories and FrontPage '_vti_cnf' folders are skipped.
private void ProcessDirectoryRecursive(VirtualDirectory vdir, bool topLevel) {
    // If it's a WebReferences directory, handle it using a single WebReferencesBuildProvider
    // instead of creating a different BuildProvider for each file.
    if (_dirType == CodeDirectoryType.WebReferences) {
        // Create a build provider for the current directory
        BuildProvider buildProvider = new WebReferencesBuildProvider(vdir);
        buildProvider.SetVirtualPath(vdir.VirtualPathObject);
        _buildProviders.Add(buildProvider);
        AddFolderLevelBuildProviders(vdir, FolderLevelBuildProviderAppliesTo.WebReferences);
    }
    else if (_dirType == CodeDirectoryType.AppResources) {
        AddFolderLevelBuildProviders(vdir, FolderLevelBuildProviderAppliesTo.GlobalResources);
    }
    else if (_dirType == CodeDirectoryType.LocalResources) {
        AddFolderLevelBuildProviders(vdir, FolderLevelBuildProviderAppliesTo.LocalResources);
    }
    else if (_dirType == CodeDirectoryType.MainCode || _dirType == CodeDirectoryType.SubCode) {
        AddFolderLevelBuildProviders(vdir, FolderLevelBuildProviderAppliesTo.Code);
    }
    // Go through all the files in the directory
    foreach (VirtualFileBase child in vdir.Children) {
        if (child.IsDirectory) {
            // If we are at the top level of this code directory, and the current
            // subdirectory is in the exclude list, skip it
            if (topLevel && _excludedSubdirectories != null && _excludedSubdirectories.Contains(child.Name)) {
                continue;
            }
            // Exclude the special FrontPage directory (VSWhidbey 116727)
            if (child.Name == "_vti_cnf") {
                continue;
            }
            ProcessDirectoryRecursive(child as VirtualDirectory, false /*topLevel*/);
            continue;
        }
        // Don't look at individual files for WebReferences directories
        if (_dirType == CodeDirectoryType.WebReferences) {
            continue;
        }
        // Skip neutral files if _onlyBuildLocalizedResources is true
        if (IsResourceCodeDirectoryType(_dirType)) {
            if (_onlyBuildLocalizedResources && System.Web.UI.Util.GetCultureName(child.VirtualPath) == null) {
                continue;
            }
        }
        BuildProvider buildProvider = BuildManager.CreateBuildProvider(child.VirtualPathObject,
            (IsResourceCodeDirectoryType(_dirType)) ? BuildProviderAppliesTo.Resources : BuildProviderAppliesTo.Code,
            _bpc.CompConfig, _bpc.ReferencedAssemblies, false /*failIfUnknown*/);
        // Non-supported file type
        if (buildProvider == null) {
            continue;
        }
        // For Page resources, don't generate a strongly typed class
        if (_dirType == CodeDirectoryType.LocalResources && buildProvider is BaseResourcesBuildProvider) {
            ((BaseResourcesBuildProvider)buildProvider).DontGenerateStronglyTypedClass();
        }
        _buildProviders.Add(buildProvider);
    }
}
/* Function: MergeKeywordsInto_Stage2
 * Merges the keywords from the <ConfigFiles.TextFile> into the <Config>, returning whether it was successful. It
 * assumes all <ConfigFiles.TextCommentTypes> in textConfig have corresponding <CommentTypes> in outputConfig.
 * Any errors will be added to errorList, such as having a language-specific keyword that doesn't match a name in
 * <Languages.Manager>. Keywords found in ignoredKeywords are silently skipped.
 */
protected bool MergeKeywordsInto_Stage2(ref Config outputConfig, ConfigFiles.TextFile textConfig,
                                        StringSet ignoredKeywords, Errors.ErrorList errorList) {
    bool success = true;
    if (textConfig.HasCommentTypes) {
        foreach (var commentType in textConfig.CommentTypes) {
            // The caller guarantees every text comment type already exists in outputConfig.
            int commentTypeID = outputConfig.CommentTypeFromName(commentType.Name).ID;
            #if DEBUG
            if (commentTypeID == 0) {
                throw new InvalidOperationException();
            }
            #endif
            if (commentType.HasKeywordGroups) {
                foreach (var keywordGroup in commentType.KeywordGroups) {
                    // languageID 0 means the keyword group applies to all languages.
                    int languageID = 0;
                    if (keywordGroup.IsLanguageSpecific) {
                        var language = EngineInstance.Languages.FromName(keywordGroup.LanguageName);
                        if (language == null) {
                            errorList.Add(
                                Locale.Get("NaturalDocs.Engine", "Comments.txt.UnrecognizedKeywordLanguage(name)", keywordGroup.LanguageName),
                                keywordGroup.PropertyLocation
                            );
                            success = false;
                        }
                        else {
                            languageID = language.ID;
                        }
                    }
                    foreach (var keywordDefinition in keywordGroup.KeywordDefinitions) {
                        // Singular form, unless the keyword is on the ignore list.
                        if (!ignoredKeywords.Contains(keywordDefinition.Keyword)) {
                            var outputKeywordDefinition = new KeywordDefinition(keywordDefinition.Keyword);
                            outputKeywordDefinition.CommentTypeID = commentTypeID;
                            if (languageID != 0) {
                                outputKeywordDefinition.LanguageID = languageID;
                            }
                            // AddKeywordDefinition will handle overwriting definitions with the same keyword and language
                            outputConfig.AddKeywordDefinition(outputKeywordDefinition);
                        }
                        // Plural form, handled independently of the singular.
                        if (keywordDefinition.HasPlural && !ignoredKeywords.Contains(keywordDefinition.Plural)) {
                            var outputKeywordDefinition = new KeywordDefinition(keywordDefinition.Plural);
                            outputKeywordDefinition.CommentTypeID = commentTypeID;
                            outputKeywordDefinition.Plural = true;
                            if (languageID != 0) {
                                outputKeywordDefinition.LanguageID = languageID;
                            }
                            outputConfig.AddKeywordDefinition(outputKeywordDefinition);
                        }
                    }
                }
            }
        }
    }
    return(success);
}
/// <summary>
/// Checks whether the set contains the text of the given list view item.
/// </summary>
/// <param name="list">The set to search.</param>
/// <param name="packageItem">The item whose Text is used as the lookup key.</param>
/// <returns>True if the item's text is in the set.</returns>
public static bool Contains(this StringSet list, ListViewItem packageItem)
{
    var key = packageItem.Text;
    return list.Contains(key);
}
// Assigns generated aliases to output columns whose (alias or) name collides
// with an earlier output column, so every named output column is unique.
protected void CheckAliases(TranslationContext context, IEnumerable<Expression> outExpressions)
{
    var usedNames = new StringSet();
    foreach (var sqlExpr in outExpressions.OfType<SqlExpression>().ToList())
    {
        var colExpr = sqlExpr as ColumnExpression;
        string name = (colExpr == null) ? null : (colExpr.Alias ?? colExpr.Name);
        if (name == null)
            continue; // non-column (or unnamed) expressions need no alias here
        bool collides = usedNames.Contains(name);
        usedNames.Add(name);
        if (collides)
            sqlExpr.Alias = CreateDefaultAlias(sqlExpr, usedNames);
    }
}
/* Function: MergeLanguageIdentifiersInto
 * Merges the file extensions, aliases, and shebang strings from the <ConfigFiles.TextFiles> into the <Config>. It assumes all
 * <ConfigFiles.TextFileLanguages> in textConfig have corresponding <Language> in outputConfig.
 */
protected void MergeLanguageIdentifiersInto(ref Config outputConfig, ConfigFiles.TextFile systemTextConfig,
                                            ConfigFiles.TextFile projectTextConfig) {
    // First collect our ignored extensions
    StringSet ignoredFileExtensions = new StringSet(Config.KeySettingsForFileExtensions);
    if (systemTextConfig.HasIgnoredFileExtensions) {
        foreach (var ignoredFileExtension in systemTextConfig.IgnoredFileExtensions) {
            ignoredFileExtensions.Add(ignoredFileExtension);
        }
    }
    if (projectTextConfig.HasIgnoredFileExtensions) {
        foreach (var ignoredFileExtension in projectTextConfig.IgnoredFileExtensions) {
            ignoredFileExtensions.Add(ignoredFileExtension);
        }
    }
    // Now turn our language lists into one big combined one, but not in a way that merges any of its entries like
    // mergedTextConfig did. Just put them all one after the other.
    int languageEntryCount = (systemTextConfig.HasLanguages ? systemTextConfig.Languages.Count : 0) +
                             (projectTextConfig.HasLanguages ? projectTextConfig.Languages.Count : 0);
    List <ConfigFiles.TextFileLanguage> languages = new List <ConfigFiles.TextFileLanguage>(languageEntryCount);
    if (systemTextConfig.HasLanguages) {
        languages.AddRange(systemTextConfig.Languages);
    }
    if (projectTextConfig.HasLanguages) {
        languages.AddRange(projectTextConfig.Languages);
    }
    // Now apply file extensions, aliases, and shebang strings. We do it from this list instead of mergedTextConfig so
    // so everything happens in the proper order. For example:
    //
    //    Language: LanguageA
    //    Extensions: langA
    //
    //    Language: LanguageB
    //    Extensions: langB
    //
    //    Alter Language: LanguageA
    //    Replace Extensions: langB
    //
    // In this case langB should actually map to LanguageA. Not only that, langA should not be applied at all because
    // we used Replace instead of Add.
    for (int i = 0; i < languageEntryCount; i++) {
        // We don't need to check whether they're defined for the first time, added, or replaced here. In all cases we would
        // apply them unless there's a future entry that says Replace.
        bool applyFileExtensions = languages[i].HasFileExtensions;
        bool applyAliases = languages[i].HasAliases;
        bool applyShebangStrings = languages[i].HasShebangStrings;
        // Check for future Replace entries.
        string normalizedLanguageName = languages[i].Name.NormalizeKey(Config.KeySettingsForLanguageName);
        for (int j = i + 1; j < languageEntryCount; j++) {
            if (!applyFileExtensions && !applyAliases && !applyShebangStrings) {
                break;
            }
            if (languages[j].Name.NormalizeKey(Config.KeySettingsForLanguageName) == normalizedLanguageName) {
                if (languages[j].HasFileExtensions &&
                    languages[j].FileExtensionsPropertyChange == ConfigFiles.TextFileLanguage.PropertyChange.Replace) {
                    applyFileExtensions = false;
                }
                if (languages[j].HasAliases &&
                    languages[j].AliasesPropertyChange == ConfigFiles.TextFileLanguage.PropertyChange.Replace) {
                    applyAliases = false;
                }
                if (languages[j].HasShebangStrings &&
                    languages[j].ShebangStringsPropertyChange == ConfigFiles.TextFileLanguage.PropertyChange.Replace) {
                    applyShebangStrings = false;
                }
            }
        }
        // Apply what's left.
        int languageID = outputConfig.LanguageFromName(languages[i].Name).ID;
        #if DEBUG
        if (languageID == 0) {
            throw new InvalidOperationException();
        }
        #endif
        if (applyFileExtensions) {
            foreach (var fileExtension in languages[i].FileExtensions) {
                if (!ignoredFileExtensions.Contains(fileExtension)) {
                    outputConfig.AddFileExtension(fileExtension, languageID);
                }
            }
        }
        if (applyAliases) {
            foreach (var alias in languages[i].Aliases) {
                outputConfig.AddAlias(alias, languageID);
            }
        }
        if (applyShebangStrings) {
            foreach (var shebangString in languages[i].ShebangStrings) {
                outputConfig.AddShebangString(shebangString, languageID);
            }
        }
    }
}
/// <summary>
/// Returns true if the given name is one of the predefined special variables.
/// </summary>
public static bool IsSpecialVariable(string variableName)
{
    return specialVariables.Contains(variableName);
}
/* Function: Start
 *
 * Loads and combines the two versions of <Comments.txt>, returning whether it was successful. If there were any errors
 * they will be added to errorList.
 *
 * Dependencies:
 *
 *    - <Config.Manager> must be started before this class can start.
 */
public bool Start(Errors.ErrorList errorList) {
    List <ConfigFileCommentType> systemCommentTypeList;
    List <ConfigFileCommentType> projectCommentTypeList;
    List <string> ignoredSystemKeywords;
    List <string> ignoredProjectKeywords;
    List <string> systemTags;
    List <string> projectTags;
    List <CommentType> binaryCommentTypes;
    List <Tag> binaryTags;
    List <KeyValuePair <string, int> > binarySingularKeywords;
    List <KeyValuePair <string, int> > binaryPluralKeywords;
    List <string> binaryIgnoredKeywords;
    // The return value, which is whether we were able to successfully load and parse the system Comments.txt, and if it exists,
    // the project Comments.txt. The project Comments.txt not existing is not a failure.
    bool success = true;
    // Whether anything has changed since the last run, as determined by Comments.nd. If Comments.nd doesn't exist or is corrupt,
    // we have to assume something changed.
    bool changed = false;
    Comments_nd commentsNDParser = new Comments_nd();
    // We need the ID numbers to stay consistent between runs, so we need to create all the comment types and tags from the
    // binary file first. We'll worry about comparing their attributes and seeing if any were added or deleted later.
    if (EngineInstance.Config.ReparseEverything == true) {
        binaryCommentTypes = new List <CommentType>();
        binaryTags = new List <Tag>();
        binarySingularKeywords = new List <KeyValuePair <string, int> >();
        binaryPluralKeywords = new List <KeyValuePair <string, int> >();
        binaryIgnoredKeywords = new List <string>();
        changed = true;
    }
    else if (commentsNDParser.Load(EngineInstance.Config.WorkingDataFolder + "/Comments.nd",
                                   out binaryCommentTypes, out binaryTags,
                                   out binarySingularKeywords, out binaryPluralKeywords,
                                   out binaryIgnoredKeywords) == false) {
        changed = true;
        // Even though it failed, LoadBinaryFile will still create valid empty objects for the variables.
    }
    else { // Load binary file succeeded
        // We use a try block so if anything screwy happens, like two things having the same ID number and thus causing
        // an exception when added, we can continue as if the binary file didn't parse at all.
        try {
            foreach (CommentType binaryCommentType in binaryCommentTypes) {
                // We don't add the binary comment type itself because we only want those for comparison purposes. We want
                // the types in commentTypes to be at their default values because the Comments.txt versions will only set some attributes,
                // not all, and we don't want the unset attributes influenced by the binary versions.
                CommentType newCommentType = new CommentType(binaryCommentType.Name);
                newCommentType.ID = binaryCommentType.ID;
                newCommentType.Flags.InBinaryFile = true;
                commentTypes.Add(newCommentType);
            }
            foreach (Tag binaryTag in binaryTags) {
                Tag newTag = new Tag(binaryTag.Name);
                newTag.ID = binaryTag.ID;
                newTag.InBinaryFile = true;
                tags.Add(newTag);
            }
        }
        catch {
            commentTypes.Clear();
            tags.Clear();
            changed = true;
            // Clear them since they may be used later in this function.
            // NOTE(review): binaryTags is not cleared here, unlike the other binary lists - possibly an oversight; confirm.
            binaryCommentTypes.Clear();
            binarySingularKeywords.Clear();
            binaryPluralKeywords.Clear();
            binaryIgnoredKeywords.Clear();
            // Otherwise ignore the exception and continue.
        }
    }
    Path systemFile = EngineInstance.Config.SystemConfigFolder + "/Comments.txt";
    Path projectFile = EngineInstance.Config.ProjectConfigFolder + "/Comments.txt";
    Path oldProjectFile = EngineInstance.Config.ProjectConfigFolder + "/Topics.txt";
    Comments_txt commentsTxtParser = new Comments_txt();
    // Load the files.
    if (!commentsTxtParser.Load(systemFile, out systemCommentTypeList, out ignoredSystemKeywords, out systemTags, errorList)) {
        success = false;
        // Continue anyway because we want to show errors from both files.
    }
    if (System.IO.File.Exists(projectFile)) {
        if (!commentsTxtParser.Load(projectFile, out projectCommentTypeList, out ignoredProjectKeywords, out projectTags, errorList)) {
            success = false;
        }
    }
    else if (System.IO.File.Exists(oldProjectFile)) {
        // Fall back to the legacy Topics.txt name.
        if (!commentsTxtParser.Load(oldProjectFile, out projectCommentTypeList, out ignoredProjectKeywords, out projectTags, errorList)) {
            success = false;
        }
    }
    else {
        // The project file not existing is not an error condition. Fill in the variables with empty structures.
        projectCommentTypeList = new List <ConfigFileCommentType>();
        ignoredProjectKeywords = new List <string>();
        projectTags = new List <string>();
    }
    if (success == false) {
        return(false);
    }
    // Combine the ignored keywords.
    StringSet ignoredKeywords = new StringSet(KeySettingsForKeywords);
    foreach (string keyword in ignoredSystemKeywords) {
        if (keyword != null) {
            ignoredKeywords.Add(keyword);
        }
    }
    foreach (string keyword in ignoredProjectKeywords) {
        if (keyword != null) {
            ignoredKeywords.Add(keyword);
        }
    }
    // Combine the tags
    foreach (string tagName in systemTags) {
        Tag tag = tags[tagName];
        if (tag == null) {
            tag = new Tag(tagName);
            tag.InSystemFile = true;
            tags.Add(tag);
        }
        else {
            tag.InSystemFile = true;
            // In case it changed since the binary version.
            tag.FixNameCapitalization(tagName);
        }
    }
    foreach (string tagName in projectTags) {
        Tag tag = tags[tagName];
        if (tag == null) {
            tag = new Tag(tagName);
            tag.InProjectFile = true;
            tags.Add(tag);
        }
        else {
            tag.InProjectFile = true;
            tag.FixNameCapitalization(tagName);
        }
    }
    // All the comment types have to exist in IDObjects.Manager before the properties are set because Index With will need their
    // IDs. This pass only creates the types that were not already created by the binary file.
    // We don't need to do separate passes for standard entries and alter entries because alter entries should only appear
    // in the project file and only apply to types in the system file. Anything else is either an error (system file can't alter a
    // project entry) or would have been simplified out by LoadFile (a file with an alter entry applying to a type in the same
    // file.)
    foreach (ConfigFileCommentType commentType in systemCommentTypeList) {
        if (!Start_CreateType(commentType, systemFile, true, errorList)) {
            success = false;
        }
    }
    foreach (ConfigFileCommentType commentType in projectCommentTypeList) {
        if (!Start_CreateType(commentType, projectFile, false, errorList)) {
            success = false;
        }
    }
    // Need to exit early because Start_ApplyProperties assumes all the types were created correctly.
    if (success == false) {
        return(false);
    }
    // Now that everything's in commentTypes we can delete the ones that aren't in the text files, meaning they were in
    // the binary file from the last run but were deleted since then. We have to put them on a list and delete them in a
    // second pass because deleting them while iterating through would screw up the iterator.
    List <int> deletedIDs = new List <int>();
    foreach (CommentType commentType in commentTypes) {
        if (commentType.Flags.InConfigFiles == false) {
            deletedIDs.Add(commentType.ID);
            changed = true;
        }
    }
    foreach (int deletedID in deletedIDs) {
        commentTypes.Remove(deletedID);
    }
    // Delete the tags that weren't in the text files as well.
    deletedIDs.Clear();
    foreach (Tag tag in tags) {
        if (tag.InConfigFiles == false) {
            deletedIDs.Add(tag.ID);
            changed = true;
        }
    }
    foreach (int deletedID in deletedIDs) {
        tags.Remove(deletedID);
    }
    // Fill in the properties
    foreach (ConfigFileCommentType commentType in systemCommentTypeList) {
        if (!Start_ApplyProperties(commentType, systemFile, ignoredKeywords, errorList)) {
            success = false;
        }
    }
    foreach (ConfigFileCommentType commentType in projectCommentTypeList) {
        if (!Start_ApplyProperties(commentType, projectFile, ignoredKeywords, errorList)) {
            success = false;
        }
    }
    if (success == false) {
        return(false);
    }
    // Make sure there are no circular dependencies in Index With.
    foreach (CommentType commentType in commentTypes) {
        if (commentType.Index == CommentType.IndexValue.IndexWith) {
            IDObjects.NumberSet ids = new IDObjects.NumberSet();
            CommentType currentType = commentType;
            do {
                ids.Add(currentType.ID);
                if (ids.Contains(currentType.IndexWith)) {
                    // Start the dependency message on the repeated comment type, not on the one the loop started with because
                    // it could go A > B > C > B, in which case reporting A is irrelevant.
                    int repeatedID = currentType.IndexWith;
                    CommentType iterator = commentTypes[repeatedID];
                    string repeatMessage = iterator.Name;
                    // We want the error message to be on the repeated type only if that's the only one: A > A. Otherwise we
                    // want it to be the second to last one: C in A > B > C > B.
                    CommentType errorMessageTarget = currentType;
                    for (;;) {
                        iterator = commentTypes[iterator.IndexWith];
                        repeatMessage += " > " + iterator.Name;
                        if (iterator.ID == repeatedID) {
                            break;
                        }
                        errorMessageTarget = iterator;
                    }
                    // Report the error against the file/line the offending type was defined in.
                    Path errorMessageFile;
                    List <ConfigFileCommentType> searchList;
                    if (errorMessageTarget.Flags.InProjectFile) {
                        errorMessageFile = projectFile;
                        searchList = projectCommentTypeList;
                    }
                    else {
                        errorMessageFile = systemFile;
                        searchList = systemCommentTypeList;
                    }
                    int errorMessageLineNumber = 0;
                    string lcErrorMessageTargetName = errorMessageTarget.Name.ToLower();
                    foreach (ConfigFileCommentType searchListType in searchList) {
                        if (searchListType.Name.ToLower() == lcErrorMessageTargetName) {
                            errorMessageLineNumber = searchListType.LineNumber;
                            break;
                        }
                    }
                    errorList.Add(
                        Locale.Get("NaturalDocs.Engine", "Comments.txt.CircularDependencyInIndexWith(list)", repeatMessage),
                        errorMessageFile, errorMessageLineNumber
                    );
                    return(false);
                }
                currentType = commentTypes[currentType.IndexWith];
            }
            while (currentType.Index == CommentType.IndexValue.IndexWith);
        }
    }
    // Simplify Index With. So A > B > C becomes A > C. Also A > B = no indexing becomes A = no indexing.
    foreach (CommentType commentType in commentTypes) {
        if (commentType.Index == CommentType.IndexValue.IndexWith) {
            CommentType targetCommentType = commentTypes[commentType.IndexWith];
            while (targetCommentType.Index == CommentType.IndexValue.IndexWith) {
                targetCommentType = commentTypes[targetCommentType.IndexWith];
            }
            if (targetCommentType.Index == CommentType.IndexValue.No) {
                commentType.Index = CommentType.IndexValue.No;
            }
            else {
                commentType.IndexWith = targetCommentType.ID;
            }
        }
    }
    // Everything is okay at this point. Save the files again to reformat them. If the project file didn't exist, saving it
    // with the empty structures we created will create it.
    Start_FixCapitalization(systemCommentTypeList);
    Start_FixCapitalization(projectCommentTypeList);
    if (!commentsTxtParser.Save(projectFile, projectCommentTypeList, ignoredProjectKeywords, projectTags, errorList, true, false)) {
        success = false;
    }
    ;
    if (!commentsTxtParser.Save(systemFile, systemCommentTypeList, ignoredSystemKeywords, systemTags, errorList, false, true)) {
        success = false;
    }
    ;
    // Compare the structures with the binary ones to see if anything changed.
    if (changed == false) {
        // First an easy comparison.
        if (binaryCommentTypes.Count != commentTypes.Count ||
            binaryTags.Count != tags.Count ||
            binaryIgnoredKeywords.Count != ignoredKeywords.Count ||
            singularKeywords.Count != binarySingularKeywords.Count ||
            pluralKeywords.Count != binaryPluralKeywords.Count) {
            changed = true;
        }
    }
    if (changed == false) {
        // Next a detailed comparison if necessary.
        foreach (CommentType binaryCommentType in binaryCommentTypes) {
            CommentType commentType = commentTypes[binaryCommentType.ID];
            if (commentType == null || binaryCommentType != commentType) {
                changed = true;
                break;
            }
        }
        if (changed == false) {
            foreach (Tag binaryTag in binaryTags) {
                Tag tag = tags[binaryTag.ID];
                if (tag == null || binaryTag != tag) {
                    changed = true;
                    break;
                }
            }
        }
        if (changed == false) {
            foreach (string binaryIgnoredKeyword in binaryIgnoredKeywords) {
                if (!ignoredKeywords.Contains(binaryIgnoredKeyword)) {
                    changed = true;
                    break;
                }
            }
        }
        if (changed == false) {
            foreach (KeyValuePair <string, int> binarySingularKeywordPair in binarySingularKeywords) {
                // We can use ID instead of Name because we know they match now.
                if (singularKeywords.ContainsKey(binarySingularKeywordPair.Key) == false ||
                    singularKeywords[binarySingularKeywordPair.Key].ID != binarySingularKeywordPair.Value) {
                    changed = true;
                    break;
                }
            }
        }
        if (changed == false) {
            foreach (KeyValuePair <string, int> binaryPluralKeywordPair in binaryPluralKeywords) {
                // We can use ID instead of Name because we know they match now.
                if (pluralKeywords.ContainsKey(binaryPluralKeywordPair.Key) == false ||
                    pluralKeywords[binaryPluralKeywordPair.Key].ID != binaryPluralKeywordPair.Value) {
                    changed = true;
                    break;
                }
            }
        }
    }
    commentsNDParser.Save(EngineInstance.Config.WorkingDataFolder + "/Comments.nd",
                          commentTypes, tags, singularKeywords, pluralKeywords, ignoredKeywords);
    if (success == true && changed == true) {
        EngineInstance.Config.ReparseEverything = true;
    }
    groupCommentTypeID = IDFromKeyword("group");
    return(success);
}
/* Function: Start
 *
 * Loads and combines the two versions of <Languages.txt>, returning whether it was successful.  If there were any errors
 * they will be added to errorList.
 *
 * Dependencies:
 *
 * - <Config.Manager> and <CommentTypes.Manager> must be started before using the rest of the class.
 */
public bool Start(Errors.ErrorList errorList)
    {
    StartupIssues newStartupIssues = StartupIssues.None;

    // Parsed contents of the system and project Languages.txt files.
    List <ConfigFileLanguage> systemLanguageList;
    List <ConfigFileLanguage> projectLanguageList;
    List <string> ignoredSystemExtensions;
    List <string> ignoredProjectExtensions;

    // Contents of Languages.nd from the previous run, used only for comparison.
    List <Language> binaryLanguages;
    List <KeyValuePair <string, int> > binaryAliases;
    List <KeyValuePair <string, int> > binaryExtensions;
    List <KeyValuePair <string, int> > binaryShebangStrings;
    List <string> binaryIgnoredExtensions;

    // The return value, which is whether we were able to successfully load and parse the system Languages.txt, and if
    // it exists, the project Languages.txt.  The project Languages.txt not existing is not a failure.
    bool success = true;


    // First add all the predefined languages, since they may be subclassed.

    foreach (Language language in predefinedLanguages)
        {  languages.Add(language);  }


    // We need the ID numbers to stay consistent between runs, so we create all the languages from the binary file
    // next.  We'll worry about comparing their attributes with the text files and seeing if any were added or deleted later.

    Languages_nd languagesNDParser = new Languages_nd(this);

    // Don't bother going through the effort if we're rebuilding everything anyway.
    if (EngineInstance.HasIssues(StartupIssues.NeedToStartFresh | StartupIssues.CodeIDsInvalidated))
        {
        // Substitute empty structures so the comparison code below runs uniformly.
        binaryLanguages = new List <Language>();
        binaryAliases = new List <KeyValuePair <string, int> >();
        binaryExtensions = new List <KeyValuePair <string, int> >();
        binaryShebangStrings = new List <KeyValuePair <string, int> >();
        binaryIgnoredExtensions = new List <string>();

        newStartupIssues |= StartupIssues.NeedToReparseAllFiles | StartupIssues.CodeIDsInvalidated;
        }

    else if (!languagesNDParser.Load(EngineInstance.Config.WorkingDataFolder + "/Languages.nd", out binaryLanguages,
                                     out binaryAliases, out binaryExtensions, out binaryShebangStrings, out binaryIgnoredExtensions))
        {
        newStartupIssues |= StartupIssues.NeedToReparseAllFiles | StartupIssues.CodeIDsInvalidated;
        // Even though it failed, LoadBinaryFiles will still have created valid empty objects for them.
        }

    else // LoadBinaryFile succeeded
        {
        // We use a try block so if anything screwy happens, like two languages having the same ID number and thus
        // causing an exception when added, we can continue as if the binary file didn't parse at all.
        try
            {
            foreach (Language binaryLanguage in binaryLanguages)
                {
                // We don't add the binary language itself because we only want those for comparison purposes.  We otherwise
                // want the languages to be at their default values because the Languages.txt versions will only set some
                // attributes, not all.

                // Check for predefined languages of the same name.  If any of the binary languages' IDs collide with the
                // predefined languages' ones, it will be taken care of by the exception handler.
                Language existingLanguage = languages[binaryLanguage.Name];

                if (existingLanguage == null)
                    {
                    // Create a fresh language that only inherits the ID from the last run.
                    Language newLanguage = new Language(this, binaryLanguage.Name);
                    newLanguage.ID = binaryLanguage.ID;
                    newLanguage.InBinaryFile = true;

                    languages.Add(newLanguage);
                    }
                else
                    {
                    existingLanguage.InBinaryFile = true;
                    }
                }
            }
        catch  // deliberately broad: any corruption in Languages.nd means we fall back to a clean slate
            {
            languages.Clear();
            newStartupIssues |= StartupIssues.NeedToReparseAllFiles | StartupIssues.CodeIDsInvalidated;

            foreach (Language predefinedLanguage in predefinedLanguages)
                {  languages.Add(predefinedLanguage);  }

            // Clear them since they may be used later in this function.
            binaryLanguages.Clear();
            binaryAliases.Clear();
            binaryExtensions.Clear();
            binaryShebangStrings.Clear();
            binaryIgnoredExtensions.Clear();

            // Otherwise ignore the exception and continue.
            }
        }


    Path systemFile = EngineInstance.Config.SystemConfigFolder + "/Languages.txt";
    Path projectFile = EngineInstance.Config.ProjectConfigFolder + "/Languages.txt";

    Languages_txt languagesTxtParser = new Languages_txt();


    // Load the files.

    if (!languagesTxtParser.Load(systemFile, Config.PropertySource.SystemLanguageFile, out systemLanguageList,
                                 out ignoredSystemExtensions, errorList))
        {
        success = false;
        // Continue anyway because we want to show errors from both files.
        }

    if (System.IO.File.Exists(projectFile))
        {
        if (!languagesTxtParser.Load(projectFile, Config.PropertySource.ProjectLanguageFile, out projectLanguageList,
                                     out ignoredProjectExtensions, errorList))
            {  success = false;  }
        }
    else
        {
        // The project file not existing is not an error condition.  Fill in the variables with empty structures.
        projectLanguageList = new List <ConfigFileLanguage>();
        ignoredProjectExtensions = new List <string>();
        }

    if (success == false)
        {  return(false);  }


    // Combine the ignored extensions.

    StringSet ignoredExtensions = new StringSet(KeySettingsForExtensions);

    foreach (string extension in ignoredSystemExtensions)
        {  ignoredExtensions.Add(extension);  }
    foreach (string extension in ignoredProjectExtensions)
        {  ignoredExtensions.Add(extension);  }


    // Add the languages.  We don't need to do separate passes for standard entries and alter entries because alter
    // entries should only appear in the project file and only apply to types in the system file.  Anything else is either an
    // error (system file can't alter a project entry) or would have been simplified out by LoadFile (a file with an alter
    // entry applying to a language in the same file.)  Start_AddLanguage() also prevents inappropriate properties from
    // being set on languages, like Line Comment on one with full language support.

    foreach (ConfigFileLanguage configFileLanguage in systemLanguageList)
        {
        if (!Start_AddLanguage(configFileLanguage, systemFile, true, ignoredExtensions, errorList))
            {  success = false;  }
        }
    foreach (ConfigFileLanguage configFileLanguage in projectLanguageList)
        {
        if (!Start_AddLanguage(configFileLanguage, projectFile, false, ignoredExtensions, errorList))
            {  success = false;  }
        }

    if (success == false)
        {  return(false);  }


    // Now that everything's in languages we can delete the ones that weren't in the config files, such as predefined
    // languages that were removed or languages that were in the binary file from the last run but were deleted.  We
    // have to put them on a list and delete them in a second pass because deleting them while iterating through would
    // screw up the iterator.

    List <string> deletedLanguageNames = new List <string>();

    foreach (Language language in languages)
        {
        if (language.InConfigFiles == false)
            {
            deletedLanguageNames.Add(language.Name);

            // Check this flag so we don't set it to changed if we're deleting a predefined language that wasn't in the binary
            // file.
            if (language.InBinaryFile == true)
                {  newStartupIssues |= StartupIssues.NeedToReparseAllFiles;  }
            }
        }

    foreach (string deletedLanguageName in deletedLanguageNames)
        {  languages.Remove(deletedLanguageName);  }


    // Everything is okay at this point.  Save the files again to reformat them.  If the project file didn't exist, saving it
    // with the empty structures will create it.

    Start_FixCapitalization(systemLanguageList);
    Start_FixCapitalization(projectLanguageList);

    if (!languagesTxtParser.Save(projectFile, projectLanguageList, ignoredProjectExtensions, errorList, true, false))
        {  success = false;  } ;
    if (!languagesTxtParser.Save(systemFile, systemLanguageList, ignoredSystemExtensions, errorList, false, true))
        {  success = false;  } ;


    // Generate alternate comment styles.  We don't want these included in the config files but we do want them in the
    // binary files in case the generation method changes in a future version.

    foreach (Language language in languages)
        {
        if (language.Type == Language.LanguageType.BasicSupport)
            {
            language.GenerateJavadocCommentStrings();
            language.GenerateXMLCommentStrings();
            }
        }


    // Compare the structures with the binary ones to see if anything changed.

    // Cheap count comparison first.
    if (binaryLanguages.Count != languages.Count ||
        binaryAliases.Count != aliases.Count ||
        binaryExtensions.Count != extensions.Count ||
        binaryShebangStrings.Count != shebangStrings.Count ||
        binaryIgnoredExtensions.Count != ignoredExtensions.Count)
        {
        newStartupIssues |= StartupIssues.NeedToReparseAllFiles;
        }

    // Only bother to do a detailed comparison if we're not already reparsing everything.
    else if (!EngineInstance.HasIssues(StartupIssues.NeedToReparseAllFiles) &&
             (newStartupIssues & StartupIssues.NeedToReparseAllFiles) == 0)
        {
        bool changed = false;

        foreach (Language binaryLanguage in binaryLanguages)
            {
            Language language = languages[binaryLanguage.Name];

            if (language == null || binaryLanguage != language)
                {
                changed = true;
                break;
                }
            }

        if (changed == false)
            {
            foreach (string binaryIgnoredExtension in binaryIgnoredExtensions)
                {
                if (ignoredExtensions.Contains(binaryIgnoredExtension) == false)
                    {
                    changed = true;
                    break;
                    }
                }
            }

        if (changed == false)
            {
            foreach (KeyValuePair <string, int> binaryAliasPair in binaryAliases)
                {
                // We can use ID instead of Name because we know they match now.
                if (aliases.ContainsKey(binaryAliasPair.Key) == false ||
                    aliases[binaryAliasPair.Key].ID != binaryAliasPair.Value)
                    {
                    changed = true;
                    break;
                    }
                }
            }

        if (changed == false)
            {
            foreach (KeyValuePair <string, int> binaryExtensionPair in binaryExtensions)
                {
                // We can use ID instead of Name because we know they match now.
                if (extensions.ContainsKey(binaryExtensionPair.Key) == false ||
                    extensions[binaryExtensionPair.Key].ID != binaryExtensionPair.Value)
                    {
                    changed = true;
                    break;
                    }
                }
            }

        if (changed == false)
            {
            foreach (KeyValuePair <string, int> binaryShebangStringPair in binaryShebangStrings)
                {
                // We can use ID instead of Name because we know they match now.
                if (shebangStrings.ContainsKey(binaryShebangStringPair.Key) == false ||
                    shebangStrings[binaryShebangStringPair.Key].ID != binaryShebangStringPair.Value)
                    {
                    changed = true;
                    break;
                    }
                }
            }

        if (changed)
            {  newStartupIssues |= StartupIssues.NeedToReparseAllFiles;  }
        }

    // Persist the combined state for comparison on the next run.
    languagesNDParser.Save(EngineInstance.Config.WorkingDataFolder + "/Languages.nd",
                           languages, aliases, extensions, shebangStrings, ignoredExtensions);

    if (newStartupIssues != StartupIssues.None)
        {  EngineInstance.AddStartupIssues(newStartupIssues);  }

    return(success);
    }
/* Function: TestFolder
 * Tests all the input files contained in this folder.  See <EngineInstanceManager.Start()> for how relative paths are handled.
 *
 * Builds one <Test> per class found in the CodeDB, runs each, then also runs any expected-output files on disk that
 * never produced an actual output.  Fails the NUnit assertion if no tests were found or any test failed.
 */
public void TestFolder(Path testDataFolder, Path projectConfigFolder = default(Path))
    {
    TestList allTests = new TestList();
    // Tracks which expected-output files were matched by generated output, so leftovers can be reported below.
    StringSet expectedOutputFiles = new StringSet();

    engineInstanceManager = new EngineInstanceManager();
    engineInstanceManager.Start(testDataFolder, projectConfigFolder);

    // Store this so we can still use it for error messages after the engine is disposed of.
    Path inputFolder = engineInstanceManager.InputFolder;

    try
        {
        engineInstanceManager.Run();

        // Iterate through classes to build output files.

        using (Engine.CodeDB.Accessor accessor = EngineInstance.CodeDB.GetAccessor())
            {
            // Class IDs should be assigned sequentially.  It's not an ideal way to do this though.
            int classID = 1;
            accessor.GetReadOnlyLock();

            try
                {
                for (;;)
                    {
                    List <Topic> classTopics =  accessor.GetTopicsInClass(classID, Delegates.NeverCancel);
                    // NOTE(review): MergeTopics is invoked before the null/empty check below — confirm it tolerates
                    // a null or empty topic list.
                    Engine.Output.Components.ClassView.MergeTopics(classTopics, engineInstanceManager.HTMLBuilder);

                    if (classTopics == null || classTopics.Count == 0)
                        {  break;  }

                    string testName = classTopics[0].ClassString.Symbol.FormatWithSeparator(".");
                    Path outputFilePath = Test.ActualOutputFileOf(testName, inputFolder);
                    Test test = Test.FromActualOutputFile(outputFilePath);
                    expectedOutputFiles.Add(test.ExpectedOutputFile);

                    try
                        {  test.SetActualOutput( OutputOf(classTopics) );  }
                    catch (Exception e)
                        {  test.TestException = e;  }  // recorded on the test rather than aborting the whole folder

                    test.Run();
                    allTests.Add(test);

                    classID++;
                    }
                }
            finally
                {  accessor.ReleaseLock();  }
            }

        // Now search for any expected output files that didn't have corresponding actual output files.

        string[] files = System.IO.Directory.GetFiles(inputFolder);

        foreach (string file in files)
            {
            if (Test.IsExpectedOutputFile(file) && expectedOutputFiles.Contains(file) == false)
                {
                Test test = Test.FromExpectedOutputFile(file);
                test.Run();  // runs with no actual output, so it will record a failure for the missing file
                allTests.Add(test);

                expectedOutputFiles.Add(file);
                }
            }
        }
    finally
        {
        engineInstanceManager.Dispose();
        engineInstanceManager = null;
        }

    if (allTests.Count == 0)
        {  Assert.Fail("There were no tests found in " + inputFolder);  }
    else if (allTests.Passed == false)
        {  Assert.Fail(allTests.BuildFailureMessage());  }
    }
/* Function: ShouldAddStringAndContainsReturnTrue
 * Verifies that a value added to a <StringSet> is subsequently reported present by Contains().
 */
public void ShouldAddStringAndContainsReturnTrue()
    {
    // Arrange
    var set = new StringSet();

    // Act
    set.Add("Test");

    // Assert
    Assert.IsTrue(set.Contains("Test"));
    }
// Handles an end tag while in the "in body" insertion mode of the HTML5 tree-construction algorithm.
// Returns false when the token is ignored, true/the result of reprocessing when handled, and null to fall
// through to the state machine's default handling.  This is a port of jsoup's HtmlTreeBuilderState logic,
// including its Adoption Agency Algorithm implementation.
private bool?HandleEndTag(Token t, HtmlTreeBuilder tb)
    {
    Token.EndTag endTag = t.AsEndTag();
    string name = endTag.Name;

    if (name.Equals("body"))
        {
        if (!tb.InScope("body"))
            {
            tb.Error(this);
            return(false);
            }
        else
            {
            // TODO: error if stack contains something not dd, dt, li, optgroup, option, p, rp, rt, tbody, td, tfoot, th, thead, tr, body, html
            tb.Transition(AfterBody);
            }
        }
    else if (name.Equals("html"))
        {
        // </html> is handled by first synthesizing </body>, then reprocessing </html> in the AfterBody state.
        bool notIgnored = tb.Process(new Token.EndTag("body"));
        if (notIgnored)
            {  return(tb.Process(endTag));  }
        }
    else if (StringSet.Create(@"address article aside blockquote button center details dir div dl fieldset figcaption figure footer header hgroup listing menu nav ol pre section summary ul").Contains(name))
        {
        // TODO: refactor these lookups
        if (!tb.InScope(name))
            {
            // nothing to close
            tb.Error(this);
            return(false);
            }
        else
            {
            tb.GenerateImpliedEndTags();
            if (!tb.CurrentElement.NodeName.Equals(name))
                {  tb.Error(this);  }
            tb.PopStackToClose(name);
            }
        }
    else if (name.Equals("form"))
        {
        HtmlElement currentForm = tb.FormElement;
        tb.FormElement = null;  // the form pointer is cleared regardless of whether the close succeeds
        if (currentForm == null || !tb.InScope(name))
            {
            tb.Error(this);
            return(false);
            }
        else
            {
            tb.GenerateImpliedEndTags();
            if (!tb.CurrentElement.NodeName.Equals(name))
                {  tb.Error(this);  }
            // remove currentForm from stack. will shift anything under up.
            tb.RemoveFromStack(currentForm);
            }
        }
    else if (name.Equals("p"))
        {
        if (!tb.InButtonScope(name))
            {
            tb.Error(this);
            tb.Process(new Token.StartTag(name)); // if no p to close, creates an empty <p></p>
            return(tb.Process(endTag));
            }
        else
            {
            tb.GenerateImpliedEndTags(name);
            if (!tb.CurrentElement.NodeName.Equals(name))
                {  tb.Error(this);  }
            tb.PopStackToClose(name);
            }
        }
    else if (name.Equals("li"))
        {
        if (!tb.InListItemScope(name))
            {
            tb.Error(this);
            return(false);
            }
        else
            {
            tb.GenerateImpliedEndTags(name);
            if (!tb.CurrentElement.NodeName.Equals(name))
                {  tb.Error(this);  }
            tb.PopStackToClose(name);
            }
        }
    else if (StringUtil.In(name, "dd", "dt"))
        {
        if (!tb.InScope(name))
            {
            tb.Error(this);
            return(false);
            }
        else
            {
            tb.GenerateImpliedEndTags(name);
            if (!tb.CurrentElement.NodeName.Equals(name))
                {  tb.Error(this);  }
            tb.PopStackToClose(name);
            }
        }
    else if (HeadingTags.Contains(name))
        {
        // h1-h6: any open heading in scope is closed, not just the matching level.
        if (!tb.InScope(HeadingTags))
            {
            tb.Error(this);
            return(false);
            }
        else
            {
            tb.GenerateImpliedEndTags(name);
            if (!tb.CurrentElement.NodeName.Equals(name))
                {  tb.Error(this);  }
            tb.PopStackToClose(HeadingTags);
            }
        }
    else if (name.Equals("sarcasm"))
        {
        // *sigh*
        return(AnyOtherEndTag(t, tb));
        }
    else if (StringSet.Create("a b big code em font i nobr s small strike strong tt u").Contains(name))
        {
        // Adoption Agency Algorithm.  Runs up to 8 outer iterations per the HTML5 spec; the goto labels
        // mimic the labelled break/continue of the Java original.
        OUTER:
        for (int i = 0; i < 8; i++)
            {
            var formatEl = tb.GetActiveFormattingElement(name);
            if (formatEl == null)
                {  return(AnyOtherEndTag(t, tb));  }
            else if (!tb.OnStack(formatEl))
                {
                tb.Error(this);
                tb.RemoveFromActiveFormattingElements(formatEl);
                return(true);
                }
            else if (!tb.InScope(formatEl.NodeName))
                {
                tb.Error(this);
                return(false);
                }
            else if (tb.CurrentElement != formatEl)
                {  tb.Error(this);  }

            // Find the furthest block (first "special" element past the formatting element on the stack)
            // and the common ancestor (element immediately above the formatting element).
            DomContainer furthestBlock = null;
            DomContainer commonAncestor = null;
            bool seenFormattingElement = false;
            DescendableLinkedList <DomContainer> stack = tb.Stack;
            for (int si = 0; si < stack.Count; si++)
                {
                DomContainer el = stack.ElementAt(si);
                if (el == formatEl)
                    {
                    commonAncestor = stack.ElementAt(si - 1);
                    seenFormattingElement = true;
                    }
                else if (seenFormattingElement && tb.IsSpecial(el))
                    {
                    furthestBlock = el;
                    break;
                    }
                }
            if (furthestBlock == null)
                {
                // No furthest block: simply pop to the formatting element and stop.
                tb.PopStackToClose(formatEl.NodeName);
                tb.RemoveFromActiveFormattingElements(formatEl);
                return(true);
                }

            // TODO: Let a bookmark note the position of the formatting element in the list of active formatting elements relative to the elements on either side of it in the list.
            // does that mean: int pos of format el in list?
            DomContainer node = furthestBlock;
            DomContainer lastNode = furthestBlock;
            INNER:
            for (int j = 0; j < 3; j++)
                {
                continueINNER:
                // NOTE(review): 'goto continueINNER' re-enters the loop body WITHOUT advancing j, unlike the
                // Java labelled 'continue INNER' this port appears to mimic — confirm this is intended.
                if (tb.OnStack(node))
                    {  node = tb.AboveOnStack(node);  }
                if (!tb.IsInActiveFormattingElements(node)) // note no bookmark check
                    {
                    tb.RemoveFromStack(node);
                    goto continueINNER;
                    }
                else if (node == formatEl)
                    {  goto breakINNER;  }

                // Clone the node so it can be re-linked in both the formatting list and the stack.
                HtmlElement replacement = new HtmlElement(node.NodeName);
                tb.ReplaceActiveFormattingElement(node, replacement);
                tb.ReplaceOnStack(node, replacement);
                node = replacement;

                if (lastNode == furthestBlock)
                    {
                    // TODO: move the aforementioned bookmark to be immediately after the new node in the list of active formatting elements.
                    // not getting how this bookmark both straddles the element above, but is inbetween here...
                    }
                if (lastNode.Parent != null)
                    {  lastNode.Remove();  }
                node.Append(lastNode);
                lastNode = node;
                }
            breakINNER:

            // Re-home lastNode: foster-parent it when the common ancestor is a table section, otherwise append.
            if (StringUtil.In(commonAncestor.NodeName, "table", "tbody", "tfoot", "thead", "tr"))
                {
                if (lastNode.Parent != null)
                    {  lastNode.Remove();  }
                tb.InsertInFosterParent(lastNode);
                }
            else
                {
                if (lastNode.Parent != null)
                    {  lastNode.Remove();  }
                commonAncestor.Append(lastNode);
                }

            // Move all of the furthest block's children into a fresh copy of the formatting element.
            HtmlElement adopter = new HtmlElement(name);
            var childNodes = furthestBlock.ChildNodes.ToArray();
            foreach (var childNode in childNodes)
                {
                adopter.Append(childNode); // append will reparent. thus the clone to avoid concurrent mod.
                }
            furthestBlock.Append(adopter);
            tb.RemoveFromActiveFormattingElements(formatEl);
            // TODO: insert the new element into the list of active formatting elements at the position of the aforementioned bookmark.
            tb.RemoveFromStack(formatEl);
            tb.InsertOnStackAfter(furthestBlock, adopter);
            }
        }
    else if (StringUtil.In(name, "applet", "marquee", "object"))
        {
        // NOTE(review): the outer check uses the literal string "name" rather than the variable — this matches
        // the upstream jsoup source verbatim, but looks suspicious; confirm against the reference implementation
        // before changing.
        if (!tb.InScope("name"))
            {
            if (!tb.InScope(name))
                {
                tb.Error(this);
                return(false);
                }
            tb.GenerateImpliedEndTags();
            if (!tb.CurrentElement.NodeName.Equals(name))
                {  tb.Error(this);  }
            tb.PopStackToClose(name);
            tb.ClearFormattingElementsToLastMarker();
            }
        }
    else if (name.Equals("br"))
        {
        // </br> is a parse error; it is treated as a <br> start tag instead.
        tb.Error(this);
        tb.Process(new Token.StartTag("br"));
        return(false);
        }
    else
        {
        return(AnyOtherEndTag(t, tb));
        }
    return(null);
    }
/// <summary>
/// Builds entity interface types and members for every table/view in the DB model: strips configured
/// table/view name prefixes, optionally singularizes names, guarantees type-name uniqueness across schemas,
/// registers the entities, and creates one member per column (with nullability, identity, size/scale/precision
/// and explicit DbType info where the column's vendor type is not the default for its CLR type).
/// </summary>
private void GenerateEntities() {
  _typeRegistry = _dbModel.Driver.TypeRegistry;
  var viewPrefix = _dbModel.Config.NamingPolicy.ViewPrefix;
  var tablePrefix = _dbModel.Config.NamingPolicy.TablePrefix;
  //track uniqueness of type names - we might have trouble if we have 2 tables with the same name in different schemas
  var typeNames = new StringSet();
  foreach (var table in _dbModel.Tables) {
    if (_config.IgnoreTables.Contains(table.TableName)) {
      continue;
    }
    var module = GetModule(table.Schema);
    var entName = DbNameToCsName(table.TableName);
    switch (table.Kind) {
      case EntityKind.Table:
        // BUG FIX: the condition previously read 'string.IsNullOrWhiteSpace(tablePrefix)' (negation missing),
        // so a configured table prefix was never stripped, and a null prefix made StartsWith throw.
        // Now mirrors the View branch below.
        if (!string.IsNullOrWhiteSpace(tablePrefix) && entName.StartsWith(tablePrefix)) {
          entName = entName.Substring(tablePrefix.Length);
        }
        break;
      case EntityKind.View:
        if (!string.IsNullOrWhiteSpace(viewPrefix) && entName.StartsWith(viewPrefix)) {
          entName = entName.Substring(viewPrefix.Length);
        }
        break;
    }
    if (_config.Options.IsSet(DbFirstOptions.ChangeEntityNamesToSingular)) {
      entName = StringHelper.Unpluralize(entName);
    }
    entName = "I" + entName;  // entity interfaces follow the I-prefix convention
    if (typeNames.Contains(entName)) {
      entName = entName + "_" + table.Schema;  // disambiguate same-named tables in different schemas
    }
    var entType = CreateDummyEntityType(entName); //dummy type, just to have unique type instance
    typeNames.Add(entName);
    // we add only entity types for tables; views are ignored (we do not have queries to create view definitions)
    if (table.Kind == EntityKind.Table) {
      module.Entities.Add(entType); // register type in module
    }
    // Note: we generate entity interfaces for Views, but do not register them as entities
    var ent = new EntityInfo(module, entType, table.Kind);
    ent.TableName = table.TableName;
    table.Entity = ent;
    _entityModel.RegisterEntity(ent);

    // generate entity members, one per column
    foreach (var col in table.Columns) {
      var nullable = col.Flags.IsSet(DbColumnFlags.Nullable);
      var memberDataType = GetMemberType(col);
      var memberName = CheckMemberName(DbNameToCsName(col.ColumnName), ent);
      // member is added to ent.Members automatically in constructor
      var member = col.Member = new EntityMemberInfo(ent, MemberKind.Column, memberName, memberDataType);
      member.ColumnName = col.ColumnName;
      if (nullable) {
        member.Flags |= EntityMemberFlags.Nullable; // in case it is not set (for strings)
      }
      if (col.Flags.IsSet(DbColumnFlags.Identity)) {
        member.Flags |= EntityMemberFlags.Identity;
        member.AutoValueType = AutoType.Identity;
      }
      //hack: SQL Server 'timestamp' columns are row-version counters, not real timestamps
      if (col.TypeInfo.VendorDbType.TypeName == "timestamp") {
        member.AutoValueType = AutoType.RowVersion;
      }
      member.Size = (int)col.TypeInfo.Size;
      member.Scale = col.TypeInfo.Scale;
      member.Precision = col.TypeInfo.Precision;
      //Check if we need to specify DbType or DbType spec explicitly
      bool isMemo = member.Size < 0;  // negative size marks unlimited-size (memo) columns
      if (isMemo) {
        member.Flags |= EntityMemberFlags.UnlimitedSize;
      }
      var typeDef = col.TypeInfo.VendorDbType;
      var dftTypeDef = _typeRegistry.FindVendorDbTypeInfo(member.DataType, isMemo);
      if (typeDef == dftTypeDef) {
        continue; //no need for explicit DbType
      }
      //DbTypeDef is not default for this member - we need to specify DbType or TypeSpec explicitly.
      // Let's see if explicit DbType is enough; search by DbType and check if it brings the same db type.
      var vendorTypeDef = _typeRegistry.FindVendorDbTypeInfo(col.TypeInfo.VendorDbType.DbType, memberDataType, isMemo);
      if (vendorTypeDef == typeDef) {
        member.ExplicitDbType = col.TypeInfo.DbType; //Explicit db type is enough
      } else {
        member.ExplicitDbTypeSpec = col.TypeInfo.SqlTypeSpec;
      }
    }
  } //foreach table
} //method
// Returns a copy of the state's expected-token set with all closing braces removed except the one
// that actually matches the innermost currently-open brace (if any).
private StringSet FilterBracesInExpectedSet(StringSet stateExpectedSet) {
  var filtered = new StringSet();
  filtered.UnionWith(stateExpectedSet);
  // Determine which closing brace, if any, would match the most recently opened brace.
  var expectedClosingBrace = string.Empty;
  if (OpenBraces.Count > 0) {
    var pairedTerm = OpenBraces.Peek().KeyTerm.IsPairFor as KeyTerm;
    if (pairedTerm != null)
      expectedClosingBrace = pairedTerm.Text;
  }
  // Strip every other closing brace from the result set.
  foreach (var brace in Language.GrammarData.ClosingBraces) {
    if (brace != expectedClosingBrace && filtered.Contains(brace))
      filtered.Remove(brace);
  }
  return filtered;
}
/* Function: Start_AddLanguage
 * A helper function that is used only by <Start()> to add a <ConfigFileLanguage> into <languages>.
 * Returns whether it was able to do so without any errors.
 *
 * First validates the entry (an Alter entry must target an existing config-file language; a Define entry must
 * not redefine one), then applies each property that is legal for the language's support type, and finally
 * merges the aliases, extensions, and shebang strings.  Every failure is reported to errorList.
 */
private bool Start_AddLanguage(ConfigFileLanguage configFileLanguage, Path sourceFile, bool isSystemFile,
                               StringSet ignoredExtensions, Errors.ErrorList errorList)
    {
    bool success = true;


    // Validate or create the language.

    if (configFileLanguage.AlterLanguage == true)
        {
        // If altering a language that doesn't exist at all, at least not in the config files...
        if (languages.Contains(configFileLanguage.Name) == false ||
            languages[configFileLanguage.Name].InConfigFiles == false)
            {
            errorList.Add(
                Locale.Get("NaturalDocs.Engine", "Languages.txt.AlteredLanguageDoesntExist(name)", configFileLanguage.Name),
                sourceFile, configFileLanguage.LineNumber
                );

            success = false;
            }
        }

    else // define language, not alter
        {
        // Error if defining a language that already exists in the config files.  Having it exist otherwise is fine.
        if (languages.Contains(configFileLanguage.Name))
            {
            if (languages[configFileLanguage.Name].InConfigFiles == true)
                {
                errorList.Add(
                    Locale.Get("NaturalDocs.Engine", "Languages.txt.LanguageAlreadyExists(name)", configFileLanguage.Name),
                    sourceFile, configFileLanguage.LineNumber
                    );

                success = false;
                }
            }
        else
            {
            Language newLanguage = new Language(this, configFileLanguage.Name);
            languages.Add(newLanguage);
            }

        // Record which config file defined the language.
        if (isSystemFile)
            {  languages[configFileLanguage.Name].InSystemFile = true;  }
        else
            {  languages[configFileLanguage.Name].InProjectFile = true;  }
        }

    if (success == false)
        {  return(false);  }


    // Apply the properties.  Each comment-syntax property is only legal on certain support types, so an
    // error is reported when it appears on the wrong kind of language.

    Language language = languages[configFileLanguage.Name];

    if (configFileLanguage.SimpleIdentifier != null)
        {  language.SimpleIdentifier = configFileLanguage.SimpleIdentifier;  }

    if (configFileLanguage.LineCommentStrings != null)
        {
        if (language.Type != Language.LanguageType.BasicSupport)
            {
            Start_CantDefinePropertyError(configFileLanguage, language.Type, sourceFile, "Line Comment", errorList);
            success = false;
            }
        else
            {  language.LineCommentStrings = configFileLanguage.LineCommentStrings;  }
        }

    if (configFileLanguage.BlockCommentStringPairs != null)
        {
        if (language.Type != Language.LanguageType.BasicSupport)
            {
            Start_CantDefinePropertyError(configFileLanguage, language.Type, sourceFile, "Block Comment", errorList);
            success = false;
            }
        else
            {  language.BlockCommentStringPairs = configFileLanguage.BlockCommentStringPairs;  }
        }

    if (configFileLanguage.MemberOperator != null)
        {
        if (language.Type != Language.LanguageType.BasicSupport && language.Type != Language.LanguageType.TextFile)
            {
            Start_CantDefinePropertyError(configFileLanguage, language.Type, sourceFile, "Member Operator", errorList);
            success = false;
            }
        else
            {  language.MemberOperator = configFileLanguage.MemberOperator;  }
        }

    if (configFileLanguage.LineExtender != null)
        {
        if (language.Type != Language.LanguageType.BasicSupport)
            {
            Start_CantDefinePropertyError(configFileLanguage, language.Type, sourceFile, "Line Extender", errorList);
            success = false;
            }
        else
            {  language.LineExtender = configFileLanguage.LineExtender;  }
        }

    if (configFileLanguage.EnumValue != null)
        {
        if (language.Type != Language.LanguageType.BasicSupport && language.Type != Language.LanguageType.TextFile)
            {
            Start_CantDefinePropertyError(configFileLanguage, language.Type, sourceFile, "Enum Value", errorList);
            success = false;
            }
        else
            {  language.EnumValue = (Language.EnumValues)configFileLanguage.EnumValue;  }
        }

    if (configFileLanguage.CaseSensitive != null)
        {
        if (language.Type != Language.LanguageType.BasicSupport && language.Type != Language.LanguageType.TextFile)
            {
            Start_CantDefinePropertyError(configFileLanguage, language.Type, sourceFile, "Case Sensitive", errorList);
            success = false;
            }
        else
            {  language.CaseSensitive = (bool)configFileLanguage.CaseSensitive;  }
        }

    string[] commentTypeNamesWithPrototypeEnders = configFileLanguage.GetCommentTypeNamesWithPrototypeEnders();

    if (commentTypeNamesWithPrototypeEnders != null)
        {
        if (language.Type != Language.LanguageType.BasicSupport)
            {
            Start_CantDefinePropertyError(configFileLanguage, language.Type, sourceFile, "Prototype Enders", errorList);
            success = false;
            }
        else
            {
            foreach (string commentTypeName in commentTypeNamesWithPrototypeEnders)
                {
                CommentTypes.CommentType commentType = EngineInstance.CommentTypes.FromName(commentTypeName);

                if (commentType == null)
                    {
                    errorList.Add(
                        Locale.Get("NaturalDocs.Engine", "Languages.txt.PrototypeEnderCommentTypeDoesntExist(name)", commentTypeName),
                        sourceFile, configFileLanguage.LineNumber
                        );

                    success = false;
                    }
                else
                    {
                    string[] prototypeEnderStrings = configFileLanguage.GetPrototypeEnderStrings(commentTypeName);
                    PrototypeEnders prototypeEnders = new PrototypeEnders(prototypeEnderStrings);
                    language.SetPrototypeEnders(commentType.ID, prototypeEnders);
                    }
                }
            }
        }


    // Apply the aliases, extensions, and shebang strings.

    if (configFileLanguage.Aliases != null)
        {
        // If using Replace Aliases, find all existing aliases pointing to this language and remove them.
        if (configFileLanguage.AlterLanguage == true && configFileLanguage.AddAliases == false)
            {
            // Collected first, then removed in a second pass, because removing while iterating would break the enumerator.
            List <string> removedAliases = new List <string>();

            foreach (KeyValuePair <string, Language> pair in aliases)
                {
                // Reference equality on purpose: we want entries pointing at this exact Language object.
                if ((object)pair.Value == (object)language)
                    {  removedAliases.Add(pair.Key);  }
                }

            foreach (string removedAlias in removedAliases)
                {  aliases.Remove(removedAlias);  }
            }

        // Add new aliases.
        foreach (string alias in configFileLanguage.Aliases)
            {  aliases[alias] = language;  }
        }

    if (configFileLanguage.Extensions != null)
        {
        // If using Replace Extensions, find all existing extensions pointing to this language and remove them.
        if (configFileLanguage.AlterLanguage == true && configFileLanguage.AddExtensions == false)
            {
            List <string> removedExtensions = new List <string>();

            foreach (KeyValuePair <string, Language> pair in extensions)
                {
                if ((object)pair.Value == (object)language)
                    {  removedExtensions.Add(pair.Key);  }
                }

            foreach (string removedExtension in removedExtensions)
                {  extensions.Remove(removedExtension);  }
            }

        // Add new extensions, skipping any the user asked to ignore.
        foreach (string extension in configFileLanguage.Extensions)
            {
            if (ignoredExtensions.Contains(extension) == false)
                {  extensions[extension] = language;  }
            }
        }

    if (configFileLanguage.ShebangStrings != null)
        {
        // If using Replace Shebang Strings, find all existing shebang strings pointing to this language and remove them.
        if (configFileLanguage.AlterLanguage == true && configFileLanguage.AddShebangStrings == false)
            {
            List <string> removedShebangStrings = new List <string>();

            foreach (KeyValuePair <string, Language> pair in shebangStrings)
                {
                if ((object)pair.Value == (object)language)
                    {  removedShebangStrings.Add(pair.Key);  }
                }

            foreach (string removedShebangString in removedShebangStrings)
                {  shebangStrings.Remove(removedShebangString);  }
            }

        // Add new shebang strings.
        foreach (string shebangString in configFileLanguage.ShebangStrings)
            {  shebangStrings[shebangString] = language;  }
        }


    return(success);
    }
/* Function: Start_ApplyProperties
 * A helper function that is used only by <Start()> to combine a <ConfigFileCommentType's> properties into <commentTypes> and
 * its keywords into <singularKeywords> and <pluralKeywords>.  Assumes entries were already created for all of them by
 * <Start_CreateType()>.  Returns whether it was able to do so without causing an error.
 */
private bool Start_ApplyProperties(ConfigFileCommentType configFileCommentType, Path sourceFile,
                                   StringSet ignoredKeywords, Errors.ErrorList errorList)
    {
    CommentType commentType = commentTypes[configFileCommentType.Name];
    bool success = true;


    // Display names.  Locale-based values take precedence over literal values.

    if (configFileCommentType.DisplayNameFromLocale != null)
        {  commentType.DisplayName = Locale.Get("NaturalDocs.Engine", configFileCommentType.DisplayNameFromLocale);  }
    else if (configFileCommentType.DisplayName != null)
        {  commentType.DisplayName = configFileCommentType.DisplayName;  }

    if (configFileCommentType.PluralDisplayNameFromLocale != null)
        {  commentType.PluralDisplayName = Locale.Get("NaturalDocs.Engine", configFileCommentType.PluralDisplayNameFromLocale);  }
    else if (configFileCommentType.PluralDisplayName != null)
        {  commentType.PluralDisplayName = configFileCommentType.PluralDisplayName;  }


    // Other properties

    if (configFileCommentType.SimpleIdentifier != null)
        {  commentType.SimpleIdentifier = configFileCommentType.SimpleIdentifier;  }

    if (configFileCommentType.Index != null)
        {
        commentType.Index = (CommentType.IndexValue)configFileCommentType.Index;

        // IndexWith must resolve to another comment type that actually exists.
        if (commentType.Index == CommentType.IndexValue.IndexWith)
            {
            CommentType indexWithCommentType = commentTypes[configFileCommentType.IndexWith];

            if (indexWithCommentType == null)
                {
                errorList.Add(
                    Locale.Get("NaturalDocs.Engine", "Comments.txt.IndexWithCommentTypeDoesntExist(name)", configFileCommentType.IndexWith),
                    sourceFile, configFileCommentType.LineNumber
                    );

                success = false;
                }
            else
                {  commentType.IndexWith = indexWithCommentType.ID;  }
            }
        }

    if (configFileCommentType.Scope != null)
        {  commentType.Scope = (CommentType.ScopeValue)configFileCommentType.Scope;  }

    if (configFileCommentType.Flags.AllConfigurationProperties != 0)
        {  commentType.Flags.AllConfigurationProperties = configFileCommentType.Flags.AllConfigurationProperties;  }

    if (configFileCommentType.BreakLists != null)
        {  commentType.BreakLists = (bool)configFileCommentType.BreakLists;  }


    // Keywords.  The list alternates singular/plural entries, either of which may be null.
    // NOTE(review): the i+1 access assumes keywords always has an even count — presumably guaranteed
    // by the config file parser; confirm.

    List <string> keywords = configFileCommentType.Keywords;

    for (int i = 0; i < keywords.Count; i += 2)
        {
        if (keywords[i] != null && !ignoredKeywords.Contains(keywords[i]))
            {
            singularKeywords.Add(keywords[i], commentType);
            // A keyword can only be singular or plural, not both, so later definitions win.
            pluralKeywords.Remove(keywords[i]);
            }
        if (keywords[i + 1] != null && !ignoredKeywords.Contains(keywords[i + 1]))
            {
            singularKeywords.Remove(keywords[i + 1]);
            pluralKeywords.Add(keywords[i + 1], commentType);
            }
        }

    return(success);
    }
/// <summary>
/// Validates the SARIF tool driver component: warns on an overly long tool name,
/// a missing 'informationUri' (when policy requires it), and missing version information.
/// </summary>
/// <param name="toolComponent">The driver component being analyzed.</param>
/// <param name="toolDriverPointer">JSON pointer to the driver within the log file.</param>
private void AnalyzeToolDriver(ToolComponent toolComponent, string toolDriverPointer)
{
    if (!string.IsNullOrEmpty(toolComponent.Name))
    {
        const int MaxWords = 3;
        string[] nameWords = toolComponent.Name.Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);

        if (nameWords.Length > MaxWords)
        {
            // {0}: The tool name '{1}' contains {2} words, which is more than the recommended
            // maximum of {3} words. A short tool name is easy to remember and fits into a
            // narrow column when displaying a list of results. If you need to provide more
            // information about your tool, use the 'fullName' property.
            LogResult(
                toolDriverPointer.AtProperty(SarifPropertyName.Name),
                nameof(RuleResources.SARIF2005_ProvideToolProperties_Warning_ProvideConciseToolName_Text),
                toolComponent.Name,
                nameWords.Length.ToString(),
                MaxWords.ToString());
        }
    }

    if (this.Context.Policy.GetProperty(InformationUriRequired) && toolComponent.InformationUri == null)
    {
        // {0}: The tool '{1}' does not provide 'informationUri'. This property helps the
        // developer responsible for addressing a result by providing a way to learn more
        // about the tool.
        LogResult(
            toolDriverPointer,
            nameof(RuleResources.SARIF2005_ProvideToolProperties_Warning_ProvideToolnformationUri_Text),
            toolComponent.Name);
    }

    StringSet acceptableVersionProperties = this.Context.Policy.GetProperty(AcceptableVersionProperties);

    // A version property counts only when policy accepts it AND the driver actually populated it.
    bool IsVersionProvided(string propertyName, string propertyValue) =>
        acceptableVersionProperties.Contains(propertyName) && !string.IsNullOrWhiteSpace(propertyValue);

    bool toolDriverProvidesVersion =
        IsVersionProvided(nameof(toolComponent.Version), toolComponent.Version)
        || IsVersionProvided(nameof(toolComponent.SemanticVersion), toolComponent.SemanticVersion)
        || IsVersionProvided(nameof(toolComponent.DottedQuadFileVersion), toolComponent.DottedQuadFileVersion);

    if (!toolDriverProvidesVersion)
    {
        // {0}: The tool '{1}' does not provide any of the version-related properties {2}.
        // Providing version information enables the log file consumer to determine whether
        // the file was produced by an up to date version, and to avoid accidentally
        // comparing log files produced by different tool versions.
        LogResult(
            toolDriverPointer,
            nameof(RuleResources.SARIF2005_ProvideToolProperties_Warning_ProvideToolVersion_Text),
            toolComponent.Name,
            $"'{string.Join("', '", acceptableVersionProperties.Select(ToCamelCase))}'");
    }
    else if (!string.IsNullOrWhiteSpace(toolComponent.Version))
    {
        AnalyzeVersion(
            toolComponent.Name,
            toolComponent.Version,
            toolDriverPointer.AtProperty(SarifPropertyName.Version));
    }
}
/// <summary>
/// Builds entity definitions from the database model: one entity interface per table/view
/// (views get interfaces but are not registered as entities), plus a member per column.
/// Entity type names are made unique by appending the schema name on collision.
/// </summary>
private void GenerateEntities()
{
    var entNames = new StringSet();
    _typeRegistry = _dbModel.Driver.TypeRegistry;
    var viewPrefix = "v";
    var tablePrefix = string.Empty;

    foreach (var table in _dbModel.Tables)
    {
        if (_config.IgnoreTables.Contains(table.TableName))
            continue;

        var module = GetModule(table.Schema);
        var entName = DbNameToCsName(table.TableName);

        // Strip the configured prefix, if any, for the matching kind.
        switch (table.Kind)
        {
            case EntityKind.Table:
                // BUG FIX: the original tested 'string.IsNullOrWhiteSpace(tablePrefix)' (missing '!'),
                // so a non-empty table prefix would never be stripped — compare the View branch below.
                // While tablePrefix is empty the observable result is unchanged (Substring(0) was a no-op).
                if (!string.IsNullOrWhiteSpace(tablePrefix) && entName.StartsWith(tablePrefix))
                    entName = entName.Substring(tablePrefix.Length);
                break;
            case EntityKind.View:
                if (!string.IsNullOrWhiteSpace(viewPrefix) && entName.StartsWith(viewPrefix))
                    entName = entName.Substring(viewPrefix.Length);
                break;
        }

        if (_config.Options.IsSet(DbFirstOptions.ChangeEntityNamesToSingular))
            entName = StringHelper.Unpluralize(entName);
        entName = "I" + entName;

        // Track uniqueness of type names - we might have trouble if we have 2 tables
        // with the same name in different schemas.
        if (entNames.Contains(entName))
            entName = entName + "_" + table.Schema;

        var entType = typeof(object); // CreateDummyEntityType(entName); //dummy type, just to have unique type instance
        // We add only entity types for tables; views are ignored (we do not have queries to create view definitions).
        if (table.Kind == EntityKind.Table)
            module.Entities.Add(entType); // register type in module
        // Note: we generate entity interfaces for Views, but do not register them as entities.
        var ent = new EntityInfo(module, entType, table.Kind);
        ent.TableName = table.TableName;
        ent.Name = entName;
        table.Entity = ent;
        _entityModel.RegisterEntity(ent);
        entNames.Add(entName);

        // Generate entity members.
        foreach (var col in table.Columns)
        {
            var nullable = col.Flags.IsSet(DbColumnFlags.Nullable);
            var memberDataType = GetMemberType(col);
            var memberName = CheckMemberName(DbNameToCsName(col.ColumnName), ent);
            var member = col.Member = new EntityMemberInfo(ent, EntityMemberKind.Column, memberName, memberDataType);
            member.ColumnName = col.ColumnName;
            // Member is added to ent.Members automatically in constructor.
            if (nullable)
                member.Flags |= EntityMemberFlags.Nullable; // in case it is not set (for strings)
            if (col.Flags.IsSet(DbColumnFlags.Identity))
            {
                member.Flags |= EntityMemberFlags.Identity;
                member.AutoValueType = AutoType.Identity;
            }
            // Hack for MS SQL: 'timestamp' columns are row-version values, not real timestamps.
            if (col.TypeInfo.TypeDef.Name == "timestamp")
                member.AutoValueType = AutoType.RowVersion;
            member.Size = (int)col.TypeInfo.Size;
            member.Scale = col.TypeInfo.Scale;
            member.Precision = col.TypeInfo.Precision;

            // Check if we need to specify DbType or DbType spec explicitly.
            bool unlimited = member.Size < 0;
            if (unlimited)
                member.Flags |= EntityMemberFlags.UnlimitedSize;
            var typeDef = col.TypeInfo.TypeDef;
            // Detect if we need to set explicit DbType or DbTypeSpec in member attribute.
            var dftMapping = _typeRegistry.GetDbTypeInfo(member);
            if (col.TypeInfo.Matches(dftMapping))
                continue; // no need for explicit DbTypeSpec
            // DbTypeMapping is not default for this member - we need to specify DbType or TypeSpec explicitly.
            member.ExplicitDbTypeSpec = col.TypeInfo.DbTypeSpec;
        }
    } //foreach table
} //method
/// <summary>
/// Returns table constraints as a DataTable. The provider's "ForeignKeys" schema collection
/// contains only foreign keys, so primary keys are reconstructed from SQLite's auto-generated
/// PK-backing indexes (named 'sqlite_autoindex_*') and appended as PRIMARY KEY rows.
/// </summary>
public override DataTable GetTableConstraints()
{
    var constraints = GetSchemaCollection("ForeignKeys"); // this gives us only foreign keys
    var indexColumns = GetIndexColumns();
    var tablesSeen = new StringSet(); // one PK row per table, even though the index has a row per column

    foreach (DataRow indexRow in indexColumns.Rows)
    {
        var indexName = indexRow.GetAsString("INDEX_NAME");
        if (!IsPrimaryKeyIndex(indexName))
        {
            continue;
        }
        var tableName = indexRow.GetAsString("TABLE_NAME");
        if (tablesSeen.Contains(tableName))
        {
            continue; // don't add duplicates
        }
        tablesSeen.Add(tableName);
        // This is an auto-index backing a PK; synthesize a constraint row for it.
        var pkRow = constraints.NewRow();
        pkRow["TABLE_NAME"] = tableName;
        pkRow["CONSTRAINT_NAME"] = indexName;
        pkRow["CONSTRAINT_TYPE"] = "PRIMARY KEY";
        constraints.Rows.Add(pkRow);
    }
    return constraints;
}