// Computes the set of expected terms in a parser state. While there may be an extended list of symbols expected at some point, // we want to reorganize and reduce it. For example, if the current state expects all arithmetic operators as an input, // it would be better not to list all operators (+, -, *, /, etc) but simply put "operator" covering them all. // To achieve this, the grammar writer can group operators (or any other terminals) into named groups using Grammar's methods // AddTermReportGroup, AddNoReportGroup etc. Then instead of reporting each operator separately, Irony would include // a single "group name" to represent them all. // The "expected report set" is not computed during parser construction (it would take considerable time), but on demand during parsing, // when an error is detected and the expected set is actually needed for the error message. // Multi-threading concerns. When used in a multi-threaded environment (web server), the LanguageData would be shared in an // application-wide cache to avoid rebuilding the parser data on every request. The LanguageData is immutable, except // this one case - the expected sets are constructed late by CoreParser on an as-needed basis. // We don't do any locking here, just compute the set, and on return from this function the state field is assigned. // We assume that this field assignment is an atomic, concurrency-safe operation. The worst thing that might happen // is "double effort" when two threads start computing the same set around the same time, and the last one to finish would // leave its result in the state field.
#endregion

// Builds the user-facing "expected terms" set for a parser state, collapsing
// grouped terminals (see Grammar.AddTermReportGroup and friends) into their
// group aliases, so error messages say "operator" instead of listing + - * /.
internal static StringSet ComputeGroupedExpectedSetForState(Grammar grammar, ParserState state) {
  var remaining = new TerminalSet();
  remaining.UnionWith(state.ExpectedTerminals);
  var expected = new StringSet();
  // First drop terminals the grammar explicitly excludes from error reports.
  foreach (var reportGroup in grammar.TermReportGroups) {
    if (reportGroup.GroupType == TermReportGroupType.DoNotReport)
      remaining.ExceptWith(reportGroup.Terminals);
  }
  // Replace any covered terminals with the group's alias (normal and operator groups).
  foreach (var reportGroup in grammar.TermReportGroups) {
    var reportable = reportGroup.GroupType == TermReportGroupType.Normal ||
                     reportGroup.GroupType == TermReportGroupType.Operator;
    if (reportable && remaining.Overlaps(reportGroup.Terminals)) {
      expected.Add(reportGroup.Alias);
      remaining.ExceptWith(reportGroup.Terminals);
    }
  }
  // Whatever is left is reported individually via its error alias.
  foreach (var terminal in remaining)
    expected.Add(terminal.ErrorAlias);
  return expected;
}
// Verifies that a StringSet sized for 4 entries grows on the fifth add and
// still finds previously interned strings afterwards.
public void Grow() {
  // offsets:         0   3   6    11   15  19  22    27   32  36
  const string source = "onetwothreefourfivesixseveneightnineten";
  var chars = source.ToCharArray();
  var set = new StringSet(4);
  string added;
  set.Add(chars, 0, 3, out added);  // one
  set.Add(chars, 3, 3, out added);  // two
  set.Add(chars, 6, 5, out added);  // three
  set.Add(chars, 11, 4, out added); // four
  Assert.AreEqual(4, set.MaxSize);
  // make sure we can find something before growing
  Assert.AreEqual("two", set.GetExistingString(chars, 3, 3));
  set.Add(chars, 15, 4, out added); // five — forces expansion
  Assert.Greater(set.MaxSize, 4, "The set should have expanded to greater than 4 maximum size.");
  set.Add(chars, 19, 3, out added); // six
  set.Add(chars, 22, 5, out added); // seven
  set.Add(chars, 27, 5, out added); // eight
  set.Add(chars, 32, 4, out added); // nine
  set.Add(chars, 36, 3, out added); // ten
  Assert.AreEqual(10, set.Count);
  Assert.GreaterOrEqual(set.MaxSize, set.Count);
  // Entries added before the growth must still be reachable.
  Assert.AreEqual("two", set.GetExistingString(chars, 3, 3));
  Assert.AreEqual("seven", set.GetExistingString(chars, 22, 5));
}
// Verifies enumeration of an empty and a populated set, plus GetStrings().
public void Enumerate() {
  var set = new StringSet();
  int seen = 0;
  // An empty set must yield no elements.
  foreach (string s in set) {
    seen++;
    Console.WriteLine(s);
  }
  Assert.AreEqual(0, seen);
  set.Add("bloo");
  set.Add("foo");
  set.Add("bar");
  foreach (string s in set) {
    seen++;
    Console.WriteLine(s);
  }
  Assert.AreEqual(3, seen);
  // GetStrings returns every member; sort for a stable comparison.
  string[] items = set.GetStrings();
  Array.Sort(items);
  Assert.AreEqual("bar", items[0]);
  Assert.AreEqual("bloo", items[1]);
  Assert.AreEqual("foo", items[2]);
  Assert.AreEqual(3, items.Length);
}
// Verifies set-to-set Add (union) and Remove (difference) semantics.
public void Merge() {
  var target = new StringSet();
  target.Add("foo");
  var other = new StringSet();
  other.Add("bar");
  other.Add("baz");
  target.Add(other);
  Assert.AreEqual(3, target.Count);
  // Merging an overlapping set only adds the members not already present.
  other = new StringSet();
  other.Add("boo");
  other.Add("baz");
  target.Add(other);
  Assert.AreEqual(4, target.Count);
  // Removing a set removes every member it contains.
  target.Remove(other);
  Assert.AreEqual(2, target.Count);
  Assert.IsTrue(target["foo"]);
  Assert.IsTrue(target["bar"]);
  Assert.IsFalse(target["boo"]);
  Assert.IsFalse(target["baz"]);
  Assert.IsFalse(target["bloo"]);
}
// Verifies that a search cursor over a hash-collision chain stays valid while
// more colliding entries are added, and walks the chain most-recent-first.
public void SearchCursor() {
  var one = "one";
  var two = "two";
  var three = "three";
  var set = new StringSet(4);
  // Force every entry onto the same hash bucket to simulate collisions.
  var sharedHash = StringHash.GetHash(one);
  set.Add(one, sharedHash);
  set.Add(two, sharedHash);
  set.Add(three, sharedHash);
  var cursor = set.GetSearchCursor(sharedHash);
  // Adding another colliding entry must not invalidate the cursor.
  set.Add("four", sharedHash);
  Assert.AreEqual(4, set.Count);
  Assert.AreEqual(4, set.MaxSize); // hash collisions shouldn't cause the set to grow
  // The cursor sees the chain as it was when created: three, two, one.
  Assert.True(cursor.MightHaveMore);
  Assert.AreSame(three, cursor.NextString());
  Assert.True(cursor.MightHaveMore);
  Assert.AreSame(two, cursor.NextString());
  Assert.True(cursor.MightHaveMore);
  Assert.AreSame(one, cursor.NextString());
  Assert.False(cursor.MightHaveMore);
}
// Builds the allow-list of function names approved for this check:
// the telemetry write helper, both bare and namespace-qualified.
private static StringSet BuildApprovedFunctionsStringSet() {
  var approvedFunctions = new StringSet();
  approvedFunctions.Add("_TlgWrite");
  approvedFunctions.Add("__vcrt_trace_logging_provider::_TlgWrite");
  return approvedFunctions;
}
/// <summary>
/// Add a single feature to the node.
/// Does not fire OnFeatures, since this should mostly be used by
/// things that are not querying externally.
/// </summary>
/// <param name="feature">The feature URI to add</param>
public void AddFeature(string feature) {
  // Lazily materialize the backing set on first use.
  var features = Features ?? (Features = new StringSet());
  features.Add(feature);
}
// NOTE(review): this asserts Count == 3 after adding the same string three
// times — this StringSet evidently counts every Add, not distinct values.
public void ShouldCount() {
  var set = new StringSet();
  for (var i = 0; i < 3; i++)
    set.Add("Test");
  Assert.AreEqual(3, set.Count);
}
// Clear must leave the set empty regardless of prior contents.
public void ShouldClearTheSet() {
  var set = new StringSet();
  for (var i = 0; i < 3; i++)
    set.Add("Test");
  set.Clear();
  Assert.AreEqual(0, set.Count);
}
// Every element yielded by enumeration must be reported as contained.
public void ShouldEnumerate() {
  var set = new StringSet();
  for (var i = 0; i < 3; i++)
    set.Add("Test");
  foreach (var item in set)
    Assert.IsTrue(set.Contains(item));
}
// Builds the allow-list containing the single approved telemetry function name.
private static StringSet BuildApprovedFunctionsStringSet() {
  var approvedFunctions = new StringSet();
  approvedFunctions.Add("_TlgWrite");
  return approvedFunctions;
}
public override DbTable GetTableConstraints() {
  // The "ForeignKeys" schema collection gives us only foreign keys.
  var constraints = GetSchemaCollection("ForeignKeys");
  // Each PK in SQLite is 'supported' by an index named 'sqlite_autoindex_*';
  // scan the index columns for such names and synthesize PRIMARY KEY rows.
  var indexColumns = GetIndexColumns();
  var seenTables = new StringSet(); // avoid one PK row per indexed column
  foreach (DbRow row in indexColumns.Rows) {
    var indexName = row.GetAsString("INDEX_NAME");
    if (!IsPrimaryKeyIndex(indexName))
      continue;
    var tableName = row.GetAsString("TABLE_NAME");
    if (seenTables.Contains(tableName))
      continue; // already emitted a PK row for this table
    seenTables.Add(tableName);
    // It is an auto-index for a PK; create a constraint row for it.
    var pkRow = constraints.AddRow();
    pkRow["TABLE_NAME"] = tableName;
    pkRow["CONSTRAINT_NAME"] = indexName;
    pkRow["CONSTRAINT_TYPE"] = "PRIMARY KEY";
  }
  return constraints;
}
} //method

// Ensures every named output column in the SELECT list is unique by assigning
// a default alias to any duplicate name.
protected void CheckAliases(TranslationContext context, IEnumerable<Expression> outExpressions) {
  var sqlExpressions = outExpressions.OfType<SqlExpression>().ToList();
  var usedNames = new StringSet();
  foreach (var expr in sqlExpressions) {
    string name = null;
    var col = expr as ColumnExpression;
    if (col != null) {
      //if (isView) col.Alias = col.Name;
      name = col.Alias ?? col.Name;
    }
    // A name seen before needs a synthetic alias; record the name either way.
    var isDuplicate = name != null && usedNames.Contains(name);
    if (name != null)
      usedNames.Add(name);
    if (isDuplicate)
      expr.Alias = CreateDefaultAlias(expr, usedNames);
  }
} //method
// Constructor: wires up the token handlers (session, version, CSRF) and the
// default ignore paths according to the supplied options.
public WebCallContextHandlerSettings(LogLevel logLevel = LogLevel.Basic,
    WebHandlerOptions options = WebHandlerOptions.DefaultDebug,
    string sessionToken = "Authorization",
    WebTokenType sessionTokenType = WebTokenType.Header,
    string versionToken = DefaultVersionToken,
    string csrfToken = null,
    DbConnectionReuseMode connectionReuseMode = DbConnectionReuseMode.KeepOpen) {
  LogLevel = logLevel;
  Options = options;
  ConnectionReuseMode = connectionReuseMode;
  if (sessionToken != null)
    TokenHandlers.Add(new WebSessionTokenHandler(sessionToken, sessionTokenType));
  if (versionToken != null)
    TokenHandlers.Add(new VersionTokenHandler(versionToken));
  // Cross-Site Request Forgery (CSRF) protection. Used as header only (not cookie),
  // when the session token is saved in a cookie, to protect against CSRF execution.
  // Sometimes called a synchronization token; read more in Wikipedia or other resources.
  if (csrfToken != null)
    TokenHandlers.Add(new WebTokenHandler(csrfToken, WebTokenType.Header, WebTokenDirection.InputOutput));
  // We ignore Swagger paths by default.
  IgnorePaths.Add("/swagger");
}
// Verifies GetExistingString: misses before Add, hits after, and Add reports
// whether the string was newly interned.
public void GetExisting() {
  const string source = "thisthatthen";
  var chars = source.ToCharArray();
  var set = new StringSet(10);
  // Nothing has been added yet, so lookup must miss.
  Assert.IsNull(set.GetExistingString(chars, 0, 4));
  string interned;
  Assert.True(set.Add(chars, 0, 4, out interned));  // first add succeeds
  Assert.False(set.Add(chars, 0, 4, out interned)); // duplicate add reports false
  Assert.AreEqual(interned, set.GetExistingString(chars, 0, 4));
  // A slice never added ("that") must still miss.
  Assert.IsNull(set.GetExistingString(chars, 4, 4));
}
// Verifies Add/Remove/indexer/ToString plus the array and merge constructors.
public void Add() {
  var set = new StringSet();
  set.Add("foo");
  set.Add("foo"); // duplicate — must not grow the set
  set.Add("bar");
  Assert.IsTrue(set["foo"]);
  Assert.AreEqual(2, set.Count);
  Assert.AreEqual("foo\r\nbar\r\n", set.ToString());
  set.Remove("bar");
  Assert.AreEqual(1, set.Count);
  Assert.IsFalse(set["fool"]);
  // Construction from an array, then merging another set.
  set = new StringSet(new string[] { "foo", "bar" });
  set.Add(new StringSet("baz"));
  Assert.AreEqual(3, set.Count);
}
// After removal the membership test must fail.
public void ShouldRemoveAndContainsReturnFalse() {
  var set = new StringSet();
  set.Add("Test");
  Assert.IsTrue(set.Contains("Test"));
  set.Remove("Test");
  Assert.IsFalse(set.Contains("Test"));
}
/* Function: MergeIgnoredKeywordsInto
 * Merges the ignored keywords from the <ConfigFiles.TextFile> into a <StringSet>.
 */
protected void MergeIgnoredKeywordsInto(ref StringSet ignoredKeywords, ConfigFiles.TextFile textConfig) {
  if (!textConfig.HasIgnoredKeywords)
    return;
  foreach (var keywordGroup in textConfig.IgnoredKeywordGroups) {
    foreach (var definition in keywordGroup.KeywordDefinitions) {
      ignoredKeywords.Add(definition.Keyword);
      // Plural forms are ignored as well when defined.
      if (definition.HasPlural)
        ignoredKeywords.Add(definition.Plural);
    }
  }
}
// Each 4-character slice added from the buffer must round-trip to the
// expected string value.
public void StringEquals() {
  const string source = "thisthatthen";
  var chars = source.ToCharArray();
  var set = new StringSet(10);
  string interned;
  set.Add(chars, 0, 4, out interned);
  Assert.AreEqual("this", interned);
  set.Add(chars, 4, 4, out interned);
  Assert.AreEqual("that", interned);
  set.Add(chars, 8, 4, out interned);
  Assert.AreEqual("then", interned);
}
/*
 * Add a file as a dependency for the DLL we're building
 */
internal void AddSourceDependency(VirtualPath fileName) {
  // Lazily create the (case-insensitive) dependency set on first use.
  if (_sourceDependencies == null)
    _sourceDependencies = new CaseInsensitiveStringSet();
  _sourceDependencies.Add(fileName.VirtualPathString);
}
// Only members shared by both sets survive the intersection.
public void ShouldIntersect() {
  var left = new StringSet();
  var right = new StringSet();
  left.Add("Test1");
  left.Add("Common1");
  left.Add("Common2");
  right.Add("Common1");
  right.Add("Common2");
  right.Add("Test2");
  var common = left.Intersect(right);
  Assert.AreEqual(2, common.Count);
  Assert.IsTrue(common.Contains("Common1"));
  Assert.IsTrue(common.Contains("Common2"));
}
/// <summary>
/// Add a feature
/// </summary>
/// <returns>The newly created child element for the feature.</returns>
public DiscoFeature AddFeature(string featureURI) {
  var feature = CreateChildElement<DiscoFeature>();
  feature.Var = featureURI;
  // Keep the cached feature set in sync when it has been materialized.
  if (m_features != null)
    m_features.Add(featureURI);
  return feature;
}
// Exercises the persistent StringSet end-to-end: bulk add, enumeration order,
// single add/contains/delete round-trips, and Clear's removed-count result.
// Fix: removed the unused local `Random r` — RandomString() does not take it.
public void TestStringSet() {
  using (StringSet set = new StringSet("test", true)) {
    set.Clear();
    // Seed with 1000 random strings, sorted so Enumerate() can be compared directly.
    List<string> strings = new List<string>();
    for (int i = 0; i < 1000; i++) {
      strings.Add(RandomString(64));
    }
    strings.Sort();
    Assert.AreEqual(strings.Count, set.AddMany(strings));
    Assert.AreEqual(0, set.AddMany(strings)); // all duplicates now
    string[] existing = set.Enumerate().ToArray();
    Assert.AreEqual(strings, existing);
    // Single add/contains round-trips.
    Assert.IsFalse(set.Contains("test"));
    Assert.IsFalse(set.Contains("test2"));
    Assert.IsTrue(set.Add("test"));
    Assert.IsTrue(set.Add("test2"));
    Assert.AreEqual(strings.Count + 2, set.GetCount());
    Assert.IsFalse(set.Add("test"));
    Assert.IsFalse(set.Add("test2"));
    Assert.IsTrue(set.Contains("test"));
    Assert.IsTrue(set.Contains("test2"));
    Assert.IsFalse(set.Contains("nothing"));
    // Bulk and single deletes report how many items were actually removed.
    Assert.AreEqual(2, set.DeleteMany(new string[] { "test", "test2" }));
    Assert.AreEqual(strings.Count, set.GetCount());
    Assert.IsFalse(set.Contains("test"));
    Assert.IsFalse(set.Contains("test2"));
    Assert.IsFalse(set.Contains("test3"));
    Assert.IsTrue(set.Add("test3"));
    Assert.AreEqual(strings.Count + 1, set.GetCount());
    Assert.IsTrue(set.Delete("test3"));
    Assert.IsFalse(set.Delete("test3")); // already gone
    Assert.AreEqual(strings.Count, set.GetCount());
    // Clear reports how many items it removed and leaves the set empty.
    Assert.AreEqual(strings.Count, set.Clear());
    Assert.AreEqual(0, set.GetCount());
    Assert.AreEqual(new string[0], set.Enumerate());
  }
}
// Splits a delimiter-separated keyword list (spaces, commas, semicolons or
// line/tab whitespace) and adds each non-empty trimmed keyword.
public void AddKeywordList(string keywordList) {
  var separators = new char[] { ' ', ',', ';', '\n', '\r', '\t' };
  foreach (string raw in keywordList.Split(separators)) {
    string keyword = raw.Trim();
    if (keyword.Length > 0)
      Keywords.Add(keyword);
  }
}
// Records a resolved virtual path as a dependency of the current template.
protected void AddDependency(VirtualPath virtualPath) {
  virtualPath = ResolveVirtualPath(virtualPath);
  Debug.Trace("Template", "Parsed dependency: " + _virtualPath + " depends on " + virtualPath);
  // Lazily create the (case-insensitive) dependency set on first use.
  if (_virtualPathDependencies == null)
    _virtualPathDependencies = new CaseInsensitiveStringSet();
  _virtualPathDependencies.Add(virtualPath.VirtualPathString);
}
// Builds the "expected terms" set for a parser state for error reporting,
// collapsing grouped terminals into their group aliases.
internal static StringSet ComputeGroupedExpectedSetForState(Grammar grammar, ParserState state) {
  // Start from every terminal the state can accept.
  var pending = new TerminalSet();
  pending.UnionWith(state.ExpectedTerminals);
  var expected = new StringSet();
  // Pass 1: remove terminals the grammar asked us never to report.
  foreach (var reportGroup in grammar.TermReportGroups) {
    if (reportGroup.GroupType == TermReportGroupType.DoNotReport)
      pending.ExceptWith(reportGroup.Terminals);
  }
  // Pass 2: collapse normal/operator groups into their alias.
  foreach (var reportGroup in grammar.TermReportGroups) {
    if (reportGroup.GroupType != TermReportGroupType.Normal &&
        reportGroup.GroupType != TermReportGroupType.Operator)
      continue;
    if (!pending.Overlaps(reportGroup.Terminals))
      continue;
    expected.Add(reportGroup.Alias);
    pending.ExceptWith(reportGroup.Terminals);
  }
  // Remaining terminals are reported individually by their error alias.
  foreach (var term in pending)
    expected.Add(term.ErrorAlias);
  return expected;
}
// Adding the same character slices twice must hand back the very same string
// instances (reference identity), proving the set interns its strings.
public void ReferenceEquals() {
  const string source = "thisthatthen";
  var chars = source.ToCharArray();
  var set = new StringSet(10);
  // First round of adds creates the interned instances.
  string thisA, thatA, thenA;
  set.Add(chars, 0, 4, out thisA);
  set.Add(chars, 4, 4, out thatA);
  set.Add(chars, 8, 4, out thenA);
  // Second round must return the same instances, not equal copies.
  string thisB, thatB, thenB;
  set.Add(chars, 0, 4, out thisB);
  set.Add(chars, 4, 4, out thatB);
  set.Add(chars, 8, 4, out thenB);
  Assert.AreEqual(thisA, thisB);
  Assert.AreEqual(thatA, thatB);
  Assert.AreEqual(thenA, thenB);
  Assert.AreSame(thisA, thisB);
  Assert.AreSame(thatA, thatB);
  Assert.AreSame(thenA, thenB);
}
// Unions the feature sets of every tracked presence into a single StringSet.
public StringSet GetAllFeatures(CapsManager caps) {
  if (caps == null)
    throw new ArgumentNullException("caps");
  var allFeatures = new StringSet();
  foreach (Presence presence in m_all) {
    // GetFeatures may return null when no features are known for a presence.
    StringSet features = GetFeatures(caps, presence);
    if (features != null)
      allFeatures.Add(features);
  }
  return allFeatures;
}
// Registers a transformer instance and records its assembly location and
// assembly-qualified name so it can be re-loaded from configuration.
public void AddWorkItemModelTransformer(SarifWorkItemModelTransformer workItemModelTransformer) {
  var transformerType = workItemModelTransformer.GetType();
  StringSet assemblies = this.GetProperty(PluginAssemblyLocations);
  StringSet assemblyQualifiedNames = this.GetProperty(PluginAssemblyQualifiedNames);
  assemblies.Add(transformerType.Assembly.Location);
  assemblyQualifiedNames.Add(transformerType.AssemblyQualifiedName);
  // Lazily create the in-memory transformer list, then register the instance.
  this.workItemModelTransformers = this.workItemModelTransformers ?? new List<SarifWorkItemModelTransformer>();
  this.workItemModelTransformers.Add(workItemModelTransformer);
}
} //method
#endregion

#region Calculating Tail Firsts
// For every LR0 item, computes the "tail firsts": the set of terminals that can
// begin the portion of the production that follows the item's next element,
// plus TailIsNullable — whether that whole tail can derive the empty string.
// Each production is walked right-to-left, accumulating firsts until a
// non-nullable element resets the accumulator.
private void CalculateTailFirsts() {
  foreach (NonTerminal nt in _grammar.NonTerminals) {
    foreach (Production prod in nt.Productions) {
      StringSet accumulatedFirsts = new StringSet();
      bool allNullable = true;
      //We are going backwards in LR0Items list
      for (int i = prod.LR0Items.Count - 1; i >= 0; i--) {
        LR0Item item = prod.LR0Items[i];
        if (i >= prod.LR0Items.Count - 2) {
          //Last and before last items have empty tails
          item.TailIsNullable = true;
          item.TailFirsts.Clear();
          continue;
        }
        BnfTerm nextTerm = prod.RValues[i + 1]; //Element after-after-dot; remember we're going in reverse direction
        //if (ntElem == null) continue; //it is not NonTerminal
        NonTerminal nextNt = nextTerm as NonTerminal;
        bool notNullable = nextTerm is Terminal || nextNt != null && !nextNt.Nullable;
        if (notNullable) //next term is not nullable (a terminal or non-nullable NonTerminal)
        //term is not nullable, so we clear all old firsts and add this term
        {
          accumulatedFirsts.Clear();
          allNullable = false;
          item.TailIsNullable = false;
          if (nextTerm is Terminal) {
            item.TailFirsts.Add(nextTerm.Key); //term is terminal so add its key
            accumulatedFirsts.Add(nextTerm.Key);
          } else if (nextNt != null) { //it is NonTerminal
            item.TailFirsts.AddRange(nextNt.Firsts); //nonterminal
            accumulatedFirsts.AddRange(nextNt.Firsts);
          }
          continue;
        }
        //if we are here, then nextNt is a nullable NonTerminal. We add its Firsts to the accumulator.
        accumulatedFirsts.AddRange(nextNt.Firsts);
        item.TailFirsts.AddRange(accumulatedFirsts);
        item.TailIsNullable = allNullable;
      } //for i
    } //foreach prod
  } //foreach nt
} //method
// The union contains every member of both operand sets.
public void ShouldUnionTwoSet() {
  var left = new StringSet();
  var right = new StringSet();
  left.Add("Test1");
  right.Add("Test2");
  var union = left.Union(right);
  Assert.AreEqual(2, union.Count);
  Assert.IsTrue(union.Contains("Test1"));
  Assert.IsTrue(union.Contains("Test2"));
}
// Parses the ACPI tables into the ACPI namespace and returns the discovered
// devices: the DSDT first, then each SSDT listed in the RSDT with a unique
// OEM Table ID. Mutates the static acpiNamespace/reservedObjects/ssdt fields.
// Throws on any AML parse failure.
public static AcpiDevice[] LoadDevices() {
  OperationRegionAccessor operationRegionAccessor = new OperationRegionAccessor();
  acpiNamespace = new AcpiNamespace();
  reservedObjects = new ReservedObjects(acpiNamespace);
  reservedObjects.CreateReservedObjects();
  // The DSDT is optional here; when present it must parse cleanly.
  if (dsdt != null) {
    if (ParseAndLoadRegion(dsdt.Region, operationRegionAccessor) == AmlParser.ParseSuccess.Failure) {
      throw new Exception("AML parser failure while parsing DSDT");
    }
  }
  // From the spec: "SSDTs are a continuation of the DSDT. Multiple SSDTs
  // can be used as part of a platform description. After the DSDT is loaded
  // into the ACPI Namespace, each secondary description table listed in the
  // RSDT/XSDT with a unique OEM Table ID is loaded." - section 2.1, General
  // ACPI Terminology
  StringSet visitedOemTableIds = new StringSet();
  for (int i = 0; i < rsdt.EntryCount; i++) {
    SystemTableHeader header = rsdt.GetTableHeader(i);
    VerboseOut.Print(" {0:x8}\n", __arglist(header.Signature));
    string oemTableId = header.OemTableId;
    // Load each SSDT only once per OEM Table ID, per the spec quote above.
    if (!visitedOemTableIds.Contains(oemTableId) && header.Signature == Ssdt.Signature) {
      visitedOemTableIds.Add(oemTableId);
      ssdt = Ssdt.Create(header);
      if (ParseAndLoadRegion(ssdt.Region, operationRegionAccessor) == AmlParser.ParseSuccess.Failure) {
        throw new Exception("AML parser failure while parsing SSDT " + oemTableId);
      }
    }
  }
#if DUMP_ACPI_NAMESPACE
  DebugStub.WriteLine("Dumping ACPI namespace tree...");
  acpiNamespace.DumpTree();
#endif
  return (GetDeviceInfo(operationRegionAccessor));
}
//TODO: This needs more work. Currently it reports all individual symbols most of the time, in a message like
// "Syntax error, expected: + - < > = ..."; the better method is to group operator symbols under one alias "operator".
// The reason is that code picks expected key list at current(!) state only,
// slightly tweaking it for non-terminals, without exploring Reduce roots
// It is quite difficult to discover grouping non-terminals like "operator" in current structure.
// One possible solution would be to introduce "ExtendedLookaheads" in ParserState which would include
// all NonTerminals that might follow the current position. This list would be calculated at start up,
// in addition to normal lookaheads.
#endregion

// Builds the set of symbol names (display names where available) the parser
// would accept in the current state — used to compose syntax-error messages.
private StringSet GetCurrentExpectedSymbols() {
  BnfTermList inputElements = new BnfTermList();
  StringSet inputKeys = new StringSet();
  inputKeys.AddRange(_currentState.Actions.Keys);
  //First check all NonTerminals
  foreach (NonTerminal nt in _context.Compiler.Grammar.NonTerminals) {
    if (!inputKeys.Contains(nt.Key)) {
      continue;
    }
    //nt is one of our available inputs; check if it has an alias. If not, don't add it to element list;
    // because we have already all its "Firsts" keys in the list.
    // If yes, add nt to element list and remove
    // all its "firsts" symbols from the list. These removed symbols will be represented by single nt alias.
    if (string.IsNullOrEmpty(nt.DisplayName)) {
      inputKeys.Remove(nt.Key);
    } else {
      inputElements.Add(nt);
      foreach (string first in nt.Firsts) {
        inputKeys.Remove(first);
      }
    }
  }
  //Now terminals that survived the pruning above
  foreach (Terminal term in _context.Compiler.Grammar.Terminals) {
    if (inputKeys.Contains(term.Key)) {
      inputElements.Add(term);
    }
  }
  // Report display names where available, raw names otherwise.
  StringSet result = new StringSet();
  foreach (BnfTerm term in inputElements) {
    result.Add(string.IsNullOrEmpty(term.DisplayName)? term.Name : term.DisplayName);
  }
  return(result);
}
} //method
#endregion

#region Calculating Tail Firsts
// For each LR0 item, computes TailFirsts — the terminals that can begin the
// part of the production after the item's next element — and TailIsNullable,
// whether that tail can derive the empty string. Productions are scanned
// right-to-left, accumulating firsts until a non-nullable element resets the
// accumulator.
private void CalculateTailFirsts() {
  foreach (Production prod in Data.Productions) {
    StringSet accumulatedFirsts = new StringSet();
    bool allNullable = true;
    //We are going backwards in LR0Items list
    for (int i = prod.LR0Items.Count - 1; i >= 0; i--) {
      LR0Item item = prod.LR0Items[i];
      if (i >= prod.LR0Items.Count - 2) {
        //Last and before last items have empty tails
        item.TailIsNullable = true;
        item.TailFirsts.Clear();
        continue;
      }
      BnfTerm term = prod.RValues[item.Position + 1]; //Element after-after-dot
      NonTerminal ntElem = term as NonTerminal;
      if (ntElem == null || !ntElem.Nullable) //term is a terminal or non-nullable NonTerminal
      //term is not nullable, so we clear all old firsts and add this term
      {
        accumulatedFirsts.Clear();
        allNullable = false;
        item.TailIsNullable = false;
        if (ntElem == null) {
          item.TailFirsts.Add(term.Key); //term is terminal so add its key
          accumulatedFirsts.Add(term.Key);
        } else {
          item.TailFirsts.AddRange(ntElem.Firsts); //nonterminal
          accumulatedFirsts.AddRange(ntElem.Firsts);
        }
        continue;
      }
      //if we are here, then ntElem is a nullable NonTerminal. We add its Firsts to the accumulator.
      accumulatedFirsts.AddRange(ntElem.Firsts);
      item.TailFirsts.AddRange(accumulatedFirsts);
      item.TailIsNullable = allNullable;
    } //for i
  } //foreach prod
} //method
// Grammar sanity checks: warns about non-terminals whose whole rule is a
// single other non-terminal (likely duplication), and verifies that AST node
// types referenced without a NodeCreator expose a NodeArgs constructor.
private void ValidateAll() {
  var singleNtRules = new StringSet();
  foreach (NonTerminal nt in Data.NonTerminals) {
    if (nt == Data.AugmentedRoot)
      continue; // the augmented root does not count
    BnfExpressionData data = nt.Rule.Data;
    if (data.Count == 1 && data[0].Count == 1 && data[0][0] is NonTerminal)
      singleNtRules.Add(nt.Name);
  }
  if (singleNtRules.Count > 0) {
    string slist = TextUtils.Cleanup(singleNtRules.ToString(", "));
    AddError("Warning: Possible non-terminal duplication. The following non-terminals have rules containing a single non-terminal: \r\n {0}. \r\n" + "Consider merging two non-terminals; you may need to use 'nt1 = nt2;' instead of 'nt1.Rule=nt2'.", slist);
  }
  // Check constructors of node types referenced by non-terminals that don't
  // use the NodeCreator delegate.
  var ctorArgTypes = new Type[] { typeof(NodeArgs) };
  foreach (NonTerminal nt in Data.NonTerminals) {
    if (nt.NodeCreator != null || nt.NodeType == null)
      continue;
    object ci = nt.NodeType.GetConstructor(ctorArgTypes);
    if (ci == null)
      AddError(@"AST Node class {0} referenced by non-terminal {1} does not have a constructor for automatic node creation. Provide a constructor with a single NodeArgs parameter, or use NodeCreator delegate property in NonTerminal.", nt.NodeType, nt.Name);
  }
}
private void ParseFile(string physicalPath, VirtualPath virtualPath) {
  // Determine the file used for the circular references checker. Normally,
  // we use the virtualPath, but we use the physical path if it is specified,
  // as is the case for <!-- #include file="foo.inc" -->
  string fileToReferenceCheck =
      physicalPath != null ? physicalPath : virtualPath.VirtualPathString;
  // Fail fast on circular include chains.
  if (_circularReferenceChecker.Contains(fileToReferenceCheck))
    throw new HttpException(SR.GetString(SR.Circular_include));
  // Track the current file for the duration of the parse.
  _circularReferenceChecker.Add(fileToReferenceCheck);
  try {
    // Open a TextReader either from the physical or virtual path.
    if (physicalPath != null) {
      using (TextReader reader = Util.ReaderFromFile(physicalPath, virtualPath)) {
        ParseReader(reader);
      }
    }
    else {
      using (Stream stream = virtualPath.OpenFile()) {
        TextReader reader = Util.ReaderFromStream(stream, virtualPath);
        ParseReader(reader);
      }
    }
  }
  finally {
    // Always untrack the current file, even when parsing throws.
    _circularReferenceChecker.Remove(fileToReferenceCheck);
  }
}
// Warns about non-terminals whose entire rule is a single other non-terminal —
// usually an accidental duplication the grammar author should merge.
private void ValidateAll() {
  var singleNtRules = new StringSet();
  foreach (NonTerminal nt in Data.NonTerminals) {
    if (nt == Data.AugmentedRoot)
      continue; // the augmented root does not count
    BnfExpressionData data = nt.Rule.Data;
    if (data.Count == 1 && data[0].Count == 1 && data[0][0] is NonTerminal)
      singleNtRules.Add(nt.Name);
  }
  if (singleNtRules.Count == 0)
    return;
  string slist = TextUtils.Cleanup(singleNtRules.ToString(", "));
  AddError("Warning: Possible non-terminal duplication. The following non-terminals have rules containing a single non-terminal: \r\n {0}. \r\n" + "Consider merging two non-terminals; you may need to use 'nt1 = nt2;' instead of 'nt1.Rule=nt2'.", slist);
}
public override DataTable GetTableConstraints() {
  // The "ForeignKeys" schema collection gives us only foreign keys.
  var constraints = GetSchemaCollection("ForeignKeys");
  // Each PK in SQLite is 'supported' by an index named 'sqlite_autoindex_*';
  // scan the index columns for such names and synthesize PRIMARY KEY rows.
  var indexColumns = GetIndexColumns();
  var seenTables = new StringSet(); // avoid one PK row per indexed column
  foreach (DataRow row in indexColumns.Rows) {
    var indexName = row.GetAsString("INDEX_NAME");
    if (!IsPrimaryKeyIndex(indexName))
      continue;
    var tableName = row.GetAsString("TABLE_NAME");
    if (seenTables.Contains(tableName))
      continue; // already emitted a PK row for this table
    seenTables.Add(tableName);
    // It is an auto-index for a PK; create a constraint row for it.
    var pkRow = constraints.NewRow();
    pkRow["TABLE_NAME"] = tableName;
    pkRow["CONSTRAINT_NAME"] = indexName;
    pkRow["CONSTRAINT_TYPE"] = "PRIMARY KEY";
    constraints.Rows.Add(pkRow);
  }
  return constraints;
}
//TODO: This needs more work. Currently it reports all individual symbols most of the time, in a message like
// "Syntax error, expected: + - < > = ..."; the better method is to group operator symbols under one alias "operator".
// The reason is that code picks expected key list at current(!) state only,
// slightly tweaking it for non-terminals, without exploring Reduce roots
// It is quite difficult to discover grouping non-terminals like "operator" in current structure.
// One possible solution would be to introduce "ExtendedLookaheads" in ParserState which would include
// all NonTerminals that might follow the current position. This list would be calculated at start up,
// in addition to normal lookaheads.
#endregion

// Builds the set of symbol names (display names where available) the parser
// would accept in the current state — used to compose syntax-error messages.
private StringSet GetCurrentExpectedSymbols() {
  var expectedElements = new BnfTermList();
  var actionKeys = new StringSet();
  actionKeys.AddRange(_currentState.Actions.Keys);
  // Non-terminals first. One with a display name stands in for all of its
  // "Firsts" symbols (which are removed); one without a display name is
  // dropped because its Firsts are already present individually.
  foreach (NonTerminal nt in _context.Compiler.Grammar.NonTerminals) {
    if (!actionKeys.Contains(nt.Key))
      continue;
    if (string.IsNullOrEmpty(nt.DisplayName)) {
      actionKeys.Remove(nt.Key);
    } else {
      expectedElements.Add(nt);
      foreach (string first in nt.Firsts)
        actionKeys.Remove(first);
    }
  }
  // Then the terminals that survived the pruning above.
  foreach (Terminal term in _context.Compiler.Grammar.Terminals) {
    if (actionKeys.Contains(term.Key))
      expectedElements.Add(term);
  }
  // Report display names where available, raw names otherwise.
  var symbols = new StringSet();
  foreach (BnfTerm element in expectedElements)
    symbols.Add(string.IsNullOrEmpty(element.DisplayName) ? element.Name : element.DisplayName);
  return symbols;
}
// Assigns default aliases to duplicate output column names so each SQL output
// expression ends up uniquely named.
protected void CheckAliases(TranslationContext context, IEnumerable<Expression> outExpressions) {
  var seenNames = new StringSet();
  foreach (var sqlExpr in outExpressions.OfType<SqlExpression>().ToList()) {
    var column = sqlExpr as ColumnExpression;
    //if (isView) col.Alias = col.Name;
    string name = column == null ? null : (column.Alias ?? column.Name);
    if (name == null)
      continue;
    if (seenNames.Contains(name)) {
      // Duplicate: record the name, then synthesize a unique alias.
      seenNames.Add(name);
      sqlExpr.Alias = CreateDefaultAlias(sqlExpr, seenNames);
    } else {
      seenNames.Add(name);
    }
  }
}
// Builds the allow-list containing the single approved telemetry function name.
private static StringSet BuildApprovedFunctionsStringSet() {
  var approvedFunctions = new StringSet();
  approvedFunctions.Add("_TlgWrite");
  return approvedFunctions;
}
// For each LR0 item, computes TailFirsts (the terminals that can begin the
// portion of the production following the item's next element) and
// TailIsNullable (whether that tail can derive the empty string). Scans each
// production right-to-left, accumulating firsts until a non-nullable element
// resets the accumulator.
private void CalculateTailFirsts() {
  foreach (Production prod in _data.Productions) {
    StringSet accumulatedFirsts = new StringSet();
    bool allNullable = true;
    // Walk the LR0 items from the end of the production backwards.
    for (int i = prod.LR0Items.Count - 1; i >= 0; i--) {
      LR0Item item = prod.LR0Items[i];
      if (i >= prod.LR0Items.Count - 2) {
        // The last and next-to-last items have empty tails.
        item.TailIsNullable = true;
        item.TailFirsts.Clear();
        continue;
      }
      GrammarTerm term = prod.RValues[item.Position + 1]; // element after the item's next element
      NonTerminal ntElem = term as NonTerminal;
      if (ntElem == null || !ntElem.Nullable) {
        // Non-nullable element (terminal or non-nullable non-terminal):
        // reset the accumulator to this element's firsts.
        accumulatedFirsts.Clear();
        allNullable = false;
        item.TailIsNullable = false;
        if (ntElem == null) {
          item.TailFirsts.Add(term.Key); // terminal — its own key is the first
          accumulatedFirsts.Add(term.Key);
        } else {
          item.TailFirsts.AddRange(ntElem.Firsts);
          accumulatedFirsts.AddRange(ntElem.Firsts);
        }
        continue;
      }
      // Nullable non-terminal: extend the running accumulation.
      accumulatedFirsts.AddRange(ntElem.Firsts);
      item.TailFirsts.AddRange(accumulatedFirsts);
      item.TailIsNullable = allNullable;
    }
  }
}
// Warns about non-terminals whose entire rule is a single other non-terminal —
// usually an accidental duplication the grammar author should merge.
// Fix: the warning message contains a {0} placeholder but no argument was
// supplied, so the offending non-terminal names were never reported; pass the
// joined list (as the sibling ValidateAll implementations do).
private void ValidateAll() {
  StringSet ntList = new StringSet();
  foreach (NonTerminal nt in _data.NonTerminals) {
    if (nt == _data.AugmentedRoot)
      continue; // the augmented root does not count
    BnfExpressionData data = nt.Rule.Data;
    if (data.Count == 1 && data[0].Count == 1 && data[0][0] is NonTerminal)
      ntList.Add(nt.Name);
  }
  if (ntList.Count > 0) {
    AddError("Warning: Possible non-terminal duplication. The following non-terminals have rules containing a single non-terminal: \r\n {0}. \r\n" +
        "Consider merging two non-terminals; you may need to use 'nt1 = nt2;' instead of 'nt1.Rule=nt2'.",
        ntList.ToString(", "));
  }
}
// A newly added string must be reported as contained.
public void ShouldAddStringAndContainsReturnTrue() {
  var set = new StringSet();
  set.Add("Test");
  Assert.IsTrue(set.Contains("Test"));
}
// Reads a serialized string-set property from XML: a PROPERTY_ID element
// containing a sequence of ITEM_ID elements, adding each item's text to 'set'.
private static void LoadStringSet(StringSet set, XmlReader reader) {
  reader.ReadStartElement(PROPERTY_ID);
  while (reader.IsStartElement(ITEM_ID)) {
    // An empty element (<item/>) has no separate end tag to consume.
    bool isEmptyItem = reader.IsEmptyElement;
    reader.ReadStartElement();
    string item = reader.ReadString();
    set.Add(item);
    if (!isEmptyItem)
      reader.ReadEndElement();
  }
}
// Adding a null string must be tolerated (no exception expected).
public void Null() {
  var set = new StringSet();
  set.Add((string)null);
}