/// <summary>
/// Reads a number of text lines and indexes their words by character (via
/// InitializeWordsByChar / GetWords / wordsByChar, defined elsewhere), then
/// for each query word prints how many indexed words contain every character
/// of the query, as "word -> count".
/// </summary>
static void Main()
{
    InitializeWordsByChar();
    // NOTE(review): ToLower() on the count line is a no-op on digits — presumably a copy/paste leftover.
    int textLinesCount = int.Parse(Console.ReadLine().ToLower());
    for (int i = 0; i < textLinesCount; i++)
    {
        GetWords(Console.ReadLine().ToLower());
    }
    int wordsCount = int.Parse(Console.ReadLine());
    for (int i = 0; i < wordsCount; i++)
    {
        string word = Console.ReadLine();
        string wordLowerCase = word.ToLower();
        // Start with every word containing the first character, then narrow
        // the candidate set with each remaining character of the query.
        HashSet<string> currentWords = new HashSet<string>();
        currentWords.UnionWith(wordsByChar[wordLowerCase[0]]);
        for (int j = 1; j < wordLowerCase.Length; j++)
        {
            currentWords.IntersectWith(wordsByChar[wordLowerCase[j]]);
        }
        Console.WriteLine("{0} -> {1}", word, currentWords.Count);
    }
}
/// <summary>
/// Console demo of HashSet&lt;char&gt; built from strings: membership tests,
/// iteration, and the in-place set operations IntersectWith, ExceptWith and
/// SymmetricExceptWith. The inline expected outputs reflect one observed run;
/// NOTE(review): HashSet iteration order is unspecified, so the exact printed
/// character order is not guaranteed by the API.
/// </summary>
static void Main(string[] args)
{
    // Distinct characters of the phrase (includes the space).
    var letters = new HashSet<char>("the quick brown fox");
    Console.WriteLine(letters.Contains('t')); // true
    Console.WriteLine(letters.Contains('j')); // false
    foreach (char c in letters)
    {
        Console.Write(c); // "the quickbrownfx" in the observed run
    }
    // Keep only the vowels.
    letters.IntersectWith("aeiou");
    foreach (char c in letters)
    {
        Console.Write(c); // "euio" in the observed run
    }
    // Remove the vowels.
    var letters2 = new HashSet<char>("the quick brown fox");
    letters2.ExceptWith("aeiou");
    foreach (char c in letters2)
    {
        Console.Write(c); // "th qckbrwnfx" in the observed run
    }
    // Characters appearing in exactly one of the two phrases.
    var letters3 = new HashSet<char>("the quick brown fox");
    letters3.SymmetricExceptWith("the lazy brown fox");
    foreach (char c in letters3)
    {
        Console.Write(c); // "quicklazy" in the observed run
    }
}
/// <summary>
/// One-shot Loaded handler: scrapes the pieci.lv channel list and channel
/// colors, fills the channel list control, and drops previously selected
/// channels that no longer exist. (Comments translated from Latvian.)
/// </summary>
private async void Window_Loaded(object sender, EventArgs e)
{
    // Unsubscribe immediately so this runs only once.
    this.Loaded-=Window_Loaded;
    HashSet<uint> allIds=new HashSet<uint>();
    var selectedIds=new HashSet<uint>(Settings.Default.Channels.Select(c => c.Id));
    Color defaultColor=0x99A2A8.ToColor();
    // In rare cases the playing list contains a channel that is not shown with colored links.
    // Get the basic data: name, short description and identifier.
    foreach (Match match in channelRx.Matches(await client.DownloadStringTaskAsync("http://live.pieci.lv/")))
    {
        string idString=match.Groups["id"].Value;
        uint id=idString.Length == 0 ? PieciStation.EmptyId:uint.Parse(idString);
        channels.Add(new ChannelItem(id, match.Groups["caption"].Value, defaultColor, match.Groups["description"].Value, match.Groups["name"].Value, selectedIds.Contains(id)));
        allIds.Add(id);
    }
    // Get the colors. For most channels they are known (and icons are therefore bundled),
    // but some seasonal channels cannot be predicted, so colors are extracted from the player page.
    foreach (Match match in colorRx.Matches(await client.DownloadStringTaskAsync("http://fm.pieci.lv")))
    {
        uint id=uint.Parse(match.Groups["id"].Value);
        channels.Single(c => c.Id == id).SetColor(match.Groups["color"].Value);
    }
    list.ItemsSource=channels;
    // Remove vanished channels from the saved selection.
    int beforeCount=selectedIds.Count;
    selectedIds.IntersectWith(allIds);
    HasChanges=beforeCount != selectedIds.Count;
}
/// <summary>
/// Returns a new set containing the characters that appear in both
/// sequences; neither input is modified.
/// </summary>
public static HashSet<char> Intersection(IEnumerable<char> a, IEnumerable<char> b)
{
    var common = new HashSet<char>(a);
    var membersOfB = new HashSet<char>(b);
    // Keep only the characters also present in b.
    common.RemoveWhere(c => !membersOfB.Contains(c));
    return common;
}
/// <summary>
/// Dice coefficient over character-bigram sets: 2*|X∩Y| / (|X|+|Y|).
/// NOTE: yields NaN (0/0) when both inputs are shorter than two characters.
/// The (misspelled) identifier is preserved to keep the interface intact.
/// </summary>
private double DicesCoeffienct(string in1, string in2)
{
    var bigramsA = new HashSet<string>();
    for (int i = 0; i + 1 < in1.Length; i++)
    {
        bigramsA.Add(in1.Substring(i, 2));
    }

    var bigramsB = new HashSet<string>();
    for (int i = 0; i + 1 < in2.Length; i++)
    {
        bigramsB.Add(in2.Substring(i, 2));
    }

    var shared = new HashSet<string>(bigramsA);
    shared.IntersectWith(bigramsB);

    return (2.0 * shared.Count) / (bigramsA.Count + bigramsB.Count);
}
/// <summary>
/// Determines whether the interpreter factory contains the specified
/// modules.
/// </summary>
/// <returns>The names of the modules that were found.</returns>
public static async Task<HashSet<string>> FindModulesAsync(this IPythonInterpreterFactory factory, params string[] moduleNames)
{
    var withDb = factory as PythonInterpreterFactoryWithDatabase;
    if (withDb != null && withDb.IsCurrent)
    {
        // Fast path: the completion database is up to date, query it directly.
        var db = withDb.GetCurrentDatabase();
        var set = new HashSet<string>(moduleNames.Where(m => db.GetModule(m) != null));
        return set;
    }

    var expected = new HashSet<string>(moduleNames);
    if (withDb != null)
    {
        // Database exists but is stale: use cached search paths when available,
        // otherwise ask the interpreter for its search paths.
        var paths = PythonTypeDatabase.GetCachedDatabaseSearchPaths(withDb.DatabasePath) ??
            await PythonTypeDatabase.GetUncachedDatabaseSearchPathsAsync(withDb.Configuration.InterpreterPath).ConfigureAwait(false);
        var db = PythonTypeDatabase.GetDatabaseExpectedModules(withDb.Configuration.Version, paths)
            .SelectMany()
            .Select(g => g.ModuleName);
        // Keep only the requested modules the database is expected to contain.
        expected.IntersectWith(db);
        return expected;
    }

    // No database at all: scan the interpreter's Lib directory on a worker
    // thread, stopping early once every requested module has been found.
    return await Task.Run(() => {
        var result = new HashSet<string>();
        foreach (var mp in ModulePath.GetModulesInLib(factory))
        {
            if (expected.Count == 0)
            {
                break;
            }
            if (expected.Remove(mp.ModuleName))
            {
                result.Add(mp.ModuleName);
            }
        }
        return result;
    });
}
/// <summary>
/// For each query word (up to 10000), looks up its rectangles via getPoint,
/// intersects the hit sets returned by the per-rectangle R-trees (rts, defined
/// elsewhere), and prints the word with its surviving candidate count and the
/// elapsed time. Returns the total surviving count across all words.
/// NOTE(review): the candidate set is seeded with ALL query words rather than
/// with the first tree's results — presumably intentional, but worth confirming.
/// </summary>
static long searchRtrees(ArrayList words)
{
    long word_c = 0;
    int ii = 0;
    foreach (String w in words)
    {
        DateTime tt0 = DateTime.Now;
        List<Rectangle> rects = getPoint(w, 1);
        // Candidate set starts as every word and is narrowed per rectangle.
        HashSet<string> allwords = new HashSet<string>();
        foreach (string s in words) allwords.Add(s);
        int i = 0;
        foreach (Rectangle r in rects)
        {
            // rts[i] is the R-tree queried with the i-th rectangle of this word.
            List<string> objects = rts[i].Intersects(r);
            i++;
            HashSet<string> o = new HashSet<string>();
            foreach (string s in objects)
            {
                o.Add(s);
            }
            allwords.IntersectWith(o);
        }
        TimeSpan tts1 = DateTime.Now - tt0;
        Console.WriteLine(w + " :" + allwords.Count + " " + tts1);
        word_c += allwords.Count;
        ii++;
        if (ii > 10000) break;
    }
    return word_c;
}
/// <summary>
/// Calls HashSet&lt;T&gt;.IntersectWith with a null argument, which throws
/// ArgumentNullException; the test framework attribute (declared elsewhere)
/// is expected to assert that the exception occurs.
/// </summary>
public void IntersectWithNullTest()
{
    var names = new HashSet<string> { "Pesho", "Gosho" };
    names.IntersectWith(null);
}
/// <summary>
/// Builds a letter -&gt; set-of-words index from the input text, then for each
/// query word prints "word -> count", where count is the number of indexed
/// words containing every letter of the query.
/// Fix: removed a leftover empty debug branch (if (w == 'a') { }) and
/// commented-out code from the indexing loop; behavior is unchanged.
/// </summary>
static void Main()
{
#if DEBUG
    Console.SetIn(new System.IO.StreamReader("../../input.txt"));
#endif
    // Maps each lowercase letter to the set of words containing it.
    Dictionary<string, HashSet<string>> words = new Dictionary<string, HashSet<string>>();
    for (char i = 'a'; i <= 'z'; i++)
    {
        words[i.ToString()] = new HashSet<string>();
    }

    int n = int.Parse(Console.ReadLine());
    for (int i = 0; i < n; i++)
    {
        // Append a non-letter sentinel so the final word is always flushed.
        string input = Console.ReadLine().ToLower() + " ";
        string word = string.Empty;
        for (int j = 0; j < input.Length; j++)
        {
            if (input[j] >= 'a' && input[j] <= 'z')
            {
                word += input[j];
            }
            else if (word.Length > 0)
            {
                // Register the completed word under every letter it contains.
                foreach (var w in word)
                {
                    words[w.ToString()].Add(word);
                }
                word = string.Empty;
            }
        }
    }

    n = int.Parse(Console.ReadLine());
    for (int i = 0; i < n; i++)
    {
        string input = Console.ReadLine();
        string inputToLower = input.ToLower();
        // Words containing the first query letter, narrowed by each subsequent letter.
        HashSet<string> result = new HashSet<string>(words[inputToLower[0].ToString()]);
        for (int j = 1; j < inputToLower.Length; j++)
        {
            result.IntersectWith(words[inputToLower[j].ToString()]);
        }
        Console.WriteLine("{0} -> {1}", input, result.Count);
    }
}
/// <summary>
/// Set-based distance over the Lucene token sets of the two strings.
/// Returns 1 - |A∩B|² / (|A|·|B|), i.e. one minus the squared cosine
/// similarity of the token sets.
/// NOTE(review): the intersection count is squared rather than divided by
/// sqrt(|A|·|B|) — this looks deliberate but is worth confirming.
/// Produces NaN when either token set is empty (0/0).
/// </summary>
public double GetDistance(string a, string b)
{
    HashSet<string> seta = new HashSet<string>(StringUtils.GetLuceneTokens(a));
    int na = seta.Count;
    HashSet<string> setb = StringUtils.GetLuceneTokens(b);
    int nb = setb.Count;
    // seta becomes the intersection in place.
    seta.IntersectWith(setb);
    return 1.0d-((double) seta.Count*seta.Count/(na*nb));
}
/// <summary>
/// Reads two lines and prints "yes" when they share more than one distinct
/// character, otherwise "no".
/// </summary>
static void Main()
{
    var first = new HashSet<char>(Console.ReadLine().ToCharArray());
    var second = new HashSet<char>(Console.ReadLine().ToCharArray());

    // first becomes the set of characters common to both inputs.
    first.IntersectWith(second);

    string verdict = first.Count > 1 ? "yes" : "no";
    Console.WriteLine(verdict);
}
/// <summary>
/// Returns |A∪B| - |A∩B| over the Lucene token sets of the two strings,
/// i.e. the size of their symmetric difference.
/// NOTE(review): the trailing commented-out "/union.Count" suggests this was
/// once a normalized (Jaccard-style) distance; the normalization is disabled.
/// </summary>
public double GetDistance(string a, string b)
{
    List<string> tokensa = new List<string>(StringUtils.GetLuceneTokens(a));
    List<string> tokensb = new List<string>(StringUtils.GetLuceneTokens(b));
    HashSet<string> inter = new HashSet<string>(tokensa);
    inter.IntersectWith(tokensb);
    HashSet<string> union = new HashSet<string>(tokensa);
    union.UnionWith(tokensb);
    return (double) (union.Count-inter.Count);///union.Count;
}
/// <summary>
/// Jaccard distance 1 - |A∩B| / |A∪B| over the Lucene token sets of the
/// two strings.
/// </summary>
public double GetDistance(string a, string b)
{
    var tokensA = new HashSet<string>(StringUtils.GetLuceneTokens(a));
    var tokensB = new HashSet<string>(StringUtils.GetLuceneTokens(b));

    var overlap = new HashSet<string>(tokensA);
    overlap.IntersectWith(tokensB);

    var combined = new HashSet<string>(tokensA);
    combined.UnionWith(tokensB);

    return 1.0d - ((double)overlap.Count / combined.Count);
}
/// <summary>
/// Accumulates the size of the symmetric difference between expected and
/// result into the running score; d counts processed pairs (both fields are
/// declared on the enclosing class).
/// </summary>
public override void Process(IEnumerable<string> expected, IEnumerable<string> result)
{
    d++;
    // delta = expected ∪ result
    HashSet<string> delta = new HashSet<string>(expected);
    delta.UnionWith(result);
    // section = expected ∩ result
    HashSet<string> section = new HashSet<string>(expected);
    section.IntersectWith(result);
    // (A ∪ B) \ (A ∩ B) = symmetric difference.
    delta.ExceptWith(section);
    score += (double) delta.Count;
}
/// <summary>
/// Adds the precision |result ∩ expected| / |result| to the running score
/// for non-empty result sets; d counts the pairs that contributed.
/// </summary>
public override void Process(IEnumerable<string> expected, IEnumerable<string> result)
{
    var retrieved = new HashSet<string>(result);
    int retrievedCount = retrieved.Count;
    if (retrievedCount == 0)
    {
        // Empty result contributes nothing (guard clause instead of a wrapping if).
        return;
    }
    d++;
    retrieved.IntersectWith(expected);
    score += (double)retrieved.Count / retrievedCount;
}
/// <summary>
/// Returns a new set containing the elements present in both input sets;
/// neither argument is modified.
/// </summary>
private static HashSet<int> IntersectSets(HashSet<int> set1, HashSet<int> set2)
{
    // Copy via the constructor instead of an element-by-element loop.
    HashSet<int> intersection = new HashSet<int>(set1);
    intersection.IntersectWith(set2);
    return intersection;
}
/// <summary>
/// Two unification results are equal when they succeeded the same way and
/// carry exactly the same set of substitutions.
/// BUG FIX: the previous intersect-and-compare-count check also returned
/// true when the other unifier's substitutions were a strict superset of
/// this one's (making Equals asymmetric); SetEquals requires exact set
/// equality in both directions.
/// </summary>
public override bool Equals(object obj)
{
    UnificationResult otherUnifier = obj as UnificationResult;
    if (otherUnifier == null) return false;
    var substitutionSetThis = new HashSet<Substitution>(Unifier);
    return Succeed == otherUnifier.Succeed
        && substitutionSetThis.SetEquals(otherUnifier.Unifier);
}
/// <summary>
/// Reads tagged lines from "todos2" (parsed by ParseTags, defined elsewhere),
/// then for every string distance metric builds a 2D histogram relating tag
/// overlap (Dice coefficient on tag lists) to the metric's normalized text
/// distance, written as a tab-separated table to "DistanceMetricResults.dat".
/// NOTE(review): the output file is re-created with FileMode.Create inside
/// the metric loop, so each metric overwrites the previous one's table —
/// confirm this is intended.
/// </summary>
public static int Main(string[] args)
{
    List<List<string>> tags = new List<List<string>>();
    List<string> lines = new List<string>();
    // Load the corpus: each line yields its tag list and its bare text.
    Stream s = File.Open("todos2",FileMode.Open,FileAccess.Read);
    TextReader tr = new StreamReader(s);
    string line = tr.ReadLine();
    string text;
    while(line != null)
    {
        tags.Add(ParseTags(line,out text));
        lines.Add(text);
        line = tr.ReadLine();
    }
    tr.Close();
    s.Close();
    StringDistanceMetric[] metrics = {new LevenshteinWordDistanceMetric(),new CosineDistanceMetric(),new DiceDistanceMetric(),new JacardDistanceMetric(), new EuclidDistanceMetric()};
    foreach(StringDistanceMetric metric in metrics)
    {
        // First pass: find the metric's maximum pairwise distance for normalization.
        double max = double.NegativeInfinity;
        for(int i = 0; i < lines.Count; i++)
        {
            for(int j = i+1; j < lines.Count; j++)
            {
                max = Math.Max((double) metric.GetDistance(lines[i],lines[j]),max);
            }
        }
        s = File.Open("DistanceMetricResults.dat",FileMode.Create,FileAccess.Write);
        TextWriter tw = new StreamWriter(s);
        HashSet<string> xdiff = new HashSet<string>();
        // Histogram resolution: ndx buckets of tag similarity, ndy of text distance.
        int ndx = 20;
        int ndy = 40;
        int[] vari = new int[ndx+1];
        int[,] chart = new int[ndx+1,ndy+1];
        for(int i = 0; i < lines.Count; i++)
        {
            for(int j = i+1; j < lines.Count; j++)
            {
                // Dice coefficient of the two tag lists: 2·|A∩B| / (|A|+|B|).
                xdiff.Clear();
                xdiff.UnionWith(tags[i]);
                xdiff.IntersectWith(tags[j]);
                double xdistance = 2.0d*xdiff.Count/(tags[i].Count+tags[j].Count);
                double distance = (double) metric.GetDistance(lines[i],lines[j]);
                chart[(int) Math.Floor(xdistance*ndx),(int) Math.Floor(distance*ndy/max)]++;
                vari[(int) Math.Floor(xdistance*ndx)]++;
            }
        }
        for(int i = 0; i <= ndx; i++)
        {
            // Avoid division by zero for empty buckets.
            if(vari[i] == 0)
            {
                vari[i] = 1;
            }
            for(int j = 0; j <= ndy; j++)
            {
                tw.WriteLine("{0}\t{1}\t{2}",((double) i/ndx).ToString(nfi),((double) j*max/ndy).ToString(nfi),((double) chart[i,j]/vari[i]).ToString(nfi));
            }
            tw.WriteLine();
        }
        tw.Close();
        s.Close();
    }
    return 0;
}
/// <summary>
/// Console walkthrough of HashSet&lt;int&gt;: Add, iteration (via PrintYears,
/// defined elsewhere), Contains, Remove, UnionWith, IntersectWith and Clear,
/// printing the count after each step.
/// </summary>
internal static void Main()
{
    string decorationLine = new string('-', Console.WindowWidth);
    Console.Write(decorationLine);
    Console.WriteLine("***Presenting the functionality of the data structure 'Hash set'***");
    Console.Write(decorationLine);
    HashSet<int> years = new HashSet<int>();
    Console.WriteLine("---Add operation---");
    years.Add(1990);
    years.Add(1992);
    years.Add(2013);
    years.Add(2016);
    years.Add(2022);
    Console.WriteLine("Count = " + years.Count);
    Console.WriteLine();
    Console.WriteLine("---Iterator functionality---");
    PrintYears(years);
    Console.WriteLine();
    Console.WriteLine("---Contains operation---");
    Console.WriteLine("Does years set contain {0}? - {1}", 1992, years.Contains(1992));
    Console.WriteLine("Does years set contain {0}? - {1}", 2012, years.Contains(2012));
    Console.WriteLine();
    Console.WriteLine("---Remove operation---");
    // Removing an absent element is a no-op that returns false.
    Console.WriteLine("Is {0} removed from years set? - {1}", 1996, years.Remove(1996));
    Console.WriteLine("Years set count: " + years.Count);
    Console.WriteLine("Is {0} removed from years set? - {1}", 1990, years.Remove(1990));
    Console.WriteLine("Years set count: " + years.Count);
    Console.WriteLine();
    Console.WriteLine("---UnionWith operation---");
    // Union silently skips the elements already present in the set.
    int[] yearsToUnionWith = new int[] { 2005, 2009, 2021, 2016, 1992, 2013 };
    years.UnionWith(yearsToUnionWith);
    Console.WriteLine("All years after a union with: {0}", string.Join(", ", yearsToUnionWith));
    PrintYears(years);
    Console.WriteLine("Years set count: " + years.Count);
    Console.WriteLine();
    Console.WriteLine("---IntersectWith operation---");
    // Intersection keeps only the years present in both collections.
    int[] yearsToIntersectWith = new int[] { 2045, 2025, 2021, 2016, 1999, 2017, 2013 };
    years.IntersectWith(yearsToIntersectWith);
    Console.WriteLine("All years after an intersect with: {0}", string.Join(", ", yearsToIntersectWith));
    PrintYears(years);
    Console.WriteLine("Years set count: " + years.Count);
    Console.WriteLine();
    Console.WriteLine("---Clear operation---");
    years.Clear();
    Console.WriteLine("Years count after clearing: " + years.Count);
}
/// <summary>
/// IntersectWith keeps the shared elements and drops the rest, in place.
/// </summary>
public void TestIntersectWith ()
{
    var left = new HashSet<int> { 1, 2 };
    var right = new HashSet<int> { 1 };

    left.IntersectWith (right);

    Assert.IsTrue (left.Contains (1));
    Assert.IsFalse (left.Contains (2));
    Assert.AreEqual (1, left.Count);
}
/// <summary>
/// Two limit statements are equal when their type definitions match and they
/// constrain exactly the same set of variables.
/// BUG FIX: the previous intersect-and-compare-count check returned true
/// whenever this statement's variables were a subset of the other's (an
/// asymmetric Equals); SetEquals enforces exact equality of the two sets.
/// </summary>
public override bool Equals(object other)
{
    var typedOther = other as TrsLimitStatement;
    if (typedOther == null || !typedOther.TypeDefinition.Equals(this.TypeDefinition)) return false;
    var thisVars = new HashSet<TrsVariable>(this.Variables);
    return thisVars.SetEquals(typedOther.Variables);
}
/// <summary>
/// Distance = sqrt(|symmetric difference|) of the Lucene token sets of the
/// two strings.
/// </summary>
public double GetDistance(string a, string b)
{
    var tokensA = new HashSet<string>(StringUtils.GetLuceneTokens(a));
    var tokensB = new HashSet<string>(StringUtils.GetLuceneTokens(b));

    // Union minus intersection = symmetric difference.
    var symmetricDifference = new HashSet<string>(tokensA);
    symmetricDifference.UnionWith(tokensB);

    var shared = new HashSet<string>(tokensA);
    shared.IntersectWith(tokensB);

    symmetricDifference.ExceptWith(shared);
    return Math.Sqrt(symmetricDifference.Count);
}
/// <summary>
/// Dice coefficient 2*|X∩Y| / (|X|+|Y|) over the bigram sets of the two
/// arguments (bigrams produced by GetBigramms, defined elsewhere).
/// </summary>
private static double ComputeDiceCoefficient(string arg1, string arg2)
{
    var bigramsLeft = GetBigramms(arg1);
    var bigramsRight = GetBigramms(arg2);

    var shared = new HashSet<string>(bigramsLeft);
    shared.IntersectWith(bigramsRight);

    return 2.0 * shared.Count / (bigramsLeft.Count + bigramsRight.Count);
}
/// <summary>
/// Accumulates the Dice coefficient 2*|E∩R| / (|E|+|R|) between the expected
/// set and the result list into score; d counts processed pairs. Note that
/// |R| is the raw list length, so duplicates in result are counted.
/// </summary>
public override void Process(IEnumerable<string> expected, IEnumerable<string> result)
{
    var expectedSet = new HashSet<string>(expected);
    int expectedCount = expectedSet.Count;

    var resultList = new List<string>(result);
    int resultCount = resultList.Count;

    expectedSet.IntersectWith(resultList);

    d++;
    score += 2.0d * expectedSet.Count / (expectedCount + resultCount);
}
/// <summary>
/// Gets all the names of the columns common to all tables, in no particular order.
/// </summary>
/// <returns>The column names shared by every table; empty when there are no tables.</returns>
public HashSet<string> GetCommonColumnNamesUnordered()
{
    if (_tables.Count == 0)
    {
        return new HashSet<string>();
    }

    // Seed with the first table's columns, then narrow by each remaining table.
    var commonColumnNames = new HashSet<string>(_tables[0].DataColumns.GetColumnNames());
    for (int tableIndex = 1; tableIndex < _tables.Count; tableIndex++)
    {
        commonColumnNames.IntersectWith(_tables[tableIndex].DataColumns.GetColumnNames());
    }
    return commonColumnNames;
}
/**
 * and :
 * Returns a new Set of nodes which is the intersection of the @param predicates results.
 * Example: let predicate_1 output be {A,B,C} and predicate_2 output be {A,D};
 * then the "and" result is {A}. With no predicates the result is empty.
 * BUG FIX: the previous implementation intersected into an accumulator that
 * STARTED EMPTY, so the result was always the empty set (contradicting the
 * example above). The accumulator must be seeded with the first predicate's
 * result before intersecting the remaining ones.
 * @param predicates
 * @return
 */
public static Predicate and(params Predicate[] predicates)
{
    return delegate(HashSet<Node> nodes)
    {
        HashSet<Node> result = null;
        foreach (Predicate predicate in predicates)
        {
            if (result == null)
            {
                // Seed with the first predicate's output (copied, so we never
                // mutate a set a predicate may still own).
                result = new HashSet<Node>(predicate(nodes));
            }
            else
            {
                result.IntersectWith(predicate(nodes));
            }
        }
        return result ?? new HashSet<Node>();
    };
}
/// <summary>
/// Warns when the entity declares output field aliases that are already
/// produced by entities in the process.
/// NOTE(review): processKeys is built from ALL of _process.Entities — this
/// assumes <paramref name="entity"/> has not been added to the process yet,
/// otherwise it would always overlap with itself; confirm against callers.
/// </summary>
private void GuardAgainstFieldOverlap(Entity entity)
{
    // The entity's own output aliases, excluding its primary key.
    var entityKeys = new HashSet<string>(entity.Fields.WithOutput().WithoutPrimaryKey().Aliases());
    // Output aliases already claimed across the process.
    var processKeys = new HashSet<string>(_process.Entities.SelectMany(e2 => e2.Fields.WithOutput().Aliases()));
    // entityKeys becomes the overlapping aliases in place.
    entityKeys.IntersectWith(processKeys);
    if (!entityKeys.Any()) return;
    var count = entityKeys.Count;
    _process.Logger.EntityWarn(entity.Name, "field overlap error in {3}. The field{1}: {0} {2} already defined in previous entities. You must alias (rename) these.", string.Join(", ", entityKeys), count.Plural(), count == 1 ? "is" : "are", entity.Alias);
}
/// <summary>
/// Accumulates the Jaccard similarity |E∩R| / |E∪R| into score, skipping
/// pairs whose union is empty; d counts the pairs that contributed.
/// </summary>
public override void Process(IEnumerable<string> expected, IEnumerable<string> result)
{
    var overlap = new HashSet<string>(expected);

    var combined = new HashSet<string>(expected);
    combined.UnionWith(result);

    if (combined.Count == 0)
    {
        // Both inputs empty: nothing to score (guard clause instead of a wrapping if).
        return;
    }

    overlap.IntersectWith(result);
    score += (double)overlap.Count / combined.Count;
    d++;
}
/// <summary>
/// Jaccard index |A∩B| / |A∪B| of the two token lists (duplicates ignored).
/// NOTE: despite the name, this returns the Jaccard *similarity*, not a
/// distance; the identifier is kept to preserve the interface.
/// (Original comment, translated: "computes the Jaccard distance of two Lists".)
/// </summary>
public static double getJaccardDistance(List<string> source, List<string> target)
{
    var sourceSet = new HashSet<string>(source);
    var targetSet = new HashSet<string>(target);

    var overlap = new HashSet<string>(sourceSet);
    overlap.IntersectWith(targetSet);

    var combined = new HashSet<string>(sourceSet);
    combined.UnionWith(targetSet);

    return overlap.Count / (double)combined.Count;
}
/// <summary>
/// Creates a unit restricted to a subset of its SI prefixes.
/// </summary>
/// <param name="unit">The underlying unit; must not be null.</param>
/// <param name="allowedPrefixes">
/// Prefixes to allow, intersected with the unit's own prefixes. When null,
/// _prefixes is left unassigned (its default).
/// </param>
public UnitWithLimitedPrefixes(IUnit unit, IEnumerable<SIPrefix> allowedPrefixes)
{
    // BUG FIX: the message was being passed as ArgumentNullException's
    // paramName argument; use the (paramName, message) overload instead.
    if (null == unit)
        throw new ArgumentNullException("unit", "unit must not be null");
    _unit = unit;
    if (null != allowedPrefixes)
    {
        // Only prefixes the unit itself supports may be allowed.
        var permitted = new HashSet<SIPrefix>(_unit.Prefixes);
        permitted.IntersectWith(allowedPrefixes);
        _prefixes = new SIPrefixList(permitted);
    }
}
/// <summary>
/// Advances one step of a guided set-algebra walkthrough per click, using the
/// form's set fields (setNotA, setNotC, set1..set4) and Main.settA/B/C.
/// ind tracks the current step; each step prints its resulting set to a box.
/// NOTE(review): the in-place Except/Intersect/Union calls mutate the fields,
/// so the steps only compose correctly when executed in order.
/// </summary>
private void nextStep_Click(object sender, EventArgs e)
{
    switch (ind)
    {
        case 0: // setNotA \= A
            setNotA.ExceptWith(Main.settA);
            foreach (int i in setNotA) { notABox.Text += i.ToString() + ", "; }
            ind++;
            break;
        case 1: // setNotC \= C
            setNotC.ExceptWith(Main.settC);
            foreach (int i in setNotC) { notCBox.Text += i.ToString() + ", "; }
            ind++;
            break;
        case 2: // set1 ∩= setNotC
            set1.IntersectWith(setNotC);
            foreach (int i in set1) { textBox1.Text += i.ToString() + ", "; }
            ind++;
            break;
        case 3: // set2 ∩= setNotA
            set2.IntersectWith(setNotA);
            foreach (int i in set2) { textBox2.Text += i.ToString() + ", "; }
            ind++;
            break;
        case 4: // set3 ∩= B
            set3.IntersectWith(Main.settB);
            foreach (int i in set3) { textBox3.Text += i.ToString() + ", "; }
            ind++;
            break;
        case 5: // set4 ∪= setNotA
            set4.UnionWith(setNotA);
            foreach (int i in set4) { textBox4.Text += i.ToString() + ", "; }
            ind++;
            break;
        case 6: // set4 ∪= setNotC
            set4.UnionWith(setNotC);
            foreach (int i in set4) { textBox5.Text += i.ToString() + ", "; }
            ind++;
            break;
        case 7: // set1 ∪= set2
            set1.UnionWith(set2);
            foreach (int i in set1) { textBox6.Text += i.ToString() + ", "; }
            ind++;
            break;
        case 8: // set4 ∪= set1
            set4.UnionWith(set1);
            foreach (int i in set4) { textBox7.Text += i.ToString() + ", "; }
            ind++;
            break;
        case 9: // final result: set4 ∪= set3, then enable saving
            set4.UnionWith(set3);
            foreach (int i in set4) { setDBox.Text += i.ToString() + ", "; }
            nextStep.Enabled = false;
            saveButton.Visible = true;
            saveButton.Focus();
            break;
    }
}
/// <summary>
/// Adds a PackageReference for the requested package to the project. With
/// --no-restore the reference is written without any compatibility check;
/// otherwise a restore preview is run against an updated dependency graph and
/// either an unconditional or a per-framework conditional reference is
/// written, depending on which TFMs the package is compatible with.
/// </summary>
public async Task<int> ExecuteCommand(PackageReferenceArgs packageReferenceArgs, MSBuildAPIUtility msBuild)
{
    packageReferenceArgs.Logger.LogInformation(string.Format(CultureInfo.CurrentCulture,
        Strings.Info_AddPkgAddingReference,
        packageReferenceArgs.PackageDependency.Id,
        packageReferenceArgs.ProjectPath));

    if (packageReferenceArgs.NoRestore)
    {
        // --no-restore: write the reference blindly and warn that compatibility was not checked.
        packageReferenceArgs.Logger.LogWarning(string.Format(CultureInfo.CurrentCulture, Strings.Warn_AddPkgWithoutRestore));

        var libraryDependency = new LibraryDependency
        {
            LibraryRange = new LibraryRange(
                name: packageReferenceArgs.PackageDependency.Id,
                versionRange: packageReferenceArgs.PackageDependency.VersionRange,
                typeConstraint: LibraryDependencyTarget.Package)
        };

        msBuild.AddPackageReference(packageReferenceArgs.ProjectPath, libraryDependency);
        return (0);
    }

    // 1. Get project dg file
    packageReferenceArgs.Logger.LogDebug("Reading project Dependency Graph");
    var dgSpec = ReadProjectDependencyGraph(packageReferenceArgs);
    if (dgSpec == null)
    {
        // Logging non localized error on debug stream.
        packageReferenceArgs.Logger.LogDebug(Strings.Error_NoDgSpec);
        throw new InvalidOperationException(string.Format(CultureInfo.CurrentCulture, Strings.Error_NoDgSpec));
    }
    packageReferenceArgs.Logger.LogDebug("Project Dependency Graph Read");

    var projectFullPath = Path.GetFullPath(packageReferenceArgs.ProjectPath);
    var matchingPackageSpecs = dgSpec
        .Projects
        .Where(p => p.RestoreMetadata.ProjectStyle == ProjectStyle.PackageReference &&
            PathUtility.GetStringComparerBasedOnOS().Equals(Path.GetFullPath(p.RestoreMetadata.ProjectPath), projectFullPath))
        .ToArray();

    // This ensures that the DG specs generated in previous steps contain exactly 1 project with the same path as the project requesting add package.
    // Throw otherwise since we cannot proceed further.
    if (matchingPackageSpecs.Length != 1)
    {
        throw new InvalidOperationException(string.Format(CultureInfo.CurrentCulture,
            Strings.Error_UnsupportedProject,
            packageReferenceArgs.PackageDependency.Id,
            packageReferenceArgs.ProjectPath));
    }

    // Parse the user specified frameworks once to avoid re-do's
    var userSpecifiedFrameworks = Enumerable.Empty<NuGetFramework>();
    if (packageReferenceArgs.Frameworks?.Any() == true)
    {
        userSpecifiedFrameworks = packageReferenceArgs
            .Frameworks
            .Select(f => NuGetFramework.Parse(f));
    }

    var originalPackageSpec = matchingPackageSpecs.FirstOrDefault();

    // Create a copy to avoid modifying the original spec which may be shared.
    var updatedPackageSpec = originalPackageSpec.Clone();
    if (packageReferenceArgs.Frameworks?.Any() == true)
    {
        // If user specified frameworks then just use them to add the dependency
        PackageSpecOperations.AddOrUpdateDependency(updatedPackageSpec,
            packageReferenceArgs.PackageDependency,
            userSpecifiedFrameworks);
    }
    else
    {
        // If the user has not specified a framework, then just add it to all frameworks
        PackageSpecOperations.AddOrUpdateDependency(updatedPackageSpec, packageReferenceArgs.PackageDependency, updatedPackageSpec.TargetFrameworks.Select(e => e.FrameworkName));
    }

    var updatedDgSpec = dgSpec.WithReplacedSpec(updatedPackageSpec).WithoutRestores();
    updatedDgSpec.AddRestore(updatedPackageSpec.RestoreMetadata.ProjectUniqueName);

    // 2. Run Restore Preview
    packageReferenceArgs.Logger.LogDebug("Running Restore preview");
    var restorePreviewResult = await PreviewAddPackageReferenceAsync(packageReferenceArgs, updatedDgSpec);
    packageReferenceArgs.Logger.LogDebug("Restore Review completed");

    // 3. Process Restore Result
    // Frameworks for which the restore preview succeeded.
    var compatibleFrameworks = new HashSet<NuGetFramework>(
        restorePreviewResult
        .Result
        .CompatibilityCheckResults
        .Where(t => t.Success)
        .Select(t => t.Graph.Framework), new NuGetFrameworkFullComparer());

    if (packageReferenceArgs.Frameworks?.Any() == true)
    {
        // If the user has specified frameworks then we intersect that with the compatible frameworks.
        var userSpecifiedFrameworkSet = new HashSet<NuGetFramework>(
            userSpecifiedFrameworks,
            new NuGetFrameworkFullComparer());
        compatibleFrameworks.IntersectWith(userSpecifiedFrameworkSet);
    }

    // 4. Write to Project
    if (compatibleFrameworks.Count == 0)
    {
        // Package is compatible with none of the project TFMs
        // Do not add a package reference, throw appropriate error
        packageReferenceArgs.Logger.LogError(string.Format(CultureInfo.CurrentCulture,
            Strings.Error_AddPkgIncompatibleWithAllFrameworks,
            packageReferenceArgs.PackageDependency.Id,
            packageReferenceArgs.Frameworks?.Any() == true ? Strings.AddPkg_UserSpecified : Strings.AddPkg_All,
            packageReferenceArgs.ProjectPath));
        return (1);
    }
    // Ignore the graphs with RID
    else if (compatibleFrameworks.Count == restorePreviewResult.Result.CompatibilityCheckResults.Where(r => string.IsNullOrEmpty(r.Graph.RuntimeIdentifier)).Count())
    {
        // Package is compatible with all the project TFMs
        // Add an unconditional package reference to the project
        packageReferenceArgs.Logger.LogInformation(string.Format(CultureInfo.CurrentCulture,
            Strings.Info_AddPkgCompatibleWithAllFrameworks,
            packageReferenceArgs.PackageDependency.Id,
            packageReferenceArgs.ProjectPath));

        // generate a library dependency with all the metadata like Include, Exlude and SuppressParent
        var libraryDependency = GenerateLibraryDependency(updatedPackageSpec, packageReferenceArgs, restorePreviewResult, userSpecifiedFrameworks);

        msBuild.AddPackageReference(packageReferenceArgs.ProjectPath, libraryDependency);
    }
    else
    {
        // Package is compatible with some of the project TFMs
        // Add conditional package references to the project for the compatible TFMs
        packageReferenceArgs.Logger.LogInformation(string.Format(CultureInfo.CurrentCulture,
            Strings.Info_AddPkgCompatibleWithSubsetFrameworks,
            packageReferenceArgs.PackageDependency.Id,
            packageReferenceArgs.ProjectPath));

        var compatibleOriginalFrameworks = originalPackageSpec.RestoreMetadata
            .OriginalTargetFrameworks
            .Where(s => compatibleFrameworks.Contains(NuGetFramework.Parse(s)));

        // generate a library dependency with all the metadata like Include, Exlude and SuppressParent
        var libraryDependency = GenerateLibraryDependency(updatedPackageSpec, packageReferenceArgs, restorePreviewResult, userSpecifiedFrameworks);

        msBuild.AddPackageReferencePerTFM(packageReferenceArgs.ProjectPath, libraryDependency, compatibleOriginalFrameworks);
    }

    // 5. Commit restore result
    await RestoreRunner.CommitAsync(restorePreviewResult, CancellationToken.None);

    return (0);
}
/// <summary>
/// If an empty array is passed, the result will be null
/// if there are instances, but they share no common supertype the result will be var
/// </summary>
/// <param name="array">The array stack value to inspect; must satisfy IsArray.</param>
/// <param name="core">Runtime core supplying the class table and type statistics.</param>
/// <returns>The most derived class common to all element types, or null when none exists.</returns>
public static ClassNode GetGreatestCommonSubclassForArray(StackValue array, Core core)
{
    if (!array.IsArray)
    {
        throw new ArgumentException("The stack value provided was not an array");
    }

    Dictionary<ClassNode, int> typeStats = GetTypeStatisticsForArray(array, core);

    //@PERF: This could be improved with a
    List<List<int>> chains = new List<List<int>>();
    HashSet<int> commonTypeIDs = new HashSet<int>();

    // Build each element type's upcast chain; commonTypeIDs collects every id seen.
    foreach (ClassNode cn in typeStats.Keys)
    {
        List<int> chain = ClassUtils.GetClassUpcastChain(cn, core);

        //Now add in the other conversions - as we don't have a common superclass yet
        //@TODO(Jun): Remove this hack when we have a proper casting structure
        foreach (int id in cn.coerceTypes.Keys)
        {
            if (!chain.Contains(id))
            {
                chain.Add((id));
            }
        }

        // (An alternative implementation used GetConversionChain(cn, core) here;
        // old merge-conflict markers removed.)
        chains.Add(chain);
        foreach (int nodeId in chain)
        {
            commonTypeIDs.Add(nodeId);
        }
    }

    //Remove nulls if they exist
    {
        if (commonTypeIDs.Contains((int)PrimitiveType.kTypeNull))
        {
            commonTypeIDs.Remove((int)PrimitiveType.kTypeNull);
        }
        List<List<int>> nonNullChains = new List<List<int>>();
        foreach (List<int> chain in chains)
        {
            if (chain.Contains((int)PrimitiveType.kTypeNull))
            {
                chain.Remove((int)PrimitiveType.kTypeNull);
            }
            if (chain.Count > 0)
            {
                nonNullChains.Add(chain);
            }
        }
        chains = nonNullChains;
    }

    //Contract the hashset so that it contains only the nodes present in all chains
    //@PERF: this is very inefficent
    {
        foreach (List<int> chain in chains)
        {
            commonTypeIDs.IntersectWith(chain);
        }
    }

    //No common subtypes
    if (commonTypeIDs.Count == 0)
    {
        return (null);
    }

    if (commonTypeIDs.Count == 1)
    {
        return (core.ClassTable.ClassNodes[commonTypeIDs.First()]);
    }

    // Use the first chain to rank the surviving ids (chains are ordered most-derived first).
    List<int> lookupChain = chains[0];

    //Insertion sort the IDs, we may only have a partial ordering on them.
    List<int> orderedTypes = new List<int>();
    foreach (int typeToInsert in commonTypeIDs)
    {
        bool inserted = false;
        for (int i = 0; i < orderedTypes.Count; i++)
        {
            int orderedType = orderedTypes[i];
            if (lookupChain.IndexOf(typeToInsert) < lookupChain.IndexOf(orderedType))
            {
                inserted = true;
                orderedTypes.Insert(i, typeToInsert);
                break;
            }
        }
        if (!inserted)
        {
            orderedTypes.Add(typeToInsert);
        }
    }

    // The first id in chain order is the most derived common type.
    return (core.ClassTable.ClassNodes[orderedTypes.First()]);
}
/// <summary>
/// Hashes a 50 MiB random buffer twice — the second time skipping the first
/// 200 KiB — and verifies that content-defined chunking keeps nearly all
/// chunks in common (within 10%) while no inner Merkle nodes are shared, and
/// that a pre-order visit of the second tree therefore visits every node.
/// </summary>
private void ChunksAndNodesInCommonInSimilarFilesInternal(HashType hashType)
{
    using var hasher = new DedupNodeOrChunkHashAlgorithm(new ManagedChunker(hashType.GetChunkerConfiguration()));
    byte[] bytes = new byte[50 * 1024 * 1024];
    int offsetForSecondFile = 200 * 1024;
    // Fixed seed keeps the test deterministic.
    var r = new Random(Seed: 0);
    r.NextBytes(bytes);

    hasher.SetInputLength(bytes.Length);
    byte[] hash1 = hasher.ComputeHash(bytes, 0, bytes.Length);
    var node1 = hasher.GetNode();
    HashSet<string> chunks1 = node1.EnumerateChunkLeafsInOrder().Select(c => c.Hash.ToHex()).ToHashSet();
    HashSet<string> nodes1 = node1.EnumerateInnerNodesDepthFirst().Select(c => c.Hash.ToHex()).ToHashSet();

    // Second "file": the same content shifted forward by offsetForSecondFile bytes.
    hasher.SetInputLength(bytes.Length);
    byte[] hash2 = hasher.ComputeHash(bytes, offsetForSecondFile, bytes.Length - offsetForSecondFile);
    var node2 = hasher.GetNode();
    HashSet<string> chunks2 = node2.EnumerateChunkLeafsInOrder().Select(c => c.Hash.ToHex()).ToHashSet();
    HashSet<string> nodes2 = node2.EnumerateInnerNodesDepthFirst().Select(c => c.Hash.ToHex()).ToHashSet();

    Assert.NotEqual(hash1, hash2, ByteArrayComparer.Instance);

    // Chunking should re-align quickly after the offset: at least 90% shared chunks.
    var commonChunks = new HashSet<string>(chunks1);
    commonChunks.IntersectWith(chunks2);
    Assert.Subset(chunks1, commonChunks);
    Assert.Subset(chunks2, commonChunks);
    Assert.InRange(commonChunks.Count, chunks1.Count - (chunks1.Count / 10), chunks1.Count);
    Assert.InRange(commonChunks.Count, chunks2.Count - (chunks2.Count / 10), chunks2.Count);

    var commonNodes = new HashSet<string>(nodes1);
    commonNodes.IntersectWith(nodes2);
    Assert.Subset(nodes1, commonNodes);
    Assert.Subset(nodes2, commonNodes);

    // Walk node2's tree, pruning wherever a subtree is already known from node1.
    int nodeQueries = 0;
    int chunkQueries = 0;
    node2.VisitPreorder(n =>
    {
        switch (n.Type)
        {
            case DedupNode.NodeType.ChunkLeaf:
                chunkQueries++;
                break;
            case DedupNode.NodeType.InnerNode:
                nodeQueries++;
                break;
        }
        return (!nodes1.Contains(n.Hash.ToHex()));
    });

    // No inner nodes are shared, so nothing is pruned: every node and chunk is visited.
    Assert.Equal(0, commonNodes.Count);
    Assert.Equal(nodeQueries, nodes2.Count);
    Assert.Equal(chunkQueries, chunks2.Count);
}
/// <summary>
/// Verifies that a service-injected context start handler is shared across two
/// successive contexts (appearing in both contexts' handler sets) while each
/// context also gets one handler of its own, and that the start/stop
/// invocation counts match the number of contexts each handler observed.
/// </summary>
public void TestServiceContextEventHandlersTriggeredSuccessiveContexts()
{
    var launcher = GetRootContextLauncher(
        GetContextConfiguration(), GetServiceConfiguration(), Optional<IConfiguration>.Empty());

    IInjector serviceInjector = null;
    IInjector firstContextInjector = null;
    IInjector secondContextInjector = null;

    // Spin up a root context plus a child context, capturing their injectors.
    using (var rootContext = launcher.GetRootContext())
    {
        serviceInjector = rootContext.ServiceInjector;
        firstContextInjector = rootContext.ContextInjector;
        using (var childContext = rootContext.SpawnChildContext(GetContextConfiguration()))
        {
            secondContextInjector = childContext.ContextInjector;
        }
        Assert.NotNull(serviceInjector);
        Assert.NotNull(firstContextInjector);
        Assert.NotNull(secondContextInjector);
    }

    var serviceContextStartHandlers =
        serviceInjector.GetNamedInstance<ContextConfigurationOptions.StartHandlers, ISet<IObserver<IContextStart>>>();
    var firstContextContextStartHandlers =
        firstContextInjector.GetNamedInstance<ContextConfigurationOptions.StartHandlers, ISet<IObserver<IContextStart>>>();
    var secondContextContextStartHandlers =
        secondContextInjector.GetNamedInstance<ContextConfigurationOptions.StartHandlers, ISet<IObserver<IContextStart>>>();

    // Each context set = its own handler + the shared service handler.
    Assert.Equal(1, serviceContextStartHandlers.Count);
    Assert.Equal(2, firstContextContextStartHandlers.Count);
    Assert.Equal(2, secondContextContextStartHandlers.Count);

    // Exactly one handler (the service one) is common to all three sets,
    // and there are three distinct handlers in total.
    var intersectSet = new HashSet<IObserver<IContextStart>>(serviceContextStartHandlers);
    intersectSet.IntersectWith(firstContextContextStartHandlers);
    intersectSet.IntersectWith(secondContextContextStartHandlers);

    var unionSet = new HashSet<IObserver<IContextStart>>(serviceContextStartHandlers);
    unionSet.UnionWith(firstContextContextStartHandlers);
    unionSet.UnionWith(secondContextContextStartHandlers);

    Assert.Equal(1, intersectSet.Count);
    Assert.Equal(3, unionSet.Count);

    var serviceContextHandler = serviceContextStartHandlers.Single() as ITestContextEventHandler;
    var unionContextHandlerSet = new HashSet<ITestContextEventHandler>(
        unionSet.Select(h => h as ITestContextEventHandler).Where(h => h != null));

    Assert.Equal(unionSet.Count, unionContextHandlerSet.Count);
    Assert.True(unionContextHandlerSet.Contains(serviceContextHandler));

    // The shared service handler saw both contexts; per-context handlers saw one each.
    foreach (var handler in unionContextHandlerSet.Where(h => h != null))
    {
        if (ReferenceEquals(handler, serviceContextHandler))
        {
            Assert.Equal(2, handler.ContextStartInvoked);
            Assert.Equal(2, handler.ContextStopInvoked);
        }
        else
        {
            Assert.Equal(1, handler.ContextStartInvoked);
            Assert.Equal(1, handler.ContextStopInvoked);
        }
    }
}
static HashSet <int> intersect(HashSet <int> a, HashSet <int> b) { a.IntersectWith(b); return(a); }
static void Main(string[] args) { // created an empty HashSet called "Showroom" HashSet <string> Showroom = new HashSet <string>(); // added 4 cars to "Showroom" Showroom.Add("Model 3"); Showroom.Add("4Runner"); Showroom.Add("Fusion"); Showroom.Add("Wrangler"); /* * added "Model 3" again to show that HashSets have UNIQUE items * so Model 3 will only appear once in the console */ Showroom.Add("Model 3"); // created an empty HashSet called "UsedLot" HashSet <string> UsedLot = new HashSet <string>(); // added two cars to "UsedLot" UsedLot.Add("Fiesta"); UsedLot.Add("F350"); // added the HashSet "UsedLot" to the HashSet "Showroom" Showroom.UnionWith(UsedLot); // removing 1 car from the "Showroom" HashSet Showroom.Remove("Fusion"); // printing to console to make sure cars were added Console.WriteLine("///// Showroom Console /////"); foreach (string vehicle in Showroom) { Console.WriteLine(vehicle); } // created an empty HashSet called "Junkyard" HashSet <string> Junkyard = new HashSet <string>(); // added 4 cars to the HashSet "Junkyard" Junkyard.Add("Camry"); Junkyard.Add("Sonata"); Junkyard.Add("Prius"); Junkyard.Add("Taurus"); // added 2 cars that are named the same from "Showroom" to "Junkyard" Junkyard.Add("Model 3"); Junkyard.Add("4Runner"); // created a new "Showroom" HashSet HashSet <string> clone = new HashSet <string>(Showroom); // checking to see if any vehicles in the clone of "Showroom" match in "Junkyard" clone.IntersectWith(Junkyard); // looping over the clone HashSet to see the matching vehicles Console.WriteLine("///// Clone Console /////"); foreach (string vehicle in clone) { Console.WriteLine(vehicle); } // added HashSet "Junkyard" to HashSet "Showroom" Showroom.UnionWith(Junkyard); // printing to console to make sure cars were added Console.WriteLine("///// Added Junkyard to Showroom Console ///// "); foreach (string vehicle in Showroom) { Console.WriteLine(vehicle); } }
/// <summary>Compute correlations between two entities for given ratings</summary> /// <param name="ratings">the rating data</param> /// <param name="entity_type">the entity type, either USER or ITEM</param> /// <param name="i">the ID of first entity</param> /// <param name="j">the ID of second entity</param> /// <param name="shrinkage">the shrinkage parameter</param> public static float ComputeCorrelation(IRatings ratings, EntityType entity_type, int i, int j, float shrinkage) { if (i == j) { return(1); } IList <int> ratings1 = (entity_type == EntityType.USER) ? ratings.ByUser[i] : ratings.ByItem[i]; IList <int> ratings2 = (entity_type == EntityType.USER) ? ratings.ByUser[j] : ratings.ByItem[j]; // get common ratings for the two entities HashSet <int> e1 = (entity_type == EntityType.USER) ? ratings.GetItems(ratings1) : ratings.GetUsers(ratings1); HashSet <int> e2 = (entity_type == EntityType.USER) ? ratings.GetItems(ratings2) : ratings.GetUsers(ratings2); e1.IntersectWith(e2); int n = e1.Count; if (n < 2) { return(0); } List <Ratings> ratings_by_other_entity = (entity_type == EntityType.USER) ? ratings.ByItem : ratings.ByUser; double sum_ij = 0; double sum_ii = 0; double sum_jj = 0; foreach (int other_entity_id in e1) { double average_rating = ratings_by_other_entity[other_entity_id].Average; // get ratings double r1 = 0; double r2 = 0; if (entity_type == EntityType.USER) { r1 = ratings.Get(i, other_entity_id, ratings1); r2 = ratings.Get(j, other_entity_id, ratings2); } else { r1 = ratings.Get(other_entity_id, i, ratings1); r2 = ratings.Get(other_entity_id, j, ratings2); } double dev_i = r1 - average_rating; double dev_j = r2 - average_rating; // update sums sum_ij += dev_i * dev_j; sum_ii += dev_i * dev_i; sum_jj += dev_j * dev_j; } double denominator = Math.Sqrt(sum_ii * sum_jj); if (denominator == 0) { return(0); } double adjusted_cosine = sum_ij / denominator; return((float)adjusted_cosine * (n / (n + shrinkage))); }
private void ProcessSubgraph(HashSet <int> subgraph) { if (subgraph.Count == 0) { return; } int pivot = g.PivotFromSet(subgraph); HashSet <int> forward = g.Forward(pivot, subgraph); HashSet <int> backward = g.Backward(pivot, subgraph); // Need to clone because IntersectWith modifies the existing set // and we need the original forward for th next step HashSet <int> SCC = new HashSet <int>(forward, forward.Comparer); SCC.IntersectWith(backward); // ResultSet has the locks so no need here this.result.Add(SCC); // Calculate the remainder set subgraph.ExceptWith(forward); subgraph.ExceptWith(backward); forward.ExceptWith(SCC); backward.ExceptWith(SCC); // Does not appear to speed it up, at least not significantly. /* * int threshold = 20; * * if (subgraph.Count < threshold) * { * ProcessSubgraph(subgraph); * } else * { * this.taskList.Enqueue(subgraph); * } * * if (forward.Count < threshold) * { * ProcessSubgraph(forward); * } * else * { * this.taskList.Enqueue(forward); * } * * if (backward.Count < threshold) * { * ProcessSubgraph(backward); * } * else * { * this.taskList.Enqueue(backward); * } */ this.taskList.Enqueue(subgraph); this.taskList.Enqueue(forward); this.taskList.Enqueue(backward); lock (pulseLock) { Monitor.PulseAll(pulseLock); } return; }
public static IEnumerable <ElementId> ElementIdsOfFamilyTypes(this Document document, string familyName = null, string familyTypeName = null, bool caseSensitive = true, IEnumerable <ElementId> ids = null) { if (document == null) { return(null); } HashSet <ElementId> result = new HashSet <ElementId>(); IEnumerable <ElementType> elementTypes = new FilteredElementCollector(document).OfClass(typeof(ElementType)).Cast <ElementType>(); if (!string.IsNullOrEmpty(familyName)) { if (caseSensitive) { elementTypes = elementTypes.Where(x => x.FamilyName == familyName); } else { elementTypes = elementTypes.Where(x => !string.IsNullOrEmpty(x.FamilyName) && x.FamilyName.ToUpper() == familyName.ToUpper()); } if (elementTypes.Count() == 0) { BH.Engine.Reflection.Compute.RecordError("Couldn't find any family named " + familyName + "."); return(result); } } if (!string.IsNullOrEmpty(familyTypeName)) { if (caseSensitive) { elementTypes = elementTypes.Where(x => x.Name == familyTypeName); } else { elementTypes = elementTypes.Where(x => !string.IsNullOrEmpty(x.Name) && x.Name.ToUpper() == familyTypeName.ToUpper()); } if (elementTypes.Count() > 1) { BH.Engine.Reflection.Compute.RecordWarning(String.Format("More than one family type named {0} has been found. It may be desirable to narrow down the search by specifying family name explicitly.", familyTypeName)); } } if (elementTypes.Count() == 0) { string error = "Couldn't find any family type named " + familyTypeName; if (!string.IsNullOrEmpty(familyName)) { error += " in the family " + familyName + "."; } else { error += "."; } BH.Engine.Reflection.Compute.RecordError(error); return(result); } result = new HashSet <ElementId>(elementTypes.Select(x => x.Id)); if (ids != null) { result.IntersectWith(ids); } return(result); }
public override void AddEdge(Node src, Node dest) { for (; SortedNodes.Count < Nodes.Count;) { SortedNodes.Add(Nodes[SortedNodes.Count]); } if (src.Index < dest.Index) { src.Outgoing.Add(dest); dest.Incoming.Add(src); return; } _deltaPlus.Clear(); Dfs(dest, _deltaPlus, n => n.Outgoing, dest.Index, src.Index); _deltaMinus.Clear(); Dfs(src, _deltaMinus, n => n.Incoming, dest.Index, src.Index); _deltaPlusList.Clear(); _deltaPlusList.AddRange(_deltaPlus.OrderBy(node => node.Index)); _deltaPlus.IntersectWith(_deltaMinus); if (_deltaPlus.Count > 0) { throw new InvalidOperationException("The edge to be added would introduce a cycle"); } src.Outgoing.Add(dest); dest.Incoming.Add(src); _deltaMinusList.Clear(); _deltaMinusList.AddRange(_deltaMinus.OrderBy(node => node.Index)); _merged.Clear(); int f = 0, s = 0; while (f < _deltaPlusList.Count && s < _deltaMinusList.Count) { if (_deltaPlusList[f].Index < _deltaMinusList[s].Index) { _merged.Add(_deltaPlusList[f].Index); f++; } else { _merged.Add(_deltaMinusList[s].Index); s++; } } for (; f < _deltaPlusList.Count; f++) { _merged.Add(_deltaPlusList[f].Index); } for (; s < _deltaMinusList.Count; s++) { _merged.Add(_deltaMinusList[s].Index); } var cEntry = 0; for (int q = 0; q < _merged.Count; q++) { var cNode = cEntry < _deltaMinusList.Count ? _deltaMinusList[cEntry] : _deltaPlusList[cEntry - _deltaMinusList.Count]; cEntry++; SortedNodes[_merged[q]] = cNode; } for (int q = 0; q < _merged.Count; q++) { SortedNodes[_merged[q]].Index = _merged[q]; } }
/// <summary>
/// Scratchpad / cheat-sheet of C# collection and string APIs (arrays, List,
/// Dictionary, Stack, Queue, HashSet, SortedSet, string methods, 2D arrays).
/// Most statements exist purely to illustrate an API call and their results
/// are intentionally discarded. <paramref name="s1"/> and <paramref name="s2"/>
/// are sample strings exercised by a few snippets. Always returns null.
/// </summary>
public static string method_1(string s1, string s2)
{
    //ref: https://www.dotnetperls.com/arraylist
    //array
    string[] array_01 = new string[50]; //you cannot simply add a new value to an array like push
    //string array
    string[] array_02 = new string[] { "hi", "there", "hey" };
    //int array
    int[] array_03 = new int[] { 1, 2, 3 };
    //Create a list first for push and pop and then convert to array
    List<int> list_01 = new List<int>();
    list_01.Add(1);
    list_01.Add(2);
    list_01.Remove(1);
    //list_01.RemoveAt(1);
    //List to array (push first, then convert)
    var list_01_array = list_01.ToArray();
    //array to List:
    //list_01.ToList();
    //string to array
    string test1 = "hi there";
    string[] array_04 = new string[] { test1 };
    //string to character array
    char[] char_1 = s1.ToCharArray();
    for (int i = 0; i < char_1.Length; i++)
    {
        char indiviual_Char = char_1[i];
    }
    //Array operations: Average / Min / Max / Sum
    list_01_array.Average();
    list_01_array.Contains(1); //also: Distinct / Except / FirstOrDefault / First / Count
    array_03.Count();
    //value of the element at index 0
    list_01_array.ElementAt(0);
    //check for (reference) equality of arrays
    bool isEqual = list_01_array.Equals(array_03);
    //array length
    int len = array_03.Length;
    //order by descending/ascending
    array_03.OrderByDescending(a => a);
    //reverse an array
    array_03.Reverse();
    //Except / Intersect / Union; Any() gives bool for existence
    array_03.Where(a => a > 0);
    array_03.Select(a => a > 0);
    array_03.GroupBy(a => a);
    //List:
    List<int> list_02 = new List<int>() { 1, 2, 3 };
    List<int> list_duplicate_elem = new List<int>() { 1, 2, 3, 1, 1 };
    //Print a list
    list_02.ForEach(a => Console.WriteLine(a));
    //to dictionary with a unique index for each element:
    //Method-1:
    Dictionary<int, int> dResult = new Dictionary<int, int>();
    for (int i = 0; i < list_duplicate_elem.Count; i++)
    {
        dResult.Add(i, list_duplicate_elem[i]);
    }
    //Method-2:
    Dictionary<int, int> dResult_02 = list_duplicate_elem
        .Select((value, index) => new { value, index })
        .ToDictionary(x => x.index + 1, x => x.value);
    //ToDictionary
    array_03.ToDictionary(key => key, value => value);
    array_03.ToDictionary(a => a, a => a);
    //ToDictionary on a list with duplicate elements: group first to avoid duplicate keys
    list_duplicate_elem.GroupBy(a => a).ToDictionary(a => a.Key, a => a.ToList());
    //Map:
    Dictionary<string, List<int>> dict_02 = new Dictionary<string, List<int>>();
    dict_02.Add("name", list_02);
    //Loop over a Dictionary:
    //Type-1
    foreach (var item in dResult_02)
    {
        var key = item.Key;
        var value = item.Value;
    }
    //Type-2
    foreach (KeyValuePair<int, int> item in dResult_02)
    {
        var key = item.Key;
        var value = item.Value;
    }
    //copying elements from one dictionary to another key by key:
    Dictionary<string, List<int>> map = new Dictionary<string, List<int>>();
    foreach (var key in dict_02.Keys)
    {
        map.Add(key, dict_02[key].Select(s => s).ToList());
    }
    //Stack
    Stack<int> stack = new Stack<int>();
    stack.Push(100);
    stack.Push(200);
    stack.Pop();
    //Peek returns the top element without removing it
    var top = stack.Peek();
    foreach (int i in stack)
    {
        var stackItem = i;
    }
    //array to stack
    string[] array_values = { "Dot", "Net", "Perls" };
    // Array to Stack
    var stack_from_array = new Stack<string>(array_values);
    //List to Stack
    var stack_from_List = new Stack<int>(list_duplicate_elem);
    //stack to list and array:
    stack.ToList();
    stack.ToArray();
    //all operations of array and list apply to a stack too:
    //Where / Select / Distinct / Except / FirstOrDefault / First
    stack.GroupBy(a => a).ToDictionary(key => key, value => value);
    //Queue
    Queue<int> q = new Queue<int>();
    q.Enqueue(5);  // Add 5 to the end of the Queue.
    q.Enqueue(10); // Then add 10. 5 is at the start.
    q.Enqueue(15); // Then add 15.
    q.Enqueue(20); // Then add 20.
    q.Dequeue();
    //queue to array
    q.ToArray();
    // Array to Queue
    var queue_from_array = new Queue<string>(array_values);
    //ArrayList (untyped; can hold heterogeneous elements)
    ArrayList arrayList = new ArrayList();
    string[] array_of_string = new string[] { "a", "b" };
    arrayList.Add(array_of_string);
    int[] array_of_int = new int[] { 1, 2, 3 };
    arrayList.Add(array_of_int);
    //HashSet
    //A HashSet is an unordered collection of unique elements: it prevents
    //duplicates from being inserted, and membership tests perform better
    //than on a list.
    //https://www.c-sharpcorner.com/article/working-with-hashset-in-c-sharp/
    HashSet<int> hashSet = new HashSet<int>();
    HashSet<string> hashSet_names = new HashSet<string> { "Rajeev", "Akash", "Amit" };
    HashSet<string> hashSet_names_02 = new HashSet<string> { "Rajeev_2", "Akash_2", "Amit_2" };
    //duplicates are not added into the collection.
    hashSet_names.Add("Rajeev");
    foreach (var name in hashSet_names)
    {
        Console.WriteLine(name);
    }
    //all the same operations as array and list, plus the set operations
    //that maintain the distinct property of sets:
    hashSet_names.UnionWith(hashSet_names_02);
    hashSet_names.IntersectWith(hashSet_names_02);
    //all the names in list1 not in list2
    hashSet_names.ExceptWith(hashSet_names_02);
    //Get a unique set from a list or array (alternative to Distinct):
    //List to HashSet
    HashSet<int> hashSet_from_list = new HashSet<int>(list_duplicate_elem);
    //Array to HashSet
    HashSet<int> hashSet_from_array = new HashSet<int>(list_duplicate_elem.ToArray());
    foreach (var set in hashSet_from_array)
    {
        Console.WriteLine(set);
    }
    //SortedSet
    //https://www.dotnetperls.com/sortedset
    SortedSet<string> sortedSet = new SortedSet<string>();
    // Add 4 elements.
    sortedSet.Add("perls");
    sortedSet.Add("net");
    sortedSet.Add("dot");
    sortedSet.Add("sam");
    // Remove an element.
    sortedSet.Remove("sam");
    // Print elements in set (alphabetical order).
    foreach (string val in sortedSet)
    {
        Console.WriteLine(val);
    }
    //unique functionality:
    sortedSet.RemoveWhere(element => element.StartsWith("s"));
    //sortedSet.Clear();
    List<string> list_with_set = new List<string>();
    list_with_set.Add("a");
    list_with_set.Add("y");
    sortedSet.ExceptWith(list_with_set);
    //Overlaps: are there common elements with the list?
    bool isOverlap = sortedSet.Overlaps(list_with_set);
    // Union the two collections.
    sortedSet.UnionWith(list_with_set);
    //also: IntersectWith / IsSubsetOf / SetEquals / Reverse
    //Substring
    string input = "OneTwoThree";
    // Get first three characters.
    string sub = input.Substring(0, 3);
    //Does the string contain this substring?
    input.Contains(sub);
    //Iterating over characters in a string:
    int counter = 0;
    for (int e = 0; e < s1.Length; e++)
    {
        if (s1[e] == '.')
        {
            counter++;
        }
    }
    //IndexOf:
    const string value_Index_Check = "Your dog is cute.";
    // Test with IndexOf method (-1 means "not found").
    if (value_Index_Check.IndexOf("dog") != -1)
    {
        Console.WriteLine("string contains dog!");
    }
    // Get the index of a character, then take the substring from there:
    const string dummy_01 = "I have a cat";
    // Location of the letter c.
    int index_of_c = dummy_01.IndexOf('c');
    // Remainder of string starting at c.
    string d = dummy_01.Substring(index_of_c);
    Console.WriteLine(d);
    //Loop through all instances of the letter a:
    string dummy_string = "I have a cat";
    int k_ = 0;
    while ((k_ = dummy_string.IndexOf('a', k_)) != -1)
    {
        // Print out the substring.
        Console.WriteLine(dummy_string.Substring(k_));
        // Increment the index.
        k_++;
    }
    /*ave a cat
     * a cat
     * at*/
    string comma_string = ":100,200";
    // Skip the first character with a startIndex of 1.
    int comma = comma_string.IndexOf(',', 1);
    Console.WriteLine(comma);
    //concatenate
    string s1_cat = "string1";
    string s2_cat = "string2";
    string s3_cat = string.Concat(s1_cat, s2_cat);
    Console.WriteLine(s2); //NOTE(review): prints parameter s2, not s3_cat — likely a typo
    //LastIndexOf and IgnoreCase used together:
    // Find the last occurrence of this string, ignoring the case.
    string value_lastIndex = "Dot Net Perls";
    int index4 = value_lastIndex.LastIndexOf("PERL", StringComparison.InvariantCultureIgnoreCase);
    if (index4 != -1)
    {
        Console.WriteLine(index4);
        Console.WriteLine(value_lastIndex.Substring(index4));
    }
    //String operations:
    //https://www.dotnetperls.com/string
    //Compare determines the sort order of strings:
    //1 if the first string is bigger, -1 if smaller, 0 if both are equal.
    string.Compare(s1, s2);
    s1.CompareTo(s2);
    //equality
    bool isStringEqual = s1.Equals(s2);
    bool isEqual_comp = s1 == s2 ? true : false;
    //Null / empty / whitespace checks
    String.IsNullOrWhiteSpace("cc");
    String.IsNullOrEmpty("aaa");
    //CopyTo
    string value1 = "Dot Net Perls";
    char[] array1 = new char[3];
    // Copy the fifth, sixth, and seventh characters to the array.
    value1.CopyTo(4, array1, 0, 3);
    // Output the array we copied to.
    Console.WriteLine("--- Destination array ---");
    Console.WriteLine(array1.Length);
    Console.WriteLine(array1);
    //StartsWith / EndsWith
    if (input.StartsWith("http://www.site.com"))
    {
        Console.WriteLine(true);
    }
    //Insert Type-1
    string names = "Romeo Juliet";
    string shakespeare = names.Insert(6, "and ");
    Console.WriteLine(shakespeare);
    //Insert a substring after a located index:
    string names2 = "The Taming of Shrew";
    int index2 = names2.IndexOf("of ");
    string shakespeare2 = names2.Insert(index2 + "of ".Length, "the ");
    Console.WriteLine(shakespeare2); //The Taming of >>the<< Shrew, "the " being inserted here
    //Remove
    string test1_r = "0123456";
    // Start removing at index 3 till the end; "012" is the result.
    string result1 = test1_r.Remove(3);
    // 2. Remove a range of characters in a string.
    string test2 = "012 345 678";
    int index1 = test2.IndexOf(' ');
    int index2_r = test2.IndexOf(' ', index1 + 1);
    string result2 = test2.Remove(index1, index2_r - index1);
    Console.WriteLine(result2); // "012 678" is the output
    //Replace:
    const string input_replace = "key tool";
    string output_replaced = input_replace.Replace("key ", "keyword ");
    //minify: strip newlines by replacing them with nothing
    string p = " \n oop";
    p = p.Replace(Environment.NewLine, string.Empty);
    //HashCode as used in hash tables:
    string value_code = "";
    for (int i = 0; i < 10; i++)
    {
        value_code += "x";
        Console.WriteLine("GETHASHCODE: " + value_code.GetHashCode());
    }
    //Split:
    string data = "there is a cat";
    // Split string on spaces (this will separate all the words).
    string[] words = data.Split(' ');
    foreach (string word in words)
    {
        Console.WriteLine("WORD: " + word);
    }
    //Regex:
    string value_regex = "cat\r\ndog\r\nanimal\r\nperson";
    // Split the string on line breaks; the return value from Split is a string array.
    string[] lines = Regex.Split(value_regex, "\r\n");
    //Regex split on all non-word characters — returns an array of all the words.
    const string sentence = "Hello, my friend";
    string[] words_reg = Regex.Split(sentence, @"\W+");
    foreach (string value in words_reg)
    {
        Console.WriteLine("WORD: " + value);
    }
    foreach (string line in lines)
    {
        Console.WriteLine(line);
    }
    //Split on directory separator
    string dir = "c:/gg?bb";
    string[] parts = dir.Split('\\');
    //more than one delimiter
    string[] result_array = dir.Split(new char[] { ' ', ',' });
    //Join: convert an array to a comma-separated string
    string[] arr_join = { "one", "two", "three" };
    Console.WriteLine(string.Join(",", arr_join)); //one,two,three
    //Length:
    string abc_01 = "v";
    int len_string = abc_01.Length;
    if (arr_join[arr_join.Length - 1] == "abc")
    {
    }
    //pad right: "cat" padded to width 10 with '0'
    string s_pad = "cat".PadRight(10, '0');
    //to lower / to upper:
    string lower = s_pad.ToLower();
    //Trim: returns a new trimmed string copy.
    string st = " This is an example string. ";
    st = st.Trim();
    //Convert a List to a delimiter-separated string
    var list_join = new List<string>() { "cat", "dog", "rat" };
    // Join the strings from the List.
    string joined = string.Join<string>("*", list_join); //cat*dog*rat
    //2D arrays
    int[,] matrix = new int[5, 2] { { 0, 0 }, { 1, 2 }, { 2, 4 }, { 3, 6 }, { 4, 8 } };
    int[,] matrix_02 = new int[3, 4]
    {
        { 0, 1, 2, 3 },  /* initializers for row indexed by 0 */
        { 4, 5, 6, 7 },  /* initializers for row indexed by 1 */
        { 8, 9, 10, 11 } /* initializers for row indexed by 2 */
    };
    //getting a value from the matrix
    int matrix_val = matrix_02[2, 3];
    int ii, jj;
    //5x2 matrix:
    for (ii = 0; ii < 5; ii++)
    {
        for (jj = 0; jj < 2; jj++)
        {
            Console.WriteLine("a[{0},{1}] = {2}", ii, jj, matrix[ii, jj]);
        }
    }
    //substrings: enumerate substring combinations
    string pattern_string = "abcdefghi";
    for (int i = 1; i < pattern_string.Length; i++)
    {
        // Be careful with the end index.
        for (int j = 0; j <= pattern_string.Length - i; j++)
        {
            string substring = pattern_string.Substring(i, j);
            //Console.WriteLine(substring);
        }
    }
    //Removing elements while traversing:
    List<int> lst = new List<int>();
    lst.Add(1);
    lst.Add(2);
    lst.Add(3);
    foreach (var item in lst)
    {
        if (item > 1)
        {
            //lst.Remove(item); //Not possible - will generate error!
        }
    }
    //Method-1: remove while iterating over a snapshot copy of the list:
    foreach (var item in lst.ToList())
    {
        if (item == 2)
        {
            lst.Remove(item); //Works!!
        }
    }
    lst.ForEach(a => Console.WriteLine(a));
    //Method-2: iterate the list in reverse order:
    //NOTE(review): the condition i > 0 never visits index 0.
    for (int i = lst.Count - 1; i > 0; i--)
    {
        if (lst[i] == 3)
        {
            lst.RemoveAt(i);
        }
    }
    lst.ForEach(a => Console.WriteLine(a));
    //Read a comma-separated input file and split on commas
    /*int io = 0;
     * foreach (string line in File.ReadAllLines("TextFile1.txt"))
     * {
     * string[] parts_string_array = line.Split(',');
     * foreach (string part in parts_string_array)
     * {
     * Console.WriteLine("{0}:{1}", io, part);
     * }
     * io++; // For demonstration.
     * }*/
    return(null);
}
/// <summary>
/// Combines two lists stored in context properties using the configured set
/// operation (union, intersect, subtract, exclusive-or) and writes the
/// resulting set into the output property.
/// </summary>
/// <exception cref="UTFailBuildException">
/// Thrown when a property name is missing, or a referenced property is null
/// or not an <see cref="IEnumerable"/>.
/// </exception>
public override System.Collections.IEnumerator Execute(UTContext context)
{
    var theFirstListProperty = firstListProperty.EvaluateIn(context);
    if (string.IsNullOrEmpty(theFirstListProperty))
    {
        throw new UTFailBuildException("You must specify the property holding the first list.", this);
    }

    var theSecondListProperty = secondListProperty.EvaluateIn(context);
    // BUGFIX: this guard originally re-checked theFirstListProperty, so a
    // missing second-list property slipped through.
    if (string.IsNullOrEmpty(theSecondListProperty))
    {
        throw new UTFailBuildException("You must specify the property holding the second list.", this);
    }

    var theOutputProperty = outputProperty.EvaluateIn(context);
    // BUGFIX: same copy-paste error here — it also re-checked theFirstListProperty.
    if (string.IsNullOrEmpty(theOutputProperty))
    {
        throw new UTFailBuildException("You must specify the output property.", this);
    }

    var firstEnumerable = context [theFirstListProperty];
    if (!(firstEnumerable is IEnumerable))
    {
        if (firstEnumerable == null)
        {
            throw new UTFailBuildException("Property '" + theFirstListProperty + "' has a null value. Cannot combine this.", this);
        }
        throw new UTFailBuildException("Property '" + theFirstListProperty + "' is of type '" + firstEnumerable.GetType() + "'. Cannot combine this.", this);
    }

    var secondEnumerable = context [theSecondListProperty];
    if (!(secondEnumerable is IEnumerable))
    {
        if (secondEnumerable == null)
        {
            throw new UTFailBuildException("Property '" + theSecondListProperty + "' has a null value. Cannot combine this.", this);
        }
        throw new UTFailBuildException("Property '" + theSecondListProperty + "' is of type '" + secondEnumerable.GetType() + "'. Cannot combine this.", this);
    }

    // Materialise both lists as sets so HashSet's set operations can be used.
    var firstListAsSet = new HashSet<object>();
    foreach (var obj in (IEnumerable)firstEnumerable)
    {
        firstListAsSet.Add(obj);
    }
    var secondListAsSet = new HashSet<object>();
    foreach (var obj in (IEnumerable)secondEnumerable)
    {
        secondListAsSet.Add(obj);
    }

    // All four operations mutate firstListAsSet in place.
    var theListOperationType = listOperationType.EvaluateIn(context);
    switch (theListOperationType)
    {
    case UTCombineListOperation.Union:
        firstListAsSet.UnionWith(secondListAsSet);
        break;
    case UTCombineListOperation.Intersect:
        firstListAsSet.IntersectWith(secondListAsSet);
        break;
    case UTCombineListOperation.Subtract:
        firstListAsSet.ExceptWith(secondListAsSet);
        break;
    case UTCombineListOperation.ExclusiveOr:
        firstListAsSet.SymmetricExceptWith(secondListAsSet);
        break;
    }

    context [theOutputProperty] = firstListAsSet;
    yield return ("");
}
/// <summary>
/// Pumps the Galaxy P2P connection once: maintains the lobby, registers new
/// members, reassembles and dispatches incoming packets, and reports peers
/// that have left. Callbacks fire for connect, message, disconnect and error.
/// </summary>
public void Receive(Action<GalaxyID> onConnection, Action<GalaxyID, Stream> onMessage, Action<GalaxyID> onDisconnect, Action<string> onError)
{
    long timeNow = getTimeNow();
    // No lobby yet: if we own it and the recreate timer has elapsed, try to
    // recreate it; either way there is nothing to receive.
    if (lobby == null)
    {
        if (lobbyOwner == selfId && recreateTimer > 0 && recreateTimer <= timeNow)
        {
            recreateTimer = 0L;
            tryCreateLobby();
        }
        return;
    }
    // Protocol version mismatch with the lobby aborts the session.
    string lobbyVersion = GalaxyInstance.Matchmaking().GetLobbyData(lobby, "protocolVersion");
    if (lobbyVersion != "" && lobbyVersion != protocolVersion)
    {
        onError("Strings\\UI:CoopMenu_FailedProtocolVersion");
        Close();
        return;
    }
    // Register lobby members we have not seen yet (ghosts are excluded).
    foreach (GalaxyID lobbyMember in LobbyMembers())
    {
        if (!connections.ContainsKey(lobbyMember.ToUint64()) && !ghosts.Contains(lobbyMember.ToUint64()))
        {
            connections.Add(lobbyMember.ToUint64(), lobbyMember);
            onConnection(lobbyMember);
        }
    }
    // Drop ghost entries for peers that are no longer in the lobby.
    ghosts.IntersectWith(from peer in LobbyMembers() select peer.ToUint64());
    byte[] buffer = new byte[1300];
    uint packetSize = 1300u;
    GalaxyID sender = new GalaxyID();
    // Drain all pending packets. Byte 0 is a flag: 255 marks a packet to
    // ignore here, 1 marks an incomplete fragment; payload starts at byte 4.
    while (GalaxyInstance.Networking().ReadP2PPacket(buffer, (uint)buffer.Length, ref packetSize, ref sender))
    {
        lastMessageTime[sender.ToUint64()] = timeNow;
        if (!connections.ContainsKey(sender.ToUint64()) || buffer[0] == byte.MaxValue)
        {
            continue;
        }
        bool incomplete = buffer[0] == 1;
        MemoryStream messageData2 = new MemoryStream();
        messageData2.Write(buffer, 4, (int)(packetSize - 4));
        if (incompletePackets.ContainsKey(sender.ToUint64()))
        {
            // Append this fragment to the sender's pending message; if it is
            // the final fragment, dispatch the fully reassembled message.
            messageData2.Position = 0L;
            messageData2.CopyTo(incompletePackets[sender.ToUint64()]);
            if (!incomplete)
            {
                messageData2 = incompletePackets[sender.ToUint64()];
                incompletePackets.Remove(sender.ToUint64());
                messageData2.Position = 0L;
                onMessage(sender, messageData2);
            }
        }
        else if (incomplete)
        {
            // First fragment of a multi-part message: stash it, positioned at
            // the end so later fragments append after it.
            messageData2.Position = messageData2.Length;
            incompletePackets[sender.ToUint64()] = messageData2;
        }
        else
        {
            // Single-packet message: dispatch immediately.
            messageData2.Position = 0L;
            onMessage(sender, messageData2);
        }
    }
    // Report and close connections for peers that left the lobby or were
    // marked as ghosts (collected first to avoid mutating while enumerating).
    List<GalaxyID> disconnectedPeers = new List<GalaxyID>();
    foreach (GalaxyID peer3 in connections.Values)
    {
        if (!lobbyContains(peer3) || ghosts.Contains(peer3.ToUint64()))
        {
            disconnectedPeers.Add(peer3);
        }
    }
    foreach (GalaxyID peer2 in disconnectedPeers)
    {
        onDisconnect(peer2);
        close(peer2);
    }
}
static void Main(string[] args) { HashSet <int> A = new HashSet <int>(); HashSet <int> B = new HashSet <int>(); A.Add(3); A.Add(5); A.Add(8); A.Add(9); B.Add(9); B.Add(3); B.Add(4); B.Add(5); // NÃO ACEITA REPETIÇÕES, o valor 4 não irá entrar novamente no Conjunto: B.Add(4); B.Remove(9); foreach (int x in B) { Console.WriteLine(x); } Console.WriteLine("---------------"); A.ExceptWith(B); foreach (int x in A) { Console.WriteLine(x); } Console.WriteLine("---------------"); A.UnionWith(B); foreach (int x in A) { Console.WriteLine(x); } Console.WriteLine("---------------"); A.IntersectWith(B); foreach (int x in A) { Console.WriteLine(x); } Console.WriteLine("---------------"); Console.Write("Digite um valor inteiro: "); int N = int.Parse(Console.ReadLine()); if (B.Contains(N)) { Console.WriteLine(N + " pertence ao conjunto B!"); } else { Console.WriteLine(N + " não pertence ao conjunto B!"); } }
/// <summary>
/// Console walkthrough of the main .NET collection types: List, Queue, Stack,
/// HashSet, LinkedList, Dictionary and the sorted collections.
/// </summary>
static void Main()
{
    // 1. List (size increases with a multiplication factor of 2 at each resize)
    List<int> list = new List<int>() { 1, 2, 3, 4, 5, 6, 7, 8 };
    list.Add(9);
    Console.WriteLine("List : ");
    foreach (var num1 in list)
    {
        Console.Write("{0} ", num1);
    }
    Console.WriteLine();
    Console.WriteLine(list.Count);
    Console.WriteLine("{0}\n", list.Capacity);
    // 2. Queue (FIFO)
    Queue<int> queue = new Queue<int>();
    queue.Enqueue(1);
    queue.Enqueue(2);
    queue.Enqueue(3);
    queue.Enqueue(4);
    queue.Enqueue(5);
    Console.WriteLine("Queue : ");
    while (queue.Count > 0)
    {
        Console.Write("{0} ", queue.Dequeue());
    }
    Console.WriteLine("\n{0}\n", queue.Count);
    // 3. Stack (LIFO)
    Stack<int> stack = new Stack<int>();
    stack.Push(1);
    stack.Push(2);
    stack.Push(3);
    stack.Push(4);
    stack.Push(5);
    Console.WriteLine("Stack : ");
    while (stack.Count > 0)
    {
        Console.Write("{0} ", stack.Pop());
    }
    // NOTE(review): prints queue.Count — likely meant stack.Count; both are
    // 0 at this point, so the visible output is the same.
    Console.WriteLine("\n{0}\n", queue.Count);
    // 4. HashSet (stores only unique elements)
    HashSet<int> set1 = new HashSet<int>() { 2, 3, 1, 9, 4 };
    HashSet<int> set2 = new HashSet<int>() { 8, 3, 2, 9, 5 };
    HashSet<int> set3 = new HashSet<int>() { 2, 3, 1, 9, 4 };
    HashSet<int> set4 = new HashSet<int>() { 2, 3, 1, 9, 4 };
    HashSet<int> set5 = new HashSet<int>() { 2, 3, 1, 9, 4 };
    set1.Add(6);
    Console.WriteLine("HashSet : ");
    foreach (var num2 in set1)
    {
        Console.Write("{0} ", num2);
    }
    // IntersectWith mutates set1 in place.
    set1.IntersectWith(set2);
    Console.Write("\nIntersecion of set1 and set2 : ");
    foreach (var num3 in set1)
    {
        Console.Write("{0} ", num3);
    }
    // NOTE(review): SetEquals only RETURNS a bool and does not modify set3,
    // so the loop below prints set3 unchanged.
    set3.SetEquals(set2);
    Console.Write("\nSetting Equal to set2 : ");
    foreach (var num4 in set3)
    {
        Console.Write("{0} ", num4);
    }
    set4.UnionWith(set2);
    Console.Write("\nUnion of set1 and set2 : ");
    foreach (var num5 in set4)
    {
        Console.Write("{0} ", num5);
    }
    set5.SymmetricExceptWith(set2);
    Console.Write("\nSymmetricExceptWith of set1 and set2 : ");
    foreach (var num6 in set5)
    {
        Console.Write("{0} ", num6);
    }
    // 5. LinkedList (doubly linked list)
    LinkedList<int> linklist = new LinkedList<int>();
    linklist.AddFirst(1);
    linklist.AddLast(2);
    linklist.AddBefore(linklist.First, 3);
    linklist.AddAfter(linklist.Last, 4);
    linklist.AddLast(5);
    Console.WriteLine("\n\nLinked List : ");
    // Walk forwards from the head...
    var node1 = linklist.First;
    while (node1 != null)
    {
        Console.Write("{0} ", node1.Value);
        node1 = node1.Next;
    }
    Console.WriteLine();
    // ...then backwards from the tail.
    var node2 = linklist.Last;
    while (node2 != null)
    {
        Console.Write("{0} ", node2.Value);
        node2 = node2.Previous;
    }
    // 6. Dictionary
    Dictionary<int, string> map = new Dictionary<int, string>();
    map.Add(1, "One");
    map.Add(2, "Two");
    map.Add(3, "Three");
    map.Add(4, "Four");
    map.Add(5, "Five");
    Console.WriteLine("\n\nDictionary / Map : ");
    foreach (var num7 in map)
    {
        Console.WriteLine("{0} : {1} ", num7.Key, num7.Value);
    }
    var sortedList = new SortedList<int, string>();
    var sortedset = new SortedSet<int>();
    var sortedDictionart = new SortedDictionary<int, string>();
    // Sorted collections keep their data in sorted order.
    // SortedDictionary and SortedList are almost the same, with one key difference:
    // SortedDictionary is optimised for quick inserts/removals (uses more memory),
    // SortedList is optimised for quick iteration (uses less memory).
    Console.ReadLine();
}
/// <summary>
/// Migrates a V2 schema to a V3 schema. The V3 schema maintains a
/// copy of 'MaxAge' and 'MaxCount' in the streams table for
/// performance reasons. This modifies the schema and iterates over
/// all streams lifting `MaxAge` and `MaxCount` if the stream has
/// metadata. Migration progress is logged and reported.
///
/// As usual, ensure you have the database backed up.
/// </summary>
/// <param name="progress">A provider that can receive progress updates.</param>
/// <param name="cancellationToken">The cancellation instruction.</param>
/// <returns>A Task representing the asynchronous operation.</returns>
/// <exception cref="InvalidOperationException">
/// The current schema version is neither 2 nor 3.
/// </exception>
public async Task Migrate(IProgress<MigrateProgress> progress, CancellationToken cancellationToken)
{
    GuardAgainstDisposed();
    Logger.Info("Starting migration from schema v2 to v3...");
    try
    {
        var checkSchemaResult = await CheckSchema(cancellationToken);
        if (checkSchemaResult.IsMatch())
        {
            // Already at v3 — migration is idempotent at this level.
            Logger.Info("Nothing to do, schema is already v3.");
            return;
        }
        if (checkSchemaResult.CurrentVersion != 2)
        {
            // FIX: corrected typo "migtation" -> "migration" in the error message.
            string message = $"Schema did not match expected version for migration - 2. Actual version is {checkSchemaResult.CurrentVersion}";
            throw new InvalidOperationException(message);
        }
        progress.Report(new MigrateProgress(MigrateProgress.MigrateStage.SchemaVersionChecked));

        // Migrate the schema.
        using (var connection = _createConnection())
        {
            await connection.OpenAsync(cancellationToken).NotOnCapturedContext();
            using (var command = new SqlCommand(_scripts.Migration_v3, connection))
            {
                command.CommandTimeout = _commandTimeout;
                await command
                    .ExecuteNonQueryAsync(cancellationToken)
                    .NotOnCapturedContext();
            }
        }
        progress.Report(new MigrateProgress(MigrateProgress.MigrateStage.SchemaMigrated));

        // Load up the stream IDs that have metadata. Metadata streams are named
        // "$$<streamId>", so strip the prefix and intersect with the data streams:
        // only streams that actually have a metadata stream need processing.
        Logger.Info("Schema migrated. Starting data migration. Loading stream Ids...");
        HashSet<string> streamIds = new HashSet<string>();
        HashSet<string> metadataStreamIds = new HashSet<string>();
        var listStreamsResult = await ListStreams(int.MaxValue, cancellationToken: cancellationToken);
        foreach (var streamId in listStreamsResult.StreamIds)
        {
            if (streamId.StartsWith("$$"))
            {
                metadataStreamIds.Add(streamId.Substring(2));
            }
            else
            {
                streamIds.Add(streamId);
            }
        }
        streamIds.IntersectWith(metadataStreamIds);
        progress.Report(new MigrateProgress(MigrateProgress.MigrateStage.StreamIdsLoaded));

        // Migrate data: lift MaxAge/MaxCount from each stream's metadata into
        // the streams table.
        Logger.Info("{count} streams to be processed...", streamIds.Count);
        int i = 0;
        foreach (var streamId in streamIds)
        {
            var metadata = await GetStreamMetadataInternal(streamId, cancellationToken);
            if (metadata != null)
            {
                // FIX: log template was missing the closing parenthesis.
                Logger.Info("Migrating stream {streamId} ({current}/{total})", streamId, i, streamIds.Count);
                using (var connection = _createConnection())
                {
                    await connection.OpenAsync(cancellationToken).NotOnCapturedContext();
                    using (var command = new SqlCommand(_scripts.SetStreamMetadata, connection))
                    {
                        command.CommandTimeout = _commandTimeout;
                        command.Parameters.Add(new SqlParameter("streamId", SqlDbType.Char, 42)
                        {
                            Value = new StreamIdInfo(streamId).SqlStreamId.Id
                        });
                        command.Parameters.AddWithValue("streamIdOriginal", "ignored");
                        // Nullable ints must be boxed before null-coalescing to DBNull.
                        command.Parameters.Add("maxAge", SqlDbType.Int);
                        command.Parameters["maxAge"].Value = (object)metadata.MaxAge ?? DBNull.Value;
                        command.Parameters.Add("maxCount", SqlDbType.Int);
                        command.Parameters["maxCount"].Value = (object)metadata.MaxCount ?? DBNull.Value;
                        await command.ExecuteNonQueryAsync(cancellationToken);
                    }
                }
            }
            i++;
        }
        progress.Report(new MigrateProgress(MigrateProgress.MigrateStage.MetadataMigrated));
    }
    catch (Exception ex)
    {
        // FIX: corrected typo "occured" -> "occurred" in the log message.
        Logger.ErrorException("Error occurred during migration", ex);
        throw;
    }
}
internal static void ActParentSort() { if (btnLock) { return; } btnLock = true; int nowAccCount = MoreAccessories.PluginInstance._charaMakerData.nowAccessories.Count; int dstSlot = -1; Dictionary <int, string> parts = new Dictionary <int, string>(); for (int i = 0; i < (20 + nowAccCount); i++) { ChaFileAccessory.PartsInfo part = MoreAccessories.GetPartsInfo(i); if (part.type == 120) { continue; } parts[i] = part.parentKey; dstSlot = i; } if (parts.Count == 0) { Logger.LogMessage("Nothing to do"); btnLock = false; return; } dstSlot++; HashSet <string> parentsUsed = new HashSet <string>(parts.OrderBy(x => x.Value).Select(x => x.Value)); HashSet <string> parentsDefined = new HashSet <string>(Enum.GetNames(typeof(ChaAccessoryDefine.AccessoryParentKey)).Where(x => x.StartsWith("a_n_"))); parentsDefined.IntersectWith(parentsUsed); List <string> parentSorted = parentsDefined.ToList(); parentSorted.AddRange(parentsUsed.Where(x => !x.StartsWith("a_n_"))); List <QueueItem> Queue = new List <QueueItem>(); bool changed = false; int max = -1; foreach (string parent in parentSorted) { foreach (KeyValuePair <int, string> part in parts) { if (part.Value != parent) { continue; } if (max > part.Key) { changed = true; } max = part.Key; Queue.Add(new QueueItem(part.Key, dstSlot)); dstSlot++; } } if (!changed) { Logger.LogMessage("Same order, nothing to do"); btnLock = false; return; } if (dstSlot - 19 > nowAccCount) { for (int i = 1; i < (dstSlot - 19 - nowAccCount); i++) { Traverse.Create(MoreAccessories.PluginInstance).Method("AddSlot").GetValue(); } } ProcessQueue(Queue); btnLock = false; ChaCustom.CustomBase.Instance.chaCtrl.ChangeCoordinateTypeAndReload(false); }
// Verify prog while tracking only variables in trackedVars.
// Returns true if no error is found.
// Returns false if error is found. Returns counterexample via pout
// If returnTrace is set to false, then pout is always null
// (no counterexample generation is attempted)
// cex: the error trace in prog (if there is one)
// tinfo: the transformation carried out in going from prog to pout
// Call this method via: checkProgram or checkPath
//
// Pipeline: slice (cp1) -> optional storm instrumentation (cp2) ->
// optional static inlining/unrolling (cp3) -> optional contract inference
// (ciPass) -> verification (cp4). On failure, the error trace is mapped back
// through the passes in reverse order to produce a trace in the original program.
private static bool VerifyProgram(ref PersistentCBAProgram prog, VarSet trackedVars, bool returnTrace, out PersistentCBAProgram pout, out InsertionTrans tinfo, out ErrorTrace cex)
{
    PersistentCBAProgram curr = prog;
    pout = null;
    cex = null;
    tinfo = null;

    //////
    // These are the compilation phases
    //////

    VariableSlicePass cp1 = new VariableSlicePass(trackedVars);
    // cp2 only applies to concurrent programs.
    StormInstrumentationPass cp2 = null;
    var recordK = new HashSet<string>();
    if (!GlobalConfig.isSingleThreaded)
    {
        cp2 = new StormInstrumentationPass();
    }
    StaticInliningAndUnrollingPass cp3 = null;
    if (GlobalConfig.staticInlining > 0)
    {
        cp3 = new StaticInliningAndUnrollingPass(new StaticSettings(CommandLineOptions.Clo.RecursionBound, CommandLineOptions.Clo.RecursionBound));
    }
    ContractInfer ciPass = null;

    // Run the source transformations
    curr = cp1.run(curr);
    if (cp2 != null)
    {
        curr = cp2.run(curr);
    }
    if (cp3 != null)
    {
        curr = cp3.run(curr);
    }

    // infer contracts
    if (GlobalConfig.InferPass != null)
    {
        ciPass = new ContractInfer(GlobalConfig.InferPass);
        ciPass.ExtractLoops = false;
        curr = ciPass.run(curr);
        Console.WriteLine("Houdini took {0} seconds", ciPass.lastRun.TotalSeconds.ToString("F2"));
        // Inference runs at most once; later calls skip this phase.
        GlobalConfig.InferPass = null;
        // add summaries to the original program (note: mutates the ref parameter)
        prog = ciPass.addSummaries(prog);
    }

    // record k and tid (instrumentation variables introduced by cp2)
    if (cp2 != null)
    {
        recordK.Add(cp2.varKName);
        recordK.Add(cp2.tidVarName);
    }
    if (GlobalConfig.varsToRecord.Count != 0)
    {
        // Only record user-requested variables that are actually tracked.
        recordK.UnionWith(GlobalConfig.varsToRecord);
        recordK.IntersectWith(trackedVars.Variables);
    }

    // Now verify
    VerificationPass cp4 = new VerificationPass(true, recordK);
    curr = cp4.run(curr);

    reachedBound = cp4.reachedBound;

    if (cp4.success)
    {
        // Program correct.
        return(true);
    }
    else if (!returnTrace)
    {
        return(false);
    }
    else
    {
        // Concretize the trace and see if its a real bug
        // Concretization: map back the trace to the original program,
        // undoing each pass in reverse order of application.
        var trace4 = cp4.trace;
        //PrintProgramPath.print(cp4.input as PersistentCBAProgram, trace4, "temp4");
        if (ciPass != null)
        {
            trace4 = ciPass.mapBackTrace(trace4);
        }
        var trace3 = trace4;
        if (cp3 != null)
        {
            trace3 = cp3.mapBackTrace(trace4);
        }
        //PrintProgramPath.print(cp3.input as PersistentCBAProgram, trace3, "temp3");
        var trace2 = trace3;
        if (cp2 != null)
        {
            trace2 = cp2.mapBackTrace(trace3);
        }
        var trace1 = cp1.mapBackTrace(trace2);
        //PrintProgramPath.print(cp1.input as PersistentCBAProgram, trace1, "temp1");
        cex = trace1;

        // Restrict the program to the trace
        tinfo = new InsertionTrans();
        var traceProgCons = new RestrictToTrace(cp1.input.getProgram(), tinfo);
        ErrorTrace.fillInContextSwitchInfo(trace1);
        traceProgCons.addTrace(trace1);

        pout = new PersistentCBAProgram(traceProgCons.getProgram(), traceProgCons.getFirstNameInstance(cp1.getInput().mainProcName), cp1.getInput().contextBound, ConcurrencyMode.FixedContext);
        //pout.writeToFile("pout.bpl");
        return(false);
    }
}
static void Main(string[] args) { HashSet <string> showroom = new HashSet <string>() { "Focus", "Mustang", "RX-8", "Mazda 3" }; foreach (string car in showroom) { Console.WriteLine($"{car}"); } showroom.Add("Mazda 3"); Console.WriteLine("---------------------------------"); foreach (string car in showroom) { Console.WriteLine($"{car}"); } ; HashSet <string> usedCars = new HashSet <string>(showroom) { "F150", "Viper" }; List <string> multipleCars = new List <string>() { "Viper", "Viper", "F150", "F150", "Mustang", "Camaro" }; HashSet <string> movedshowroom = new HashSet <string>(multipleCars); movedshowroom.UnionWith(showroom); Console.WriteLine("--------------------------------------"); movedshowroom.Remove("Mazda 3"); foreach (string car in movedshowroom) { Console.WriteLine($"{car}"); } ; HashSet <string> clone = new HashSet <string>(movedshowroom); HashSet <string> junkyard = new HashSet <string>() { "Bronco", "Viper", "Altima", "Maxima", "Mustang" }; clone.IntersectWith(junkyard); Console.WriteLine("-------------------------------"); foreach (string car in movedshowroom) { Console.WriteLine($"{car}"); } ; junkyard.UnionWith(movedshowroom); foreach (string car in movedshowroom) { Console.WriteLine($"{car}"); } ; }
/// <summary>Modifies the current set so that it contains only elements that are also in a specified collection.</summary> /// <param name="other">The collection to compare to the current set.</param> /// <exception cref="T:System.ArgumentNullException"> /// <paramref name="other" /> is null. /// </exception> public void IntersectWith(IEnumerable <T> other) { _inner.IntersectWith(other); }
// HashSet demo over a car inventory: de-duplication, UnionWith, Remove,
// and IntersectWith on a copy.
static void Main(string[] args)
{
    List<string> Inventory = new List<string>()
    {
        "Camry", "F-150", "MDX", "Camry", "Camry", "Taurus", "F-150", "MDX",
        "Camry", "Xterra", "Mustang", "Suburban", "Santa Fe", "F-150", "Camry",
        "F-150", "F-150", "Mustang", "Camry", "Camry", "Camry", "Escalade",
        "Q30", "Camry", "MDX",
    };

    // IDIOM: the HashSet constructor de-duplicates the list directly; no need
    // for a manual foreach/Add loop.
    HashSet<string> allModels = new HashSet<string>(Inventory);

    // Display all unique model names
    foreach (string vehicle in allModels)
    {
        Console.WriteLine($"{vehicle}");
    }
    /*
     * Output
     *
     * Camry
     * F-150
     * MDX
     * Taurus
     * Xterra
     * Mustang
     * Suburban
     * Santa Fe
     * Escalade
     * Q30
     */

    HashSet<string> showroom = new HashSet<string>() { "Corolla", "Kia Sole" };
    showroom.Add("Civic");
    showroom.Add("Accord");
    // Duplicate Add is a no-op — Count stays at 4.
    showroom.Add("Accord");
    Console.WriteLine(showroom.Count);

    HashSet<string> UsedLot = new HashSet<string>();
    UsedLot.Add("Centra");
    UsedLot.Add("Carlos");
    // Merge the used lot into the showroom.
    showroom.UnionWith(UsedLot);
    foreach (string car in showroom)
    {
        Console.WriteLine($"{car}");
    }

    showroom.Remove("Centra");
    foreach (string car in showroom)
    {
        Console.WriteLine($"------- Cars after removing one {car}");
    }

    HashSet<string> Junkyard = new HashSet<string>() { "car 1", "car 2", "car 3", "Civic", "car 4" };
    // This finds cars present in both the showroom and the junkyard.
    // IntersectWith mutates its receiver, so run it on a copy.
    HashSet<string> clone = new HashSet<string>(showroom);
    clone.IntersectWith(Junkyard);
    Console.WriteLine("---Cars that match from junkyard to clone---");
    foreach (string car in clone)
    {
        Console.WriteLine($"{car}");
    }

    showroom.UnionWith(Junkyard);
    Console.WriteLine("---Junkyard and showroom mashed---");
    foreach (string car in showroom)
    {
        Console.WriteLine($"{car}");
    }

    showroom.Remove("car 1");
    showroom.Remove("car 3");
    showroom.Remove("Kia Sole");
    Console.WriteLine("---Sold some cars---");
    foreach (string car in showroom)
    {
        Console.WriteLine($"{car}");
    }
}
// Walkthrough of hash sets, dictionaries, and nested collections.
static void Main(string[] args)
{
    // HASH SETS: unordered collections that silently reject duplicates.
    HashSet<int> grades = new HashSet<int>() { 75, 97, 51, 83 };
    grades.Add(75); // duplicate — set is unchanged

    // A second set of grades to compare against.
    HashSet<int> secondGrades = new HashSet<int>() { 40, 89, 50, 97 };

    // IntersectWith mutates its receiver, so intersect a copy to keep the
    // original `grades` set intact.
    HashSet<int> commonGrades = new HashSet<int>(grades);
    commonGrades.IntersectWith(secondGrades);

    // DICTIONARIES: unique, non-null keys mapped to values.
    Dictionary<string, string> teacherNames = new Dictionary<string, string>()
    {
        { "LeadInstructor", "Steve" },
        { "JuniorInstructor", "Emily" },
        { "TA", "Jordan" },
    };

    // Nested collections: a dictionary whose values are hash sets of ints
    // (class name -> student ids).
    Dictionary<string, HashSet<int>> studentIds = new Dictionary<string, HashSet<int>>()
    {
        { "Ms. Jarell's Class", new HashSet<int>() { 199, 133, 400, 500 } },
        { "Ms. Miller's Class", new HashSet<int>() { 6000, 203, 300, 200 } }
    };

    // LISTS of dictionaries: each dictionary maps a student name to a birthday.
    List<Dictionary<string, string>> studentList = new List<Dictionary<string, string>>()
    {
        new Dictionary<string, string> { { "Jewel", "April 13" }, { "Michael", "June 25" } },
        new Dictionary<string, string> { { "Ronnie", "April 42" }, { "Rachel", "June 100" } }
    };

    // Outer loop walks the cohorts; inner loop walks each cohort's
    // KeyValuePairs, where .Key is the name and .Value is the birthday.
    foreach (Dictionary<string, string> cohort in studentList)
    {
        foreach (KeyValuePair<string, string> student in cohort)
        {
            Console.WriteLine($"{student.Key}'s birthday is {student.Value}");
        }
    }
    // THE END
}
// Computes one AI move: picks a single ally unit that can still act, then
// decides between evading (VIPs), healing (healers), and attacking or moving
// toward the capture point (melee/ranged/status units).
public override async Task<Move> getMove()
{
    //Get all unit categories
    List<Coord> units = findAllUnits();
    List<Coord> enemies = filterEnemies(units);
    List<Coord> allies = filterAllies(units);
    List<Coord> available = filterHasMove(allies);

    // Select a unit based on type: a healer is chosen immediately (break);
    // a melee unit overrides any previous pick; other types only replace a
    // non-melee pick.
    Coord unitCoord = new Coord(0, 0);
    Unit curUnit = null;
    foreach (Coord coord in available)
    {
        Unit unit = battlefield.units[coord.x, coord.y];
        if (unit is HealerUnit)
        {
            unitCoord = coord;
            curUnit = unit;
            break;
        }
        else if (unit is MeleeUnit)
        {
            unitCoord = coord;
            curUnit = unit;
        }
        else if (!(curUnit is MeleeUnit))
        {
            unitCoord = coord;
            curUnit = unit;
        }
    }

    // NOTE(review): if `available` is empty, curUnit stays null and this
    // dereference throws — presumably callers only invoke getMove when a unit
    // can still act. Confirm.
    if (curUnit.getValidMoves(unitCoord.x, unitCoord.y, battlefield).Count == 0 && curUnit.getTargets(unitCoord.x, unitCoord.y, battlefield, character).Count == 0)
    {
        // No moves and no targets: "move" in place.
        return(new Move(unitCoord, unitCoord));
    }

    if (VIPs.Contains(curUnit))
    {
        // Debug.Log("VIP");
        // Evade: a VIP never attacks. Prefer the safe tile with the smallest
        // summed distance to allies.
        HashSet<Coord> dangerZone = enemyAttackZone(enemies);
        HashSet<Coord> safeZone = safeMoves(unitCoord, dangerZone);
        if (safeZone.Count > 0)
        {
            int bestScore = Int32.MaxValue;
            Coord bestCoord = null;
            foreach (Coord coord in safeZone)
            {
                int distScore = sumDistances(coord, allies);
                if (distScore < bestScore)
                {
                    bestScore = distScore;
                    bestCoord = coord;
                }
            }
            return(new Move(unitCoord, bestCoord));
        }
        else
        {
            // No safe tile exists: instead maximize summed distance to enemies
            // (staying put is also a candidate).
            int bestScore = 0;
            Coord bestCoord = null;
            List<Coord> moves = curUnit.getValidMoves(unitCoord.x, unitCoord.y, battlefield);
            moves.Add(unitCoord);
            foreach (Coord coord in moves)
            {
                int distScore = sumDistances(coord, enemies);
                if (distScore > bestScore)
                {
                    bestScore = distScore;
                    bestCoord = coord;
                }
            }
            return(new Move(unitCoord, bestCoord));
        }
    }

    //Decide action based on type
    if (curUnit is HealerUnit)
    {
        Coord bestTarget = null;
        float bestScore = 0;
        if (curUnit.hasMovedThisTurn)
        {
            // Already moved: heal the in-range target with the lowest health
            // fraction (maxHealth/health grows as health drops).
            // NOTE(review): divides by getHealth() — confirm targets can never
            // have 0 health here.
            foreach (Coord target in curUnit.getTargets(unitCoord.x, unitCoord.y, battlefield, character))
            {
                Unit targetUnit = battlefield.units[target.x, target.y];
                float score = targetUnit.maxHealth / targetUnit.getHealth();
                if (score > bestScore)
                {
                    bestTarget = target;
                    bestScore = score;
                }
            }
            return(new Move(unitCoord, bestTarget));
        }
        // NOTE(review): health < maxHealth * -0.4 can never be true for
        // positive health, so the Flee branch is dead — possibly meant `* 0.4`.
        // Left as-is since the branch body is empty and "fixing" the comparison
        // would change behavior.
        if (curUnit.getHealth() < curUnit.maxHealth * -0.4)
        {
            // TODO
            // Flee
        }
        else
        {
            // Heal: look for allies below 60% health.
            List<Coord> injured = filterInjured(allies, 0.6f);
            if (injured.Count > 0)
            {
                // Injured allies reachable this turn = attack zone ∩ injured.
                HashSet<Coord> attackZone = curUnit.getTotalAttackZone(unitCoord.x, unitCoord.y, battlefield, character);
                attackZone.IntersectWith(injured);
                List<Coord> targets = new List<Coord>(attackZone);
                if (targets.Count > 0)
                {
                    // Pick the reachable ally with the lowest health fraction.
                    bestTarget = null;
                    bestScore = 0;
                    foreach (Coord target in targets)
                    {
                        Unit targetUnit = battlefield.units[target.x, target.y];
                        float score = targetUnit.maxHealth / targetUnit.getHealth();
                        if (score > bestScore)
                        {
                            bestTarget = target;
                            bestScore = score;
                        }
                    }
                    // If unit has already moved heal best target
                    if (curUnit.hasMovedThisTurn)
                    {
                        return(new Move(unitCoord, bestTarget));
                    }
                    // Else choose best tile to move to: the tile adjacent to the
                    // target (restricted to valid moves) with the best defense.
                    HashSet<Coord> adjacentTiles = new HashSet<Coord>();
                    adjacentTiles.Add(new Coord(bestTarget.x + 1, bestTarget.y));
                    adjacentTiles.Add(new Coord(bestTarget.x - 1, bestTarget.y));
                    adjacentTiles.Add(new Coord(bestTarget.x, bestTarget.y + 1));
                    adjacentTiles.Add(new Coord(bestTarget.x, bestTarget.y - 1));
                    adjacentTiles.IntersectWith(curUnit.getValidMoves(unitCoord.x, unitCoord.y, battlefield));
                    // Hardcoded hack for time efficiency
                    if (manhattanDistance(unitCoord, bestTarget) == 1)
                    {
                        adjacentTiles.Add(unitCoord);
                    }
                    Coord bestCoord = null;
                    int tileDef = Int32.MinValue;
                    foreach (Coord coord in adjacentTiles)
                    {
                        if (tileDef < ConstantTables.TileDefense[(int)battlefield.map[coord.x, coord.y].Peek().tileType])
                        {
                            tileDef = ConstantTables.TileDefense[(int)battlefield.map[coord.x, coord.y].Peek().tileType];
                            bestCoord = coord;
                        }
                    }
                    // Already on the best tile: heal instead of moving.
                    if (unitCoord.Equals(bestCoord))
                    {
                        return(new Move(unitCoord, bestTarget));
                    }
                    return(new Move(unitCoord, bestCoord));
                }
                else
                {
                    // Find nearest injured and move to them
                    bestTarget = nearestCoord(unitCoord, injured);
                    Coord bestCoord = nearestCoord(bestTarget, curUnit.getValidMoves(unitCoord.x, unitCoord.y, battlefield));
                    return(new Move(unitCoord, bestCoord));
                }
            }
            else
            {
                // Evade around the capture point
                HashSet<Coord> dangerZone = enemyAttackZone(enemies);
                HashSet<Coord> safeZone = safeMoves(unitCoord, dangerZone);
                if (safeZone.Count > 0)
                {
                    // Safe tile nearest the capture point.
                    bestScore = Int32.MaxValue;
                    Coord bestCoord = null;
                    foreach (Coord coord in safeZone)
                    {
                        int distScore = manhattanDistance(capturePoint, coord);
                        if (distScore < bestScore)
                        {
                            bestScore = distScore;
                            bestCoord = coord;
                        }
                    }
                    return(new Move(unitCoord, bestCoord));
                }
                else
                {
                    // No safe tile: tile farthest from the capture point.
                    bestScore = 0;
                    Coord bestCoord = null;
                    List<Coord> moves = curUnit.getValidMoves(unitCoord.x, unitCoord.y, battlefield);
                    moves.Add(unitCoord);
                    foreach (Coord coord in moves)
                    {
                        int distScore = manhattanDistance(capturePoint, coord);
                        if (distScore > bestScore)
                        {
                            bestScore = distScore;
                            bestCoord = coord;
                        }
                    }
                    return(new Move(unitCoord, bestCoord));
                }
            }
        }
    }
    else if (curUnit is MeleeUnit)
    {
        // NOTE(review): same always-false `* -0.4` comparison as above.
        if (curUnit.getHealth() < curUnit.maxHealth * -0.4)
        {
            // TODO
            // Flee
        }
        else
        {
            // Seek an enemy to attack if health is high
            HashSet<Coord> attackZone = curUnit.getTotalAttackZone(unitCoord.x, unitCoord.y, battlefield, character);
            attackZone.IntersectWith(enemies);
            List<Coord> targets = new List<Coord>(attackZone);
            float bestScore = 0;
            Coord bestTarget = null;
            if (targets.Count > 0)
            {
                // If targets are in range find best (AIBattle scores the matchup).
                foreach (Coord target in targets)
                {
                    Tile enemyTile = battlefield.map[target.x, target.y].Peek();
                    Unit enemy = battlefield.units[target.x, target.y];
                    AIBattle battle = new AIBattle(curUnit, enemy, enemyTile, target, battlefield);
                    if (battle.score > bestScore)
                    {
                        bestScore = battle.score;
                        bestTarget = target;
                    }
                }
                // If unit has already moved attack best target
                if (curUnit.hasMovedThisTurn)
                {
                    return(new Move(unitCoord, bestTarget));
                }
                // Else choose best tile to move to: adjacent tile (restricted to
                // valid moves) with the highest tile defense.
                HashSet<Coord> adjacentTiles = new HashSet<Coord>();
                adjacentTiles.Add(new Coord(bestTarget.x + 1, bestTarget.y));
                adjacentTiles.Add(new Coord(bestTarget.x - 1, bestTarget.y));
                adjacentTiles.Add(new Coord(bestTarget.x, bestTarget.y + 1));
                adjacentTiles.Add(new Coord(bestTarget.x, bestTarget.y - 1));
                adjacentTiles.IntersectWith(curUnit.getValidMoves(unitCoord.x, unitCoord.y, battlefield));
                // Hardcoded hack for time efficiency
                if (manhattanDistance(unitCoord, bestTarget) == 1)
                {
                    adjacentTiles.Add(unitCoord);
                }
                Coord bestCoord = null;
                int tileDef = Int32.MinValue;
                foreach (Coord coord in adjacentTiles)
                {
                    if (tileDef < ConstantTables.TileDefense[(int)battlefield.map[coord.x, coord.y].Peek().tileType])
                    {
                        tileDef = ConstantTables.TileDefense[(int)battlefield.map[coord.x, coord.y].Peek().tileType];
                        bestCoord = coord;
                    }
                }
                if (unitCoord.Equals(bestCoord))
                {
                    return(new Move(unitCoord, bestTarget));
                }
                return(new Move(unitCoord, bestCoord));
            }
            else
            {
                // Move towards defence point
                List<Coord> moves = curUnit.getValidMoves(unitCoord.x, unitCoord.y, battlefield);
                moves.Add(unitCoord);
                Coord bestCoord = nearestCoord(capturePoint, moves);
                return(new Move(unitCoord, bestCoord));
            }
        }
    }
    else if (curUnit is RangedUnit)
    {
        // NOTE(review): same always-false `* -0.4` comparison as above.
        if (curUnit.getHealth() < curUnit.maxHealth * -0.4)
        {
            // TODO
            // Flee
        }
        else
        {
            // Seek an enemy to attack if health is high. Ranged units attack in
            // place — no adjacent-tile repositioning like melee.
            HashSet<Coord> attackZone = curUnit.getTotalAttackZone(unitCoord.x, unitCoord.y, battlefield, character);
            attackZone.IntersectWith(enemies);
            List<Coord> targets = new List<Coord>(attackZone);
            float bestScore = 0;
            Coord bestTarget = null;
            if (targets.Count > 0)
            {
                // If targets are in range find best
                foreach (Coord target in targets)
                {
                    Tile enemyTile = battlefield.map[target.x, target.y].Peek();
                    Unit enemy = battlefield.units[target.x, target.y];
                    AIBattle battle = new AIBattle(curUnit, enemy, enemyTile, target, battlefield);
                    if (battle.score > bestScore)
                    {
                        bestScore = battle.score;
                        bestTarget = target;
                    }
                }
                return(new Move(unitCoord, bestTarget));
            }
            else
            {
                // Move towards defence point
                List<Coord> moves = curUnit.getValidMoves(unitCoord.x, unitCoord.y, battlefield);
                moves.Add(unitCoord);
                Coord bestCoord = nearestCoord(capturePoint, moves);
                return(new Move(unitCoord, bestCoord));
            }
        }
    }
    else if (curUnit is StatusUnit)
    {
        // NOTE(review): same always-false `* -0.4` comparison as above.
        if (curUnit.getHealth() < curUnit.maxHealth * -0.4)
        {
            // TODO
            // Flee
        }
        else
        {
            // Seek an enemy to attack if health is high (same logic as ranged).
            HashSet<Coord> attackZone = curUnit.getTotalAttackZone(unitCoord.x, unitCoord.y, battlefield, character);
            attackZone.IntersectWith(enemies);
            List<Coord> targets = new List<Coord>(attackZone);
            float bestScore = 0;
            Coord bestTarget = null;
            if (targets.Count > 0)
            {
                // If targets are in range find best
                foreach (Coord target in targets)
                {
                    Tile enemyTile = battlefield.map[target.x, target.y].Peek();
                    Unit enemy = battlefield.units[target.x, target.y];
                    AIBattle battle = new AIBattle(curUnit, enemy, enemyTile, target, battlefield);
                    if (battle.score > bestScore)
                    {
                        bestScore = battle.score;
                        bestTarget = target;
                    }
                }
                return(new Move(unitCoord, bestTarget));
            }
            else
            {
                // Move towards defence point
                List<Coord> moves = curUnit.getValidMoves(unitCoord.x, unitCoord.y, battlefield);
                moves.Add(unitCoord);
                Coord bestCoord = nearestCoord(capturePoint, moves);
                return(new Move(unitCoord, bestCoord));
            }
        }
    }
    //Just so the player can keep track of what's happening
    await Task.Delay(300);
    return(new Move());
}
/// <summary>
/// Aggregates per-template parameter information across a template group:
/// which parameters to hide or always show, which variant names to display,
/// which user-supplied values are invalid for every template, and which are
/// defaults for some template.
/// </summary>
/// <returns>
/// The aggregated details, or null when usage information could not be
/// determined for a template in the group.
/// </returns>
private static TemplateGroupParameterDetails? DetermineParameterDispositionsForTemplateGroup(IReadOnlyList<ITemplateInfo> templateGroup, IEngineEnvironmentSettings environmentSettings, INewCommandInput commandInput, IHostSpecificDataLoader hostDataLoader, TemplateCreator templateCreator)
{
    HashSet<string> groupUserParamsWithInvalidValues = new HashSet<string>(StringComparer.Ordinal);
    bool groupHasPostActionScriptRunner = false;
    List<IParameterSet> parameterSetsForAllTemplatesInGroup = new List<IParameterSet>();
    IDictionary<string, InvalidParameterInfo> invalidParametersForGroup = new Dictionary<string, InvalidParameterInfo>(StringComparer.Ordinal);
    bool firstInList = true;
    // Variants from templates that override a parameter name ("opinionated")
    // win over variants that are merely defaults; see the merge at the end.
    Dictionary<string, IReadOnlyList<string>> defaultVariantsForCanonicals = new Dictionary<string, IReadOnlyList<string>>(StringComparer.Ordinal);
    Dictionary<string, IReadOnlyList<string>> groupVariantsForCanonicals = new Dictionary<string, IReadOnlyList<string>>(StringComparer.Ordinal);
    HashSet<string> groupUserParamsWithDefaultValues = new HashSet<string>(StringComparer.Ordinal);
    Dictionary<string, bool> parameterHidingDisposition = new Dictionary<string, bool>(StringComparer.OrdinalIgnoreCase);
    HashSet<string> parametersToAlwaysShow = new HashSet<string>(StringComparer.Ordinal);

    foreach (ITemplateInfo templateInfo in templateGroup.OrderByDescending(x => x.Precedence))
    {
        TemplateUsageInformation? usageInformationNullable = TemplateUsageHelp.GetTemplateUsageInformation(templateInfo, environmentSettings, commandInput, hostDataLoader, templateCreator);
        if (usageInformationNullable == null)
        {
            return null;
        }

        TemplateUsageInformation usageInformation = usageInformationNullable.Value;
        HostSpecificTemplateData hostSpecificTemplateData = hostDataLoader.ReadHostSpecificTemplateData(templateInfo);
        HashSet<string> parametersToExplicitlyHide = hostSpecificTemplateData?.HiddenParameterNames ?? new HashSet<string>(StringComparer.Ordinal);

        foreach (ITemplateParameter parameter in usageInformation.AllParameters.ParameterDefinitions)
        {
            // If the parameter has previously been encountered...
            if (parameterHidingDisposition.TryGetValue(parameter.Name, out bool isCurrentlyHidden))
            {
                // ...and it was hidden, but it's not hidden in this template in the group,
                // remove its hiding, otherwise leave it as is.
                if (isCurrentlyHidden && !parametersToExplicitlyHide.Contains(parameter.Name))
                {
                    parameterHidingDisposition[parameter.Name] = false;
                }
            }
            else
            {
                // ...otherwise, since this is the first time the parameter has been seen,
                // its hiding state should be used as the current disposition.
                parameterHidingDisposition[parameter.Name] = parametersToExplicitlyHide.Contains(parameter.Name);
            }
        }

        if (firstInList)
        {
            invalidParametersForGroup = usageInformation.InvalidParameters.ToDictionary(x => x.Canonical, x => x);
            // BUG FIX: the invalid-value set must be seeded from the first
            // template. Previously it was only ever IntersectWith-ed, and
            // intersecting into an initially-empty set always yields an empty
            // set, so no invalid user values were ever reported.
            groupUserParamsWithInvalidValues.UnionWith(usageInformation.UserParametersWithInvalidValues);
            firstInList = false;
        }
        else
        {
            invalidParametersForGroup = InvalidParameterInfo.IntersectWithExisting(invalidParametersForGroup, usageInformation.InvalidParameters);
            // intersect because if the value is valid for any version, it's valid.
            groupUserParamsWithInvalidValues.IntersectWith(usageInformation.UserParametersWithInvalidValues);
        }

        groupHasPostActionScriptRunner |= usageInformation.HasPostActionScriptRunner;
        parameterSetsForAllTemplatesInGroup.Add(usageInformation.AllParameters);

        // If this template has name overrides (either long or short), it's opinionated.
        // If it's the first opinionated template about the param, use its variants.
        // Else this template is not opinionated, note its values if there aren't defaults for the param already.
        // At the end, anything in the default list that isn't in the opinionated list will get merged in.
        // TODO: write tests for this code (and the rest of this method while we're at it)
        foreach (KeyValuePair<string, IReadOnlyList<string>> canonicalAndVariants in usageInformation.VariantsForCanonicals)
        {
            if (hostSpecificTemplateData.LongNameOverrides.ContainsKey(canonicalAndVariants.Key) || hostSpecificTemplateData.ShortNameOverrides.ContainsKey(canonicalAndVariants.Key))
            {
                // this template is opinionated about this parameter. If no previous template is opinionated about this param, use this template's variants.
                if (!groupVariantsForCanonicals.ContainsKey(canonicalAndVariants.Key))
                {
                    groupVariantsForCanonicals[canonicalAndVariants.Key] = canonicalAndVariants.Value;
                }
            }
            else
            {
                // this template is not opinionated about this parameter. If no previous template had defaults for this param, use this template's defaults.
                if (!defaultVariantsForCanonicals.ContainsKey(canonicalAndVariants.Key))
                {
                    defaultVariantsForCanonicals[canonicalAndVariants.Key] = canonicalAndVariants.Value;
                }
            }
        }

        // If any template says the user input value is the default, include it here.
        groupUserParamsWithDefaultValues.UnionWith(usageInformation.UserParametersWithDefaultValues);
        parametersToAlwaysShow.UnionWith(hostSpecificTemplateData.ParametersToAlwaysShow);
    }

    // Aggregate the parameter variants: defaults only fill gaps left by
    // opinionated templates.
    foreach (KeyValuePair<string, IReadOnlyList<string>> defaultVariants in defaultVariantsForCanonicals)
    {
        if (!groupVariantsForCanonicals.ContainsKey(defaultVariants.Key))
        {
            // there were no opinionated variants, take the preferred default.
            groupVariantsForCanonicals[defaultVariants.Key] = defaultVariants.Value;
        }
    }

    IParameterSet allGroupParameters = new TemplateGroupParameterSet(parameterSetsForAllTemplatesInGroup);
    string parameterErrors = InvalidParameterInfo.InvalidParameterListToString(invalidParametersForGroup.Values.ToList());
    // A parameter is hidden only if every template that declares it hides it.
    HashSet<string> parametersToHide = new HashSet<string>(parameterHidingDisposition.Where(x => x.Value).Select(x => x.Key), StringComparer.Ordinal);

    return new TemplateGroupParameterDetails
    {
        AllParams = allGroupParameters,
        AdditionalInfo = parameterErrors,
        InvalidParams = groupUserParamsWithInvalidValues.ToList(),
        ExplicitlyHiddenParams = parametersToHide,
        GroupVariantsForCanonicals = groupVariantsForCanonicals,
        GroupUserParamsWithDefaultValues = groupUserParamsWithDefaultValues,
        HasPostActionScriptRunner = groupHasPostActionScriptRunner,
        ParametersToAlwaysShow = parametersToAlwaysShow,
    };
}
// Finds the candidate types X satisfying "LB <: X <: UB" for the given lower and upper
// bounds, as used when fixing a type parameter during C# type inference.
// First applies the Fixing algorithm from the C# spec (§7.5.2.11); if that yields a single
// candidate (or the improved algorithm is disabled), that result is returned. Otherwise the
// "improved" algorithm searches type definitions (base types of the lower bounds, or all
// types in the compilation) and infers type arguments per candidate.
// Returns the list of surviving candidates (possibly empty).
IReadOnlyList<IType> FindTypesInBounds(IReadOnlyList<IType> lowerBounds, IReadOnlyList<IType> upperBounds)
{
    // If there's only a single type; return that single type.
    // If both inputs are empty, return the empty list.
    if (lowerBounds.Count == 0 && upperBounds.Count <= 1) {
        return (upperBounds);
    }
    if (upperBounds.Count == 0 && lowerBounds.Count <= 1) {
        return (lowerBounds);
    }
    // Recursion guard: give up once nesting gets too deep.
    if (nestingLevel > maxNestingLevel) {
        return (EmptyList<IType>.Instance);
    }

    // Finds a type X so that "LB <: X <: UB"
    Log.WriteCollection("FindTypesInBound, LowerBounds=", lowerBounds);
    Log.WriteCollection("FindTypesInBound, UpperBounds=", upperBounds);

    // First try the Fixing algorithm from the C# spec (§7.5.2.11):
    // a candidate must be implicitly convertible FROM every lower bound and TO every upper bound.
    List<IType> candidateTypes = lowerBounds.Union(upperBounds)
        .Where(c => lowerBounds.All(b => conversions.ImplicitConversion(b, c).IsValid))
        .Where(c => upperBounds.All(b => conversions.ImplicitConversion(c, b).IsValid))
        .ToList(); // evaluate the query only once

    Log.WriteCollection("FindTypesInBound, Candidates=", candidateTypes);

    // According to the C# specification, we need to pick the most specific
    // of the candidate types. (the type which has conversions to all others)
    // However, csc actually seems to choose the least specific.
    candidateTypes = candidateTypes.Where(
        c => candidateTypes.All(o => conversions.ImplicitConversion(o, c).IsValid)
    ).ToList();

    // If the specified algorithm produces a single candidate, we return
    // that candidate.
    // We also return the whole candidate list if we're not using the improved
    // algorithm.
    if (candidateTypes.Count == 1 || !(algorithm == TypeInferenceAlgorithm.Improved || algorithm == TypeInferenceAlgorithm.ImprovedReturnAllResults)) {
        return (candidateTypes);
    }
    candidateTypes.Clear();

    // Now try the improved algorithm
    Log.Indent();
    List<ITypeDefinition> candidateTypeDefinitions;
    if (lowerBounds.Count > 0) {
        // Find candidates by using the lower bounds:
        // intersect the base-type-definition sets of all lower bounds.
        var hashSet = new HashSet<ITypeDefinition>(lowerBounds[0].GetAllBaseTypeDefinitions());
        for (int i = 1; i < lowerBounds.Count; i++) {
            hashSet.IntersectWith(lowerBounds[i].GetAllBaseTypeDefinitions());
        }
        candidateTypeDefinitions = hashSet.ToList();
    } else {
        // Find candidates by looking at all classes in the project:
        candidateTypeDefinitions = compilation.GetAllTypeDefinitions().ToList();
    }

    // Now filter out candidates that violate the upper bounds:
    foreach (IType ub in upperBounds) {
        ITypeDefinition ubDef = ub.GetDefinition();
        if (ubDef != null) {
            candidateTypeDefinitions.RemoveAll(c => !c.IsDerivedFrom(ubDef));
        }
    }

    foreach (ITypeDefinition candidateDef in candidateTypeDefinitions) {
        // determine the type parameters for the candidate:
        IType candidate;
        if (candidateDef.TypeParameterCount == 0) {
            // Non-generic definition: the candidate is the definition itself.
            candidate = candidateDef;
        } else {
            // Generic definition: recursively infer its type arguments from the bounds.
            Log.WriteLine("Inferring arguments for candidate type definition: " + candidateDef);
            bool success;
            IType[] result = InferTypeArgumentsFromBounds(
                candidateDef.TypeParameters,
                new ParameterizedType(candidateDef, candidateDef.TypeParameters),
                lowerBounds, upperBounds,
                out success);
            if (success) {
                candidate = new ParameterizedType(candidateDef, result);
            } else {
                Log.WriteLine("Inference failed; ignoring candidate");
                continue;
            }
        }
        Log.WriteLine("Candidate type: " + candidate);

        if (upperBounds.Count == 0) {
            // if there were only lower bounds, we aim for the most specific candidate:

            // if this candidate isn't made redundant by an existing, more specific candidate:
            if (!candidateTypes.Any(c => c.GetDefinition().IsDerivedFrom(candidateDef))) {
                // remove all existing candidates made redundant by this candidate:
                candidateTypes.RemoveAll(c => candidateDef.IsDerivedFrom(c.GetDefinition()));
                // add new candidate
                candidateTypes.Add(candidate);
            }
        } else {
            // if there were upper bounds, we aim for the least specific candidate:

            // if this candidate isn't made redundant by an existing, less specific candidate:
            if (!candidateTypes.Any(c => candidateDef.IsDerivedFrom(c.GetDefinition()))) {
                // remove all existing candidates made redundant by this candidate:
                candidateTypes.RemoveAll(c => c.GetDefinition().IsDerivedFrom(candidateDef));
                // add new candidate
                candidateTypes.Add(candidate);
            }
        }
    }
    Log.Unindent();
    return (candidateTypes);
}
// Combines the pivot expressions of the two children with AND semantics (this appears to be
// the conjunction node of an expression tree — TODO confirm against the class declaration,
// which is outside this chunk). Short-circuits on invariant children, then merges the
// children's matching pivot-value combinations using one of three strategies depending on
// how their relevant pivot sets overlap.
internal override PivotsExpression ToPivotsExpression(Dictionary<string, Pivots.Pivot> pivotVsPivotValues, GetChoiceDelegate get_choice_fn)
{
    PivotsExpression left, right;
    left = left_child.ToPivotsExpression(pivotVsPivotValues, get_choice_fn);
    if (left.invariant) {
        // An invariant-true left reduces the conjunction to the right child;
        // an invariant-false left makes the whole expression invariant-false.
        if (left.invariant_result) {
            return (right_child.ToPivotsExpression(pivotVsPivotValues, get_choice_fn));
        } else {
            return (left);
        }
    }
    right = right_child.ToPivotsExpression(pivotVsPivotValues, get_choice_fn);
    if (right.invariant) {
        // Symmetric short-circuit for the right child.
        if (right.invariant_result) {
            return (left);
        } else {
            return (right);
        }
    }

    // Pivots relevant to both children.
    HashSet<string> shared_pivots = new HashSet<string>(left.relevant_pivots);
    shared_pivots.IntersectWith(right.relevant_pivots);

    if (shared_pivots.Count == left.relevant_pivots.Count) {
        // All pivots are shared, just intersect the sets.
        // NOTE(review): this only establishes that left's pivots are a subset of right's,
        // not that the two pivot sets are equal — confirm that intersecting
        // matching_combinations is correct when right has extra pivots.
        PivotsExpression result = new PivotsExpression();
        result.relevant_pivots = left.relevant_pivots;
        result.matching_combinations = new ComparableHashSet<ComparableHashSet<string>>(left.matching_combinations);
        result.matching_combinations.IntersectWith(right.matching_combinations);
        return (result.Simplify(pivotVsPivotValues));
    }

    if (shared_pivots.Count == 0) {
        // No shared pivots, so do a cross product
        PivotsExpression result = new PivotsExpression();
        result.relevant_pivots = new HashSet<string>(left.relevant_pivots);
        result.relevant_pivots.UnionWith(right.relevant_pivots);
        result.matching_combinations = new ComparableHashSet<ComparableHashSet<string>>();
        foreach (var left_combination in left.matching_combinations) {
            foreach (var right_combination in right.matching_combinations) {
                // Every pairing of a left combination with a right combination matches.
                ComparableHashSet<string> new_combination = new ComparableHashSet<string>(left_combination);
                new_combination.UnionWith(right_combination);
                result.matching_combinations.Add(new_combination);
            }
        }
        // It shouldn't be necessary to simplify in this case, as any independent pivots should have been removed already
        return (result);
    }

    // Partial overlap: collect every possible value of the shared pivots, so combinations
    // can be projected down to just their shared-pivot values.
    HashSet<string> shared_pivot_values = new HashSet<string>();
    foreach (string pivot in shared_pivots) {
        shared_pivot_values.UnionWith(pivotVsPivotValues[pivot].Choices.Keys);
    }

    // Sort by relevant pivot count (ensure `left` is the side with fewer relevant pivots).
    if (left.relevant_pivots.Count > right.relevant_pivots.Count) {
        var tmp = left;
        left = right;
        right = tmp;
    }

    if (right.relevant_pivots.IsSupersetOf(left.relevant_pivots)) {
        // Filter the combinations in right by what's in left: keep a right combination only
        // if its projection onto the shared pivots is one of left's matching combinations.
        PivotsExpression result = new PivotsExpression();
        result.relevant_pivots = right.relevant_pivots;
        result.matching_combinations = new ComparableHashSet<ComparableHashSet<string>>();
        foreach (var right_combination in right.matching_combinations) {
            ComparableHashSet<string> reduced_combination = new ComparableHashSet<string>(right_combination);
            reduced_combination.IntersectWith(shared_pivot_values);
            if (left.matching_combinations.Contains(reduced_combination)) {
                result.matching_combinations.Add(right_combination);
            }
        }
        return (result.Simplify(pivotVsPivotValues));
    } else {
        // General case: neither side subsumes the other. Index left's combinations by their
        // shared-pivot projection, then join each right combination against matching entries.
        Dictionary<ComparableHashSet<string>, List<ComparableHashSet<string>>> shared_values_to_left_values = new Dictionary<ComparableHashSet<string>, List<ComparableHashSet<string>>>();
        foreach (var left_combination in left.matching_combinations) {
            // Project the left combination onto the shared pivot values.
            ComparableHashSet<string> shared_values = new ComparableHashSet<string>();
            foreach (var value in left_combination) {
                if (shared_pivot_values.Contains(value)) {
                    shared_values.Add(value);
                }
            }
            List<ComparableHashSet<string>> combination_list;
            if (!shared_values_to_left_values.TryGetValue(shared_values, out combination_list)) {
                combination_list = shared_values_to_left_values[shared_values] = new List<ComparableHashSet<string>>();
            }
            combination_list.Add(left_combination);
        }

        PivotsExpression result = new PivotsExpression();
        result.relevant_pivots = new HashSet<string>(left.relevant_pivots);
        result.relevant_pivots.UnionWith(right.relevant_pivots);
        result.matching_combinations = new ComparableHashSet<ComparableHashSet<string>>();
        foreach (var right_combination in right.matching_combinations) {
            // Project the right combination the same way, then join with left's matches.
            ComparableHashSet<string> shared_values = new ComparableHashSet<string>();
            foreach (var value in right_combination) {
                if (shared_pivot_values.Contains(value)) {
                    shared_values.Add(value);
                }
            }
            List<ComparableHashSet<string>> left_combinations;
            if (shared_values_to_left_values.TryGetValue(shared_values, out left_combinations)) {
                foreach (var left_combination in left_combinations) {
                    ComparableHashSet<string> new_combination = new ComparableHashSet<string>(right_combination);
                    new_combination.UnionWith(left_combination);
                    result.matching_combinations.Add(new_combination);
                }
            }
        }
        return (result.Simplify(pivotVsPivotValues));
    }
}
static void Main(string[] args)
{
    // Demo 1: hash-table backed set (HashSet<T>) — union and intersection of student groups.
    var aspNetStudents = new HashSet<string> { "S. Jobs", "B. Gates", "M. Dell" };
    var silverlightStudents = new HashSet<string> { "M. Zuckerberg", "M. Dell" };

    // Union of both courses (duplicates collapse automatically).
    var allStudents = new HashSet<string>(aspNetStudents);
    allStudents.UnionWith(silverlightStudents);

    // Students enrolled in both courses.
    var intersectStudents = new HashSet<string>(aspNetStudents);
    intersectStudents.IntersectWith(silverlightStudents);

    Console.WriteLine("ASP.NET students: " + string.Join(", ", aspNetStudents));
    Console.WriteLine("Silverlight students: " + string.Join(", ", silverlightStudents));
    Console.WriteLine("All students: " + string.Join(", ", allStudents));
    Console.WriteLine(
        "Students in both ASP.NET and Silverlight: " + string.Join(", ", intersectStudents));

    // Demo 2: red-black tree backed set (SortedSet<T>) — elements enumerate in sorted order.
    var bandsBradLikes = new SortedSet<string>
    {
        "Manowar", "Blind Guardian", "Dio", "Kiss", "Dream Theater",
        "Megadeth", "Judas Priest", "Kreator", "Iron Maiden", "Accept"
    };
    var bandsAngelinaLikes = new SortedSet<string>
    {
        "Iron Maiden", "Dio", "Accept", "Manowar", "Slayer",
        "Megadeth", "Running Wild", "Grave Gigger", "Metallica"
    };

    Console.Write("Brad Pitt likes these bands: ");
    Console.WriteLine(string.Join(", ", bandsBradLikes));
    Console.Write("Angelina Jolie likes these bands: ");
    Console.WriteLine(string.Join(",", bandsAngelinaLikes));

    // Bands they have in common.
    var intersectBands = new SortedSet<string>(bandsBradLikes);
    intersectBands.IntersectWith(bandsAngelinaLikes);

    var verdict = intersectBands.Count >= 5 ? "Yes!" : "No!";
    Console.WriteLine(string.Format(
        "Does Brad Pitt like Angelina Jolie? {0}", verdict));
    Console.Write("Because Brad Pitt and Angelina Jolie both like: ");
    Console.WriteLine(string.Join(", ", intersectBands));

    // Bands either of them likes.
    var unionBands = new SortedSet<string>(bandsBradLikes);
    unionBands.UnionWith(bandsAngelinaLikes);
    Console.Write(
        "All bands hat Brad Pitt or Angelina Jolie like: ");
    Console.WriteLine(string.Join(", ", unionBands));

    Console.ReadLine();
}
// Decides whether the given bound parameter occurrence can be inlined with its argument.
// Returns the binding's argument expression when inlining is safe; otherwise records the
// reason via Finish(Status.*) and returns the parameter node unchanged. Parameters without
// a binding are returned as-is.
// `isLval`: true when the occurrence is an assignment target (an lvalue), which can never
// be replaced by the argument expression.
private Expression Bind(ParameterExpression node, bool isLval)
{
    if (_bindings.TryGetValue(node, out var binding)) {
        if (isLval) {
            // Assignment targets can't be substituted with the argument expression.
            Finish(Status.HasLvalBinding);
            return (node);
        }

        if (_inQuote) {
            //
            // Occurrences of variables in quotes result in the present of StrongBox<T> instances
            // in Constant nodes. We don't want to take these away.
            //
            Finish(Status.HasBindingInQuote);
            return (node);
        }

        if (_inLambda && !binding.IsConstant) {
            //
            // Bindings in lambdas are not guaranteed to run, so if the binding has side-effects, we
            // can't inline it because the evaluation count is non-deterministic. We can make an
            // exception for constants and default values.
            //
            // NB: We make the check here a bit stronger than IsPure to avoid inlining parameters that
            //     are defined in a higher scope. Putting these within a nested lambda would cause their
            //     repeated lookup at runtime (for each lambda invocation) which can be an unsafe thing
            //     to do.
            //
            // Example: f = (x => () => x)(y);
            //          y++;
            //          f();
            //
            // REVIEW: Is it generally safe to inline variables?
            //
            Finish(Status.HasBindingInLambda);
            return (node);
        }

        if (binding.UsageCount > 0 || _mayRepeat) {
            // The argument would be evaluated more than once (or a repeat is possible);
            // only allow that for pure, repeatable bindings.
            if (!binding.IsPure || !binding.CanRepeat) {
                Finish(Status.RepeatsArgument);
                return (node);
            }
        }

        if (!binding.IsPure) {
            // Impure bindings must be inlined in their original evaluation order;
            // bail out if this occurrence is out of order relative to _bindingOrder.
            if (_bindingOrder[_nextInPureBindingIndex] != node) {
                Finish(Status.ChangesSideEffectOrder);
                return (node);
            }
            _nextInPureBindingIndex++;
        }

        if (binding.FreeVariables != null) {
            // Check whether any free variable of the argument would be captured by a
            // variable declared in an enclosing scope at this occurrence.
            var captured = new HashSet<ParameterExpression>(_environment.SelectMany(scope => scope));
            captured.IntersectWith(binding.FreeVariables);
            if (captured.Count > 0) {
                //
                // CONSIDER: We could perform alpha renaming to resolve this issue. For now, we'll
                //           just bail out.
                //
                Finish(Status.CausesCapture);
                return (node);
            }
        }

        // Inlining is safe: count the usage and substitute the argument expression.
        binding.UsageCount++;
        return (binding.Argument);
    }
    return (node);
}
/// <summary>
/// Produces the estimator. Note that this is made out of <see cref="ReconcileCore(IHostEnvironment, string[])"/>'s
/// return value, plus whatever usages of <see cref="ColumnCopyingEstimator"/> are necessary to avoid collisions with
/// the output names fed to the constructor. This class provides the implementation, and subclasses should instead
/// override <see cref="ReconcileCore(IHostEnvironment, string[])"/>.
/// </summary>
/// <param name="env">The host environment.</param>
/// <param name="toOutput">The columns whose outputs are actually requested.</param>
/// <param name="inputNames">Map from input pipeline columns to their current column names.</param>
/// <param name="outputNames">Map from output pipeline columns to their desired column names.</param>
/// <param name="usedNames">Column names already in use, which the fixed output names must not collide with.</param>
/// <returns>The composed estimator, including any renaming steps needed to avoid collisions.</returns>
public sealed override IEstimator<ITransformer> Reconcile(IHostEnvironment env,
                                                          PipelineColumn[] toOutput,
                                                          IReadOnlyDictionary<PipelineColumn, string> inputNames,
                                                          IReadOnlyDictionary<PipelineColumn, string> outputNames,
                                                          IReadOnlyCollection<string> usedNames)
{
    Contracts.AssertValue(env);
    env.AssertValue(toOutput);
    env.AssertValue(inputNames);
    env.AssertValue(outputNames);
    env.AssertValue(usedNames);

    // The reconciler should have been called with all the input columns having names.
    env.Assert(inputNames.Keys.All(Inputs.Contains) && Inputs.All(inputNames.Keys.Contains));
    // The output name map should contain only outputs as their keys. Yet, it is possible not all
    // outputs will be required in which case these will both be subsets of those outputs indicated
    // at construction.
    env.Assert(outputNames.Keys.All(Outputs.Contains));
    env.Assert(toOutput.All(Outputs.Contains));
    env.Assert(Outputs.Count() == _outputNames.Length);

    IEstimator<ITransformer> result = null;

    // In the case where we have names used that conflict with the fixed output names, we must have some
    // renaming logic.
    var collisions = new HashSet<string>(_outputNames);
    collisions.IntersectWith(usedNames);
    var old2New = new Dictionary<string, string>();

    if (collisions.Count > 0) {
        // First get the old names to some temporary names.
        int tempNum = 0;
        foreach (var c in collisions) {
            old2New[c] = $"#TrainTemp{tempNum++}";
        }
        // In the case where the input names have anything that is used, we must reconstitute the input mapping.
        if (inputNames.Values.Any(old2New.ContainsKey)) {
            var newInputNames = new Dictionary<PipelineColumn, string>();
            foreach (var p in inputNames) {
                newInputNames[p.Key] = old2New.ContainsKey(p.Value) ? old2New[p.Value] : p.Value;
            }
            inputNames = newInputNames;
        }
        // Start the chain with the estimator that moves colliding columns to their temp names.
        result = new ColumnCopyingEstimator(env, old2New.Select(p => (p.Value, p.Key)).ToArray());
    }

    // Map the inputs to the names.
    string[] mappedInputNames = Inputs.Select(c => inputNames[c]).ToArray();
    // Finally produce the trainer.
    var trainerEst = ReconcileCore(env, mappedInputNames);
    if (result == null) {
        result = trainerEst;
    } else {
        result = result.Append(trainerEst);
    }

    // OK. Now handle the final renamings from the fixed names, to the desired names, in the case
    // where the output was desired, and a renaming is even necessary.
    var toRename = new List<(string outputColumnName, string inputColumnName)>();
    foreach ((PipelineColumn outCol, string fixedName) in Outputs.Zip(_outputNames, (c, n) => (c, n))) {
        if (outputNames.TryGetValue(outCol, out string desiredName)) {
            toRename.Add((desiredName, fixedName));
        } else {
            // An output without a desired name must not have been requested.
            env.Assert(!toOutput.Contains(outCol));
        }
    }
    // Finally if applicable handle the renaming back from the temp names to the original names.
    foreach (var p in old2New) {
        toRename.Add((p.Key, p.Value));
    }
    if (toRename.Count > 0) {
        result = result.Append(new ColumnCopyingEstimator(env, toRename.ToArray()));
    }

    return (result);
}