/// <summary>
/// Returns the possible ways that a query term can unify with a program term
/// </summary>
public static IBindings Unify(this ILiteral query, ILiteral program, IBindings bindings = null)
{
    var simpleUnifier = new SimpleUnifier();
    var freeVariables = new HashSet<ILiteral>();

    // Run the unifier
    var queryFreeVars = simpleUnifier.QueryUnifier.Compile(query, bindings);
    if (queryFreeVars == null) return null;

    simpleUnifier.PrepareToRunProgram();

    var programFreeVars = simpleUnifier.ProgramUnifier.Compile(program, bindings);
    if (programFreeVars == null) return null;

    freeVariables.UnionWith(queryFreeVars);

    // Retrieve the unified value for the program
    var result = simpleUnifier.UnifiedValue(query.UnificationKey ?? query);

    // If the result was valid, return as the one value from this function
    if (result != null)
    {
        var variableBindings = freeVariables.ToDictionary(variable => variable, variable => simpleUnifier.UnifiedValue(variable));
        return new BasicBinding(result, variableBindings);
    }
    else
    {
        return null;
    }
}
public Network(NetworkBuilder builder, StateOptimizer optimizer, IEnumerable<RuleBinding> rules, Dictionary<int, RuleBinding> stateRules, Dictionary<int, RuleBinding> contextRules)
{
    Contract.Requires<ArgumentNullException>(builder != null, "builder");
    Contract.Requires<ArgumentNullException>(optimizer != null, "optimizer");
    Contract.Requires<ArgumentNullException>(rules != null, "rules");

    _builder = builder;
    _rules = new List<RuleBinding>(rules);

    //Dictionary<int, string> stateRules = new Dictionary<int, string>();
    //foreach (var rule in _rules)
    //{
    //    stateRules[rule.StartState.Id] = rule.Name;
    //    stateRules[rule.EndState.Id] = rule.Name;
    //}

    HashSet<State> states = new HashSet<State>(ObjectReferenceEqualityComparer<State>.Default);
    HashSet<Transition> transitions = new HashSet<Transition>(ObjectReferenceEqualityComparer<Transition>.Default);
    foreach (var rule in _rules)
    {
        ExtractStatesAndTransitions(optimizer, rule, rule.StartState, states, transitions, stateRules, contextRules);
        //ExtractStatesAndTransitions(rule.Name, rule.EndState, states, transitions, stateRules, contextRules);
    }

    _states = states.ToDictionary(i => i.Id);
    _transitions = new List<Transition>(transitions);
    _stateRules = stateRules;
    _contextRules = contextRules;
    _optimizer = optimizer;
}
/// <summary>
/// Creates a new option that can take one of the values given in validValues, with the specified name
/// in the specified section of the specified configuration file.
/// [base=section, name, defaultValue, configFile]
/// </summary>
public DistinctOption(string section, string name, string defaultValue, IEnumerable<string> validValues, ConfigFile configFile)
    : base(section, name, defaultValue, configFile)
{
    ValidValues = new HashSet<string>(validValues);
    ValidValues.Add(defaultValue);
    DisplayValidValues = new Dictionary<string, string>(ValidValues.ToDictionary(x => x, x => x));
}
private void Initialize()
{
    _fileInfo = new HashSet<PathInfo>(
        Directory.EnumerateFiles(_basePathConverted, "*", SearchOption.AllDirectories)
            .Select(fileName => PathInfo.GetSubPath(_basePath, fileName)));

    _fileInfoCache = _fileInfo.ToDictionary(i => GetRootPath(i), i => FileInfo.Create(GetRootPath(i)));

    _directoryInfo = new HashSet<PathInfo>(
        Directory.EnumerateDirectories(_basePathConverted, "*", SearchOption.AllDirectories)
            .Select(fileName => PathInfo.GetSubPath(_basePath, fileName)));
}
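// Hedged, self-contained sketch of the same "enumerate, then cache by key" pattern using only
// System.IO types. PathInfo, GetRootPath and FileInfo.Create above are project-specific, so
// relative-path strings and System.IO.FileInfo stand in for them here; Path.GetRelativePath
// assumes .NET Core / .NET 5+.
using System.Collections.Generic;
using System.IO;
using System.Linq;

static class FileCacheSketch
{
    public static Dictionary<string, FileInfo> BuildCache(string basePath)
    {
        // The set of relative paths plays the role of _fileInfo; ToDictionary then builds
        // the eager lookup that mirrors _fileInfoCache.
        var relativePaths = new HashSet<string>(
            Directory.EnumerateFiles(basePath, "*", SearchOption.AllDirectories)
                .Select(full => Path.GetRelativePath(basePath, full)));

        return relativePaths.ToDictionary(
            rel => rel,
            rel => new FileInfo(Path.Combine(basePath, rel)));
    }
}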
private Dictionary<KspPartObject, List<KspPartLinkProperty>> FindPartDependencies (KspCraftObject craft, RegexFilter filter)
{
    ui.DisplayUserMessage ($"Entering craft '{craft.Name}'...");

    var partLookup = new PartLookup (craft);
    var dependencies = partLookup.LookupParts (filter).ToList ();

    var dependentParts = new HashSet<KspPartObject> ();
    Parallel.ForEach (dependencies, dependency => {
        foreach (var part in partLookup.LookupSoftDependencies (dependency)) {
            lock (dependentParts) {
                dependentParts.Add (part);
            }
        }
    });

    ui.DisplayUserMessage ($"Found {dependentParts.Count} dependent parts");

    return dependentParts.ToDictionary (part => part, part => FindPartLinks (part, dependencies));
}
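// Hedged alternative sketch: the lock around the HashSet above can be avoided with a thread-safe
// collection from System.Collections.Concurrent and a single deduplication at the end. This is an
// illustration only, not the project's implementation; the KspPartObject type is stood in for by
// string, and lookupSoftDependencies is a hypothetical delegate playing the role of
// partLookup.LookupSoftDependencies.
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Threading.Tasks;

static class DependencyCollectionSketch
{
    public static HashSet<string> CollectDependents(
        IEnumerable<string> dependencies,
        Func<string, IEnumerable<string>> lookupSoftDependencies)
    {
        var bag = new ConcurrentBag<string>();
        Parallel.ForEach(dependencies, dependency =>
        {
            foreach (var part in lookupSoftDependencies(dependency))
            {
                bag.Add(part); // ConcurrentBag is thread-safe, so no lock is needed here
            }
        });
        // Deduplicate once, outside the parallel region, instead of locking per Add.
        return new HashSet<string>(bag);
    }
}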
internal static Dictionary<string, DocumentedNamespaceTree> Build(IEnumerable<DocumentedNamespace> @namespaces)
{
    var result = new HashSet<DocumentedNamespaceTree>();
    foreach (var ns in namespaces)
    {
        result.Add(new DocumentedNamespaceTree(ns));
    }
    foreach (var tree in result)
    {
        var parts = tree.Namespace.Identity.Split(new[] { '.' }, StringSplitOptions.RemoveEmptyEntries);
        var prefix = string.Join(".", parts.Take(parts.Length - 1));
        foreach (var other in result)
        {
            if (other.Namespace.Identity == prefix)
            {
                other.AddChild(tree);
            }
        }
    }
    return result.ToDictionary(x => x.Namespace.Identity, x => x, StringComparer.Ordinal);
}
public static SqlPreCommand SynchronizeSchemasScript(Replacements replacements)
{
    HashSet<SchemaName> model = Schema.Current.GetDatabaseTables().Select(a => a.Name.Schema).ToHashSet();
    HashSet<SchemaName> database = new HashSet<SchemaName>();

    foreach (var db in model.Select(a => a.Database).Distinct())
    {
        using (Administrator.OverrideDatabaseInViews(db))
        {
            database.AddRange(
                from s in Database.View<SysSchemas>()
                select new SchemaName(db, s.name));
        }
    }

    return Synchronizer.SynchronizeScript(
        model.ToDictionary(a => a),
        database.ToDictionary(a => a),
        (_, newSN) => SqlBuilder.CreateSchema(newSN),
        null,
        null,
        Spacing.Simple);
}
public Dictionary<Webpage, HashSet<string>> GetSecondary(HashSet<Webpage> webpages)
{
    if (!webpages.Any())
        return new Dictionary<Webpage, HashSet<string>>();

    Tag tagAlias = null;
    TagInfo tagInfo = null;
    string typeName = webpages.First().DocumentType;

    Dictionary<int, IEnumerable<string>> tagInfoDictionary = _session.QueryOver<Webpage>()
        .Where(webpage => webpage.DocumentType == typeName)
        .JoinAlias(webpage => webpage.Tags, () => tagAlias)
        .SelectList(builder =>
            builder.Select(webpage => webpage.Id).WithAlias(() => tagInfo.WebpageId)
                .Select(() => tagAlias.Name).WithAlias(() => tagInfo.TagName))
        .TransformUsing(Transformers.AliasToBean<TagInfo>()).List<TagInfo>()
        .GroupBy(info => info.WebpageId)
        .ToDictionary(infos => infos.Key, infos => infos.Select(x => x.TagName));

    return webpages.ToDictionary(webpage => webpage,
        webpage => GetSecondaryTerms(webpage,
            tagInfoDictionary.ContainsKey(webpage.Id)
                ? tagInfoDictionary[webpage.Id]
                : Enumerable.Empty<string>()).ToHashSet());
}
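// Hedged, self-contained illustration of the GroupBy/ToDictionary lookup used above, without the
// NHibernate QueryOver plumbing. The (WebpageId, TagName) tuples below are hypothetical stand-ins
// for the projected TagInfo rows.
using System.Collections.Generic;
using System.Linq;

static class TagLookupSketch
{
    public static Dictionary<int, IEnumerable<string>> BuildTagLookup(
        IEnumerable<(int WebpageId, string TagName)> rows)
    {
        // One dictionary entry per webpage id, holding all of its tag names.
        return rows
            .GroupBy(r => r.WebpageId)
            .ToDictionary(g => g.Key, g => g.Select(r => r.TagName));
    }
}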
public static SqlPreCommand SynchronizeSchemasScript(Replacements replacements)
{
    HashSet<SchemaName> model = Schema.Current.GetDatabaseTables()
        .Select(a => a.Name.Schema)
        .Where(a => !SqlBuilder.SystemSchemas.Contains(a.Name))
        .ToHashSet();

    HashSet<SchemaName> database = new HashSet<SchemaName>();
    foreach (var db in Schema.Current.DatabaseNames())
    {
        using (Administrator.OverrideDatabaseInSysViews(db))
        {
            var schemaNames = Database.View<SysSchemas>().Select(s => s.name).ToList().Except(SqlBuilder.SystemSchemas);
            database.AddRange(schemaNames.Select(sn => new SchemaName(db, sn)));
        }
    }

    using (replacements.WithReplacedDatabaseName())
        return Synchronizer.SynchronizeScriptReplacing(replacements, "Schemas",
            model.ToDictionary(a => a.ToString()),
            database.ToDictionary(a => a.ToString()),
            (_, newSN) => SqlBuilder.CreateSchema(newSN),
            (_, oldSN) => DropSchema(oldSN) ? SqlBuilder.DropSchema(oldSN) : null,
            (_, newSN, oldSN) => newSN.Equals(oldSN) ? null : SqlPreCommand.Combine(Spacing.Simple, SqlBuilder.DropSchema(oldSN), SqlBuilder.CreateSchema(newSN)),
            Spacing.Double);
}
// constructors
private SubqueryRemover(IEnumerable<SelectExpression> selectsToRemove)
{
    _selectsToRemove = new HashSet<SelectExpression>(selectsToRemove);
    _map = _selectsToRemove.ToDictionary(
        d => d.Alias,
        d => d.Columns.ToDictionary(d2 => d2.Name, d2 => d2.Expression));
}
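// Hedged sketch of the nested ToDictionary shape above: an outer map keyed by alias, each value
// itself a map from column name to expression. SelectExpression and its column declarations are
// replaced by hypothetical tuples purely for illustration.
using System.Collections.Generic;
using System.Linq;

static class NestedMapSketch
{
    public static Dictionary<string, Dictionary<string, string>> BuildColumnMap(
        IEnumerable<(string Alias, IEnumerable<(string Name, string Expression)> Columns)> selects)
    {
        return selects.ToDictionary(
            s => s.Alias,
            s => s.Columns.ToDictionary(c => c.Name, c => c.Expression));
    }
}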
private HashSet<State> GetLiveStates(HashSet<State> states)
{
    var dictionary = states.ToDictionary(s => s, s => new HashSet<State>());
    foreach (State s in states)
    {
        foreach (Transition t in s.Transitions)
        {
            // TODO: Java code does not check for null states.
            if (t.To == null)
            {
                continue;
            }

            dictionary[t.To].Add(s);
        }
    }

    var comparer = new StateEqualityComparer();

    var live = new HashSet<State>(this.GetAcceptStates(), comparer);
    var worklist = new LinkedList<State>(live);
    while (worklist.Count > 0)
    {
        State s = worklist.RemoveAndReturnFirst();
        foreach (State p in dictionary[s])
        {
            if (!live.Contains(p))
            {
                live.Add(p);
                worklist.AddLast(p);
            }
        }
    }

    return live;
}
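// Hedged, self-contained sketch of the same backward-reachability worklist, using integer node
// ids instead of the automaton's State/Transition types. Nodes that can reach an accepting node
// are "live". It assumes every node id appears as a key in `successors` (possibly with an empty
// list), which is an assumption of this sketch rather than something the original guarantees.
using System.Collections.Generic;
using System.Linq;

static class LivenessSketch
{
    public static HashSet<int> GetLiveNodes(
        Dictionary<int, List<int>> successors, IEnumerable<int> acceptNodes)
    {
        // Invert the edges: predecessors[n] = nodes with an edge into n.
        var predecessors = successors.Keys.ToDictionary(n => n, _ => new HashSet<int>());
        foreach (var kvp in successors)
            foreach (var to in kvp.Value)
                predecessors[to].Add(kvp.Key);

        var live = new HashSet<int>(acceptNodes);
        var worklist = new Queue<int>(live);
        while (worklist.Count > 0)
        {
            var n = worklist.Dequeue();
            foreach (var p in predecessors[n])
            {
                if (live.Add(p)) // Add returns false when p is already marked live
                    worklist.Enqueue(p);
            }
        }
        return live;
    }
}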
public void Spawn()
{
    PreventTurrets();
    TakeOutTurrets();

    var units = new HashSet<Unit> { Unit.CLAW, Unit.ARCHER, Unit.HACKER, Unit.REPAIRER, Unit.TERMINATOR, Unit.TURRET };
    var ourUnitCounts = units.ToDictionary(u => u, u => 0);
    var theirUnitCounts = units.ToDictionary(u => u, u => 0);
    droids.Where(d => units.Contains((Unit)d.Variant)).ForEach(d =>
    {
        if (d.Owner == Bb.id)
            ourUnitCounts[(Unit)d.Variant]++;
        else
            theirUnitCounts[(Unit)d.Variant]++;
    });
    var theirTotal = theirUnitCounts.Sum(kvp => kvp.Value);

    var isSpawningTurretsSoon = Bb.TheirSpawning.ToPoints().Any(p =>
        (Unit)Bb.TileLookup[p].VariantToAssemble == Unit.TURRET &&
        Bb.TileLookup[p].TurnsUntilAssembled < 10);

    float targetClawRatio = theirTotal == 0 ? 1.0f : 0.0f;
    float targetHackerRatio = theirUnitCounts[Unit.HACKER] + theirUnitCounts[Unit.CLAW];
    float targetArcherRatio = theirUnitCounts[Unit.ARCHER] + theirUnitCounts[Unit.TURRET] + (isSpawningTurretsSoon ? 5 : 0);
    float targetTerminatorRatio = theirUnitCounts[Unit.ARCHER] + theirUnitCounts[Unit.TERMINATOR] + theirUnitCounts[Unit.CLAW] + theirUnitCounts[Unit.REPAIRER];
    float targetRepairerRatio = theirUnitCounts[Unit.ARCHER] / 4 + theirUnitCounts[Unit.CLAW] / 4 + ourUnitCounts[Unit.TERMINATOR];

    var total = targetClawRatio + targetHackerRatio + targetArcherRatio + targetTerminatorRatio + targetRepairerRatio;
    targetHackerRatio /= total;
    targetArcherRatio /= total;
    targetTerminatorRatio /= total;
    targetRepairerRatio /= total;

    float unitCount = Bb.OurUnits.Count() - Bb.OurHangars.Count() - Bb.OurTurrets.Count() - Bb.OurWalls.Count() + .0001f;
    int clawCount = Bb.OurClaws.Count();
    int archerCount = Bb.OurArchers.Count();
    int hackerCount = Bb.OurHackers.Count();
    int terminatorCount = Bb.OurTerminators.Count();
    int repairerCount = Bb.OurRepairers.Count();

    if (CanAfford(Unit.CLAW) && clawCount / unitCount < targetClawRatio)
    {
        SpawnUnit(Unit.CLAW);
        Bb.ReadBoard();
    }
    if (CanAfford(Unit.TERMINATOR) && terminatorCount / unitCount < targetTerminatorRatio)
    {
        SpawnUnit(Unit.TERMINATOR);
        Bb.ReadBoard();
    }
    if (CanAfford(Unit.HACKER) && hackerCount / unitCount < targetHackerRatio)
    {
        SpawnUnit(Unit.HACKER);
        Bb.ReadBoard();
    }
    if (CanAfford(Unit.ARCHER) && archerCount / unitCount < targetArcherRatio)
    {
        SpawnUnit(Unit.ARCHER);
        Bb.ReadBoard();
    }
    if (CanAfford(Unit.REPAIRER) && repairerCount / unitCount < targetRepairerRatio)
    {
        SpawnUnit(Unit.REPAIRER);
        Bb.ReadBoard();
    }
}
public static String Replace(this String input, String pattern, Func<ReadOnlyDictionary<String, String>, String> replacer, RegexOptions options)
{
    if (input == null) return null;

    var names = new HashSet<String>();
    var m_meta = Regex.Match(pattern, @"\(\?\<(?<name>.*?)\>");
    for (; m_meta.Success; m_meta = m_meta.NextMatch())
    {
        var name = m_meta.Result("${name}");
        names.Add(name);
    }

    return Regex.Replace(input, pattern, m =>
        replacer(names.ToDictionary(name => name, name => m.Result("${" + name + "}")).ToReadOnly()), options);
}
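// Hedged usage sketch for the named-group extraction that drives Replace above. It uses only
// System.Text.RegularExpressions; Regex.GetGroupNames is shown as a simpler way to enumerate the
// named captures than scanning the pattern for "(?<name>" by hand. The pattern and the dictionary
// shape mirror what the replacer delegate receives, but the demo itself is purely illustrative.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.RegularExpressions;

static class NamedGroupSketch
{
    public static void Demo()
    {
        const string pattern = @"(?<key>\w+)=(?<value>\w+)";
        var regex = new Regex(pattern);
        // Keep only the non-numeric group names, i.e. the named captures in the pattern.
        var names = regex.GetGroupNames().Where(n => !int.TryParse(n, out _)).ToList();

        var result = regex.Replace("mode=debug", m =>
        {
            // Same shape as the dictionary handed to the replacer delegate above.
            Dictionary<string, string> groups = names.ToDictionary(n => n, n => m.Groups[n].Value);
            return $"{groups["key"]}:{groups["value"]}";
        });
        Console.WriteLine(result); // prints "mode:debug"
    }
}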
public Dictionary<Layout, HashSet<string>> GetSecondary(HashSet<Layout> layouts)
{
    return layouts.ToDictionary(layout => layout, layout => GetSecondary(layout).ToHashSet());
}
public Dictionary<MediaCategory, HashSet<string>> GetSecondary(HashSet<MediaCategory> mediaCategories)
{
    return mediaCategories.ToDictionary(category => category, category => GetSecondary(category).ToHashSet());
}
private IEnumerable<string> GetNamePointPath(string from, string to, IEnumerable<PointGroup> roads)
{
    var routes = new List<PointGroup>();
    var cities = new HashSet<string>();
    foreach (var road in roads)
    {
        cities.Add(road.Point1);
        cities.Add(road.Point2);
        // Register each road in both directions so the graph is effectively undirected.
        routes.Add(new PointGroup { Point1 = road.Point1, Point2 = road.Point2, Lenght = road.Lenght });
        routes.Add(new PointGroup { Point1 = road.Point2, Point2 = road.Point1, Lenght = road.Lenght });
    }

    var shortestPaths = cities.ToDictionary(
        city => city,
        city => new Tuple<double, List<PointGroup>>(int.MaxValue, new List<PointGroup>()));
    shortestPaths[from] = new Tuple<double, List<PointGroup>>(0, new List<PointGroup>());

    bool finish = false;
    var processed = new List<string>();
    while (processed.Count != cities.Count)
    {
        // Pick the unprocessed city with the smallest known distance.
        var shortestCities = (from s in shortestPaths orderby s.Value.Item1 select s.Key).ToList();
        string currentCity = null;
        foreach (string city in shortestCities)
        {
            if (!processed.Contains(city))
            {
                if (shortestPaths[city].Item1 == int.MaxValue)
                {
                    finish = true;
                }
                currentCity = city;
                break;
            }
        }

        if (finish)
        {
            break;
        }

        // Relax every road leaving the current city.
        var citiesToGo = routes.Where(route => route.Point1 == currentCity);
        foreach (PointGroup cityToGo in citiesToGo)
        {
            if (shortestPaths[cityToGo.Point2].Item1 > cityToGo.Lenght + shortestPaths[cityToGo.Point1].Item1)
            {
                List<PointGroup> sss = shortestPaths[cityToGo.Point1].Item2.ToList();
                sss.Add(cityToGo);
                var cost = cityToGo.Lenght + shortestPaths[cityToGo.Point1].Item1;
                shortestPaths[cityToGo.Point2] = new Tuple<double, List<PointGroup>>(cost, sss);
            }
        }

        processed.Add(currentCity);
    }

    try
    {
        if (shortestPaths[to].Item1 == int.MaxValue)
        {
            return new List<string>();
        }
    }
    catch (Exception)
    {
        Debug.Log(to);
        throw; // rethrow without resetting the stack trace
    }

    var shortPath = shortestPaths[to].Item2.Select(t => t.Point1).ToList();
    shortPath.Add(to);
    return shortPath;
}
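// Hedged sketch: the same shortest-path computation in a more conventional Dijkstra shape, using
// only BCL collections. The (A, B, Length) tuples are hypothetical stand-ins for PointGroup, and
// it assumes both endpoints appear in at least one road. Selecting the nearest unvisited node by
// a linear scan keeps the sketch short rather than asymptotically optimal.
using System.Collections.Generic;
using System.Linq;

static class DijkstraSketch
{
    public static List<string> ShortestPath(
        string from, string to, IEnumerable<(string A, string B, double Length)> roads)
    {
        // Undirected graph: register each road in both directions.
        var adjacency = new Dictionary<string, List<(string To, double Length)>>();
        void AddEdge(string a, string b, double len)
        {
            if (!adjacency.TryGetValue(a, out var list)) adjacency[a] = list = new List<(string, double)>();
            list.Add((b, len));
        }
        foreach (var (a, b, len) in roads) { AddEdge(a, b, len); AddEdge(b, a, len); }

        var dist = adjacency.Keys.ToDictionary(c => c, _ => double.PositiveInfinity);
        var prev = new Dictionary<string, string>();
        dist[from] = 0;
        var unvisited = new HashSet<string>(adjacency.Keys);

        while (unvisited.Count > 0)
        {
            var current = unvisited.OrderBy(c => dist[c]).First();
            if (double.IsPositiveInfinity(dist[current]) || current == to) break;
            unvisited.Remove(current);

            foreach (var (next, len) in adjacency[current])
            {
                if (dist[current] + len < dist[next]) { dist[next] = dist[current] + len; prev[next] = current; }
            }
        }

        if (double.IsPositiveInfinity(dist[to])) return new List<string>();

        // Walk the predecessor chain back from the target and reverse it.
        var path = new List<string> { to };
        for (var c = to; prev.TryGetValue(c, out var p); c = p) path.Add(p);
        path.Reverse();
        return path;
    }
}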
/// <summary>
/// eliminate redundant rows in sparse matrix structure, based on extreme points selection.
/// </summary>
/// <param name="rowsIntheSameGroup"></param>
/// <returns></returns>
public static HashSet<Row> EliminateRedundantRowsSubExtreme(HashSet<Row> rowsIntheSameGroup)
{
    if (rowsIntheSameGroup.Count <= 1)
    {
        return rowsIntheSameGroup;
    }

    var clonedRows = Row.CloneSetRows(rowsIntheSameGroup);
    var i = 0;
    var rows1 = clonedRows.ToDictionary(item => (i++), item => item);
    var j = 0;
    var row2 = rowsIntheSameGroup.ToDictionary(item => (j++), item => item);
    var resultingRows = new HashSet<int>();
    var workingstack = new Stack<Dictionary<int, Row>>();
    workingstack.Push(rows1);

    //rows with the same extreme col value. The reduction is done based on the remaining columns.
    while (workingstack.Count != 0)
    {
        var rows = workingstack.Pop();
        if (rows.Count > 1)
        {
            var colMaxValue = new Dictionary<int, double>();
            var colMaxRow = new Dictionary<int, Dictionary<int, Row>>();
            var colIndexHashSet = new HashSet<int>();
            bool flag2 = true;
            foreach (KeyValuePair<int, Row> keyValuePair in rows)
            {
                if (flag2)
                {
                    foreach (int i1 in keyValuePair.Value.col)
                    {
                        if (colIndexHashSet.Contains(i1)) { flag2 = false; }
                    }
                }
                colIndexHashSet.UnionWith(keyValuePair.Value.col);
            }
            if (flag2)
            {
                resultingRows.UnionWith(rows.Keys);
                continue;
            }

            var rowsIndex = new List<int>(rows.Keys);
            foreach (int colIndex in colIndexHashSet)
            {
                var flag = false;
                for (int i2 = rowsIndex.Count - 1; i2 >= 0; i2--) //rows
                {
                    var currentRowIndex = rowsIndex[i2];
                    var currentRow1 = rows[currentRowIndex];
                    if (currentRow1.col.Contains(colIndex))
                    {
                        var valIndex = currentRow1.col.BinarySearch(colIndex);
                        var colValue = currentRow1.val[valIndex];
                        if (colValue < 0)
                        {
                            if (colMaxValue.ContainsKey(colIndex))
                            {
                                if (colMaxValue[colIndex] > colValue)
                                {
                                    colMaxValue[colIndex] = colValue;
                                    colMaxRow[colIndex] = new Dictionary<int, Row> { { currentRowIndex, currentRow1 } };
                                    //can also be equal
                                }
                                else if (Math.Abs(colMaxValue[colIndex] - colValue) < EPSILON)
                                {
                                    colMaxRow[colIndex].Add(currentRowIndex, currentRow1);
                                }
                            }
                            else
                            {
                                colMaxValue[colIndex] = colValue;
                                colMaxRow[colIndex] = new Dictionary<int, Row> { { currentRowIndex, currentRow1 } };
                            }
                            flag = true;
                        }
                        else
                        {
                            currentRow1.col.RemoveAt(valIndex); //if positive, like 1, remove.
                            currentRow1.val.RemoveAt(valIndex);
                        }
                    }
                }

                //remove the rows that were already added to colMaxRow[colIndex]
                if (flag)
                {
                    //if (colMaxRow.ContainsKey(colIndex))
                    // {
                    foreach (int key in colMaxRow[colIndex].Keys)
                    {
                        rows.Remove(key);
                        rowsIndex.Remove(key);
                    }
                    // }
                }
            }

            // var rowsIndex = new List<int>(colMaxRow.Keys);
            foreach (KeyValuePair<int, Dictionary<int, Row>> keyValuePair in colMaxRow)
            {
                Dictionary<int, Row> rowsDictionary = keyValuePair.Value;
                //List<int> rowsIndex2 = new List<int>(keyValuePair.Value.Keys.Count);
                //List<int> rowsIndex3 = new List<int>(keyValuePair.Value.Keys.Count);
                //foreach (KeyValuePair<int, Row> intRowPair in keyValuePair.Value)
                //{
                //    if (intRowPair.Value.col.Count == 1)
                //    {
                //        rowsIndex2.Add(intRowPair.Key);
                //    }
                //}
                //////if there is only one element in a row, and rows with the same values, then remove
                //////List<int> rowsIndex = new List<int>(keyValuePair.Value.Keys);
                //for (int inneri = rowsIndex2.Count - 1; inneri >= 1; inneri--)
                //{
                //    var inneriIndex = rowsIndex2[inneri];
                //    var row = rowsDictionary[inneriIndex];
                //    for (int innerj = inneri - 1; innerj >= 0; innerj--)
                //    {
                //        var innerjIndex = rowsIndex2[innerj];
                //        var rowj = rowsDictionary[innerjIndex];
                //        //if all equal, then this is an identical/redundant row, remove
                //        if (row.col[0] == rowj.col[0] && Math.Abs(row.val[0] - rowj.val[0]) < EPSILON)
                //        {
                //            rowsDictionary.Remove(inneriIndex);
                //            break;
                //        }
                //    }
                //}
                //rowsIndex = new List<int>(keyValuePair.Value.Keys);
                //// remove similar rows
                //for (int inneri = rowsDictionary.Count - 1; inneri >= 1; inneri--)
                //{
                //    var inneriIndex = rowsIndex[inneri];
                //    var row = rowsDictionary[inneriIndex];
                //    if (row.col.Count > 2)
                //    {
                //        continue;
                //    }
                //    //if (row.col[0] == i)
                //    //{
                //    for (int innerj = inneri - 1; innerj >= 0; innerj--)
                //    {
                //        var innerjIndex = rowsIndex[innerj];
                //        var rowj = rowsDictionary[innerjIndex];
                //        if (row.col.Count != rowj.col.Count)
                //        {
                //            continue;
                //        }
                //        bool flag = true;
                //        for (int k = 0; k < row.col.Count; k++)
                //        {
                //            if (row.col[k] == rowj.col[k]) //GL:TODO:can improve, try int compare
                //            {
                //                if (Math.Abs(row.val[k] - rowj.val[k]) > EPSILON)
                //                {
                //                    flag = false;
                //                    break;
                //                }
                //            }
                //            else
                //            {
                //                flag = false;
                //                break;
                //            }
                //        }
                //        //if all equal, then this is an identical/redundant row, remove
                //        if (flag)
                //        {
                //            rowsDictionary.Remove(inneriIndex);
                //            break;
                //        }
                //    }
                //    //find sub extreme points and remove rest
                //}

                //recursive if number of rows larger than 1
                if (rowsDictionary.Count > 1)
                {
                    var nonZeroElementDictionary = new Dictionary<int, Row>();
                    foreach (KeyValuePair<int, Row> rowPair in rowsDictionary)
                    {
                        var index = rowPair.Value.col.BinarySearch(keyValuePair.Key);
                        rowPair.Value.col.RemoveAt(index);
                        rowPair.Value.val.RemoveAt(index);
                        if (rowPair.Value.col.Count > 0)
                        {
                            nonZeroElementDictionary.Add(rowPair.Key, rowPair.Value);
                        }
                        else
                        {
                            resultingRows.Add(rowPair.Key);
                            break; //only one-element rows, keep one to delete identical rows
                        }
                    }
                    if (nonZeroElementDictionary.Count > 0)
                    {
                        workingstack.Push(nonZeroElementDictionary);
                    }
                }
                else
                {
                    Debug.Assert(rowsDictionary.Keys.Count > 0, "true1");
                    resultingRows.UnionWith(rowsDictionary.Keys);
                }
            }
        }
        else
        {
            Debug.Assert(rows.Keys.Count > 0, "true2");
            resultingRows.UnionWith(rows.Keys);
        }
    }

    var returnRows = new HashSet<Row>();
    foreach (int k in resultingRows)
    {
        returnRows.Add(row2[k]);
    }
    Debug.Assert(resultingRows.Count > 0, "true");
    return returnRows;
}
public static SparseMatrixMDP GaussianJordenElimination(SparseMatrixMDP smatrixMdp)
{
    var r = smatrixMdp.Ngroup;
    var groups = smatrixMdp.Groups;
    for (int i = 0; i < r; i++) //operating on each group; i can be taken as the pivot position
    {
        var currentGroup = groups[i];

        //rows in the ith group should be normalized according to the element in their pivot position (ith group, ith column)
        Debug.Assert(groups != null, "groups != null");
        foreach (Row currentRow1 in currentGroup.RowsInSameGroup)
        {
            if (currentRow1.col[0] == i) //TODO: I think this info is redundant
            {
                if (Math.Abs(currentRow1.val[0] - 1) > EPSILON)
                {
                    for (int k = 1; k < currentRow1.val.Count; k++)
                    {
                        currentRow1.val[k] = currentRow1.val[k] / currentRow1.val[0];
                    }
                }
                currentRow1.col.RemoveAt(0);
                currentRow1.val.RemoveAt(0);
            }
        }
        //currentRow1.colIndexCurrentRow = currentRow1.col.BinarySearch(i);

        //use the current row to deduct every row in the other groups if the corresponding element (column i in the dense matrix) is not zero;
        //follow upper triangular form.
        for (int j = 0; j < r; j++) //scan all groups
        {
            if (j == i)
            {
                continue;
            }

            //update rows in group j
            var pivotGroup = groups[j];
            var removeCollection = new HashSet<Row>();
            var addCollection = new HashSet<Row>();
            foreach (Row pivotRow in pivotGroup.RowsInSameGroup)
            {
                if (pivotRow.col[0] == i) //index of col for the column value in the dense matrix equal to the pivot element; if the value is zero, it will not be found
                {
                    double t = -pivotRow.val[0];
                    //var hashset1 = new HashSet<int>(pivotRow.col);
                    //hashset1.UnionWith(new HashSet<int>(currentRow.col));
                    pivotRow.col.RemoveAt(0);
                    pivotRow.val.RemoveAt(0);
                    foreach (Row currentRow in currentGroup.RowsInSameGroup)
                    {
                        // bool flag;
                        if (!CheckDictionarySubset(pivotRow.SelectionMemoryInGroups, currentRow.SelectionMemoryInGroups))
                        {
                            continue;
                        }

                        var r3 = new Row
                        {
                            diag = pivotRow.diag,
                            diagCol = pivotRow.diagCol,
                            // val = new List<double>(hashset1.Count),
                            // col = hashset1.ToList(),
                            Index = pivotRow.Index,
                        };

                        //add elements to colValuePair
                        var hashset1 = new HashSet<int>(currentRow.col);
                        hashset1.UnionWith(new HashSet<int>(pivotRow.col));
                        var colValuePair = new Dictionary<int, double>(hashset1.ToDictionary(item => item, item => 0.0));
                        for (int inneri = 0; inneri < currentRow.col.Count; inneri++)
                        {
                            if (Math.Abs(currentRow.val[inneri]) > EPSILON)
                            {
                                colValuePair[currentRow.col[inneri]] += currentRow.val[inneri] * t;
                            }
                        }

                        //add workingRow to r3
                        //var hashset2 = new HashSet<int>();
                        for (int inneri = 0; inneri < pivotRow.col.Count; inneri++)
                        {
                            double m = colValuePair[pivotRow.col[inneri]] + pivotRow.val[inneri];
                            if (Math.Abs(m) > EPSILON)
                            {
                                colValuePair[pivotRow.col[inneri]] = m;
                            }
                        }
                        r3.col = new List<int>(colValuePair.Keys);
                        r3.val = new List<double>(colValuePair.Values);
                        // r3.SelectionMemoryInGroups = new Dictionary<Group, Row>(pivotRow.SelectionMemoryInGroups);
                        // r3.SelectionMemoryInGroups[currentGroup] = currentRow;
                        r3.SelectionMemoryInGroups = new Dictionary<int, int>(pivotRow.SelectionMemoryInGroups);
                        r3.SelectionMemoryInGroups[currentGroup.Index] = currentRow.Index;
                        //update diag elements in that row. After row adding, the pivot element/diag info is redundant.
                        // r3.diag = r3.col.Contains(r3.diagCol) ? colValuePair[r3.diagCol] : 0;

                        //pivotGroup.RowsInSameGroup.Add(r3);
                        addCollection.Add(r3);
                        if (pivotRow.SelectionMemoryInGroups.ContainsKey(currentGroup.Index))
                        {
                            break;
                        }
                        //If pivotRow contains the currentGroup index, and currentRow can work with pivotRow,
                        //that means the rest of the rows in the group are not that useful.
                    }
                    removeCollection.Add(pivotRow);
                }
            }

            //update rows in the current group
            foreach (Row row in removeCollection)
            {
                pivotGroup.RowsInSameGroup.Remove(row);
            }
            foreach (Row row in addCollection)
            {
                pivotGroup.RowsInSameGroup.Add(row);
            }
        }
        Debug.WriteLine(smatrixMdp);
    }
    return smatrixMdp;
}
public static ReadOnlyCollection<ReadOnlyDictionary<String, String>> ParseMulti(this String input, String pattern, RegexOptions options)
{
    if (input == null) return null;

    var names = new HashSet<String>();
    var m_meta = Regex.Match(pattern, @"\(\?\<(?<name>.*?)\>");
    for (; m_meta.Success; m_meta = m_meta.NextMatch())
    {
        var name = m_meta.Result("${name}");
        names.Add(name);
    }

    var ms = input.MatchMulti(pattern, options);
    if (ms == null) return null;
    return ms.Select(m =>
    {
        if (!m.Success) return null;
        return names.ToDictionary(name => name, name => m.Result("${" + name + "}")).ToReadOnly();
    }).Where(m => m != null).ToReadOnly();
}
public List<Account> GetUserList(string searchString)
{
    Require.NotEmpty(searchString, nameof(searchString));

    var allProjectMemberships = _projectMembershipRepostiory.GetAllProjectMemberships().ToList();
    var allUsers = new HashSet<Account>(_userRepository.GetAllAccounts());
    var allUsersToSearchByRole = new HashSet<Account>(
        allProjectMemberships.Select(membership => allUsers.Single(account => account.UserId == membership.DeveloperId)));
    var userRolesDictionary = allUsersToSearchByRole.ToDictionary(
        user => user,
        user => allProjectMemberships.Where(membership => membership.DeveloperId == user.UserId)
            .Select(that => that.Role));

    return userRolesDictionary.Where(
            pair => pair.Value.Any(role => Extensions.Contains(role, searchString)))
        .Select(pair => pair.Key)
        .Union(
            allUsers.Where(
                account => Extensions.Contains($"{account.Firstname} {account.Lastname}", searchString))).ToList();
}
public Dictionary<User, HashSet<string>> GetSecondary(HashSet<User> users)
{
    return users.ToDictionary(user => user, user => GetSecondary(user).ToHashSet());
}
/// <summary>
/// Build a dictionary of mixins
/// </summary>
/// <param name="finalMixins">a list of mixins</param>
/// <returns>a dictionary of all the necessary mixins</returns>
private Dictionary<string, ModuleMixin> BuildMixinDictionary(IEnumerable<ModuleMixin> finalMixins)
{
    var allMixins = new HashSet<ModuleMixin>();
    foreach (var mixin in finalMixins)
    {
        if (allMixins.All(x => x.MixinName != mixin.MixinName))
            allMixins.Add(mixin);
    }

    return allMixins.ToDictionary(x => x.MixinName, x => x);
}
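// Hedged sketch: the duplicate-name filter above rescans allMixins for every mixin, which is
// quadratic. A GroupBy-based variant with the same "first occurrence wins" behaviour could look
// like this; the Mixin class here is a hypothetical stand-in for ModuleMixin.
using System.Collections.Generic;
using System.Linq;

static class MixinDictionarySketch
{
    public sealed class Mixin
    {
        public Mixin(string mixinName) { MixinName = mixinName; }
        public string MixinName { get; }
    }

    public static Dictionary<string, Mixin> Build(IEnumerable<Mixin> finalMixins)
    {
        return finalMixins
            .GroupBy(m => m.MixinName)
            .ToDictionary(g => g.Key, g => g.First()); // keep the first mixin seen for each name
    }
}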
public Dictionary<Webpage, HashSet<string>> GetPrimary(HashSet<Webpage> webpages)
{
    return webpages.ToDictionary(webpage => webpage, webpage => GetPrimaryTerms(webpage).ToHashSet());
}
public static SparseMatrixMDP GaussianJordenElimination(SparseMatrixMDP smatrixMdp, bool first)
{
    var r = smatrixMdp.Ngroup;
    // Debug.WriteLine(smatrixMdp);

    //reduce
    if (first)
    {
        foreach (var group in smatrixMdp.Groups)
        {
            group.RowsInSameGroup = LinearEquationsSolver.EliminateRedundantRowsSubExtreme(group.RowsInSameGroup);
        }
    }
    // Debug.WriteLine(smatrixMdp);

    var groups = smatrixMdp.Groups;
    for (int i = 0; i < r; i++) //operating on each group; i can be taken as the pivot position
    {
        var currentGroup = groups[i];

        //rows in the ith group should be normalized according to the element in their pivot position (ith group, ith column)
        //Debug.Assert(groups != null, "groups != null");
        // var initialZeroRows = new HashSet<Row>();
        foreach (Row currentRow1 in currentGroup.RowsInSameGroup)
        {
            // Debug.Assert(currentRow1.col[0] == i, "zero value");
            if (currentRow1.col[0] == i) //TODO: I think this info is redundant
            {
                double currentFirstValue = currentRow1.val[0];
                if (Math.Abs(currentFirstValue - 1) > EPSILON)
                {
                    for (int k = 1; k < currentRow1.val.Count; k++)
                    {
                        currentRow1.val[k] = currentRow1.val[k] / currentFirstValue;
                    }
                }
                currentRow1.col.RemoveAt(0);
                currentRow1.val.RemoveAt(0);
                currentRow1.diag = 1;
            }
            //else
            // {
            //     initialZeroRows.Add(currentRow1);
            // }
        }
        currentGroup.RowsInSameGroup = LinearEquationsSolver.EliminateRedundantRowsSubExtreme(currentGroup.RowsInSameGroup);

        //use the current row to deduct every row in the other groups if the corresponding element (column i in the dense matrix) is not zero;
        //follow upper triangular form.
        for (int j = 0; j < r; j++) //scan all groups
        {
            if (j == i)
            {
                continue;
            }

            //update rows in group j
            var pivotGroup = groups[j];
            var removeCollection = new HashSet<Row>();
            var addCollection = new HashSet<Row>();
            foreach (Row pivotRow in pivotGroup.RowsInSameGroup)
            {
                if (pivotRow.col[0] == i) //index of col for the column value in the dense matrix equal to the pivot element; if the value is zero, it will not be found
                {
                    double t = -pivotRow.val[0];
                    pivotRow.col.RemoveAt(0);
                    pivotRow.val.RemoveAt(0);
                    if (currentGroup.RowsInSameGroup.Count > 1)
                    {
                        foreach (Row currentRow in currentGroup.RowsInSameGroup)
                        {
                            // bool flag;
                            if (!CheckDictionarySubset(pivotRow.SelectionMemoryInGroups, currentRow.SelectionMemoryInGroups, pivotRow.WorkingGroup, currentRow.WorkingGroup))
                            {
                                continue;
                            }

                            var r3 = new Row
                            {
                                diag = pivotRow.diag,
                                diagCol = pivotRow.diagCol,
                                Index = pivotRow.Index,
                            };

                            //add elements to colValuePair
                            // var hashset1 = new HashSet<int>(currentRow.col);
                            //hashset1.UnionWith(pivotRow.col);
                            var colValuePair = new SortedDictionary<int, double>();
                            for (int inneri = 0; inneri < currentRow.col.Count; inneri++)
                            {
                                //if (Math.Abs(currentRow.val[inneri]) > EPSILON)
                                //{
                                colValuePair[currentRow.col[inneri]] = currentRow.val[inneri] * t;
                                // }
                                if (currentRow.col[inneri] == r3.diagCol)
                                {
                                    r3.diag = r3.diag + currentRow.val[inneri] * t;
                                }
                            }

                            //add workingRow to r3
                            //var hashset2 = new HashSet<int>();
                            for (int inneri = 0; inneri < pivotRow.col.Count; inneri++)
                            {
                                if (colValuePair.ContainsKey(pivotRow.col[inneri]))
                                {
                                    double m = colValuePair[pivotRow.col[inneri]] + pivotRow.val[inneri];
                                    if (Math.Abs(m) > EPSILON)
                                    {
                                        colValuePair[pivotRow.col[inneri]] = m;
                                    }
                                }
                                else
                                {
                                    colValuePair[pivotRow.col[inneri]] = pivotRow.val[inneri];
                                }
                            }
                            r3.col = new List<int>(colValuePair.Keys);
                            r3.val = new List<double>(colValuePair.Values);
                            r3.SelectionMemoryInGroups = new Dictionary<int, int>(pivotRow.SelectionMemoryInGroups);
                            r3.WorkingGroup = new BitArray(pivotRow.WorkingGroup);
                            foreach (KeyValuePair<int, int> d in currentRow.SelectionMemoryInGroups)
                            {
                                r3.SelectionMemoryInGroups[d.Key] = d.Value;
                                r3.WorkingGroup.Set(d.Key, true);
                            }

                            addCollection.Add(r3);
                            if (pivotRow.SelectionMemoryInGroups.ContainsKey(currentGroup.Index))
                            {
                                break;
                            }
                        }
                    }
                    else
                    {
                        foreach (Row currentRow in currentGroup.RowsInSameGroup)
                        {
                            var r3 = new Row
                            {
                                diag = pivotRow.diag,
                                diagCol = pivotRow.diagCol,
                                Index = pivotRow.Index,
                            };

                            //add elements to colValuePair
                            var hashset1 = new HashSet<int>(currentRow.col);
                            hashset1.UnionWith(new HashSet<int>(pivotRow.col));
                            var colValuePair = new SortedDictionary<int, double>(hashset1.ToDictionary(item => item, item => 0.0));
                            for (int inneri = 0; inneri < currentRow.col.Count; inneri++)
                            {
                                //if (Math.Abs(currentRow.val[inneri]) > EPSILON)
                                //{
                                colValuePair[currentRow.col[inneri]] = currentRow.val[inneri] * t;
                                // }
                                if (currentRow.col[inneri] == r3.diagCol)
                                {
                                    r3.diag = r3.diag + currentRow.val[inneri] * t;
                                }
                            }
                            for (int inneri = 0; inneri < pivotRow.col.Count; inneri++)
                            {
                                double m = colValuePair[pivotRow.col[inneri]] + pivotRow.val[inneri];
                                if (Math.Abs(m) > EPSILON)
                                {
                                    colValuePair[pivotRow.col[inneri]] = m;
                                }
                            }
                            r3.col = new List<int>(colValuePair.Keys);
                            r3.val = new List<double>(colValuePair.Values);
                            r3.SelectionMemoryInGroups = new Dictionary<int, int>(pivotRow.SelectionMemoryInGroups);
                            r3.WorkingGroup = new BitArray(pivotRow.WorkingGroup);
                            foreach (KeyValuePair<int, int> d in currentRow.SelectionMemoryInGroups)
                            {
                                r3.SelectionMemoryInGroups[d.Key] = d.Value;
                                r3.WorkingGroup.Set(d.Key, true);
                            }
                            addCollection.Add(r3);
                        }
                    }
                    removeCollection.Add(pivotRow);
                }
            }

            //update rows in the current group
            foreach (Row row in removeCollection)
            {
                pivotGroup.RowsInSameGroup.Remove(row);
            }
            //clear redundant rows in addCollection
            //pivotGroup.RowsInSameGroup.UnionWith(EliminateRedundantRows(addCollection));
            //var reducedRows = EliminateRedundantRowsSubExtreme(addCollection);
            pivotGroup.RowsInSameGroup.UnionWith(addCollection);
            if (pivotGroup.RowsInSameGroup.Count > smatrixMdp.Ncols) //r is not the correct bound.
            {
                //if (pivotGroup.Index > currentGroup.Index)
                //{
                //    pivotGroup.RowsInSameGroup = EliminateRedundantRowsSubExtremeWithNormalization(pivotGroup.RowsInSameGroup);
                //}else
                //{
                //    pivotGroup.RowsInSameGroup = EliminateRedundantRowsSubExtreme(pivotGroup.RowsInSameGroup);
                //}
                pivotGroup.RowsInSameGroup = pivotGroup.Index > currentGroup.Index
                    ? EliminateRedundantRowsSubExtremeWithNormalization(pivotGroup.RowsInSameGroup)
                    : EliminateRedundantRowsSubExtreme(pivotGroup.RowsInSameGroup);
            }
        }
        //Debug.WriteLine(smatrixMdp);
    }

    foreach (var group in smatrixMdp.Groups)
    {
        group.RowsInSameGroup = LinearEquationsSolver.EliminateRedundantRowsSubExtreme(group.RowsInSameGroup);
    }
    return smatrixMdp;
}
private static List<Reference> ResolveReferences(Queue<AtomModel> sortedReferences, HashSet<AtomModel> projectedAtoms)
{
    // Pick the first ref to be the atomModel
    var primaryRef = new SimpleReference(sortedReferences.Dequeue());
    var references = new List<Reference> { primaryRef };
    var referenceMap = new Dictionary<string, Reference> { { primaryRef.Name, primaryRef } };
    var atomDependencyMap = projectedAtoms.ToDictionary(atom => atom.Name, atom => atom.GetDependencies());

    foreach (var atomRelation in sortedReferences)
    {
        var possibleReferences = atomDependencyMap[atomRelation.Name];
        if (!possibleReferences.Any())
        {
            var r = new SimpleReference(atomRelation);
            references.Add(r);
            referenceMap.Add(r.Name, r);
        }
        else
        {
            var referenceName = possibleReferences.First(referenceMap.ContainsKey);
            var reference = referenceMap[referenceName];
            var resolvedDependencies = possibleReferences.Where(referenceMap.ContainsKey)
                .Select(p => referenceMap[p]);
            var r = new ResolvedReference(atomRelation, resolvedDependencies);
            references.Add(r);
            referenceMap.Add(r.Name, r);
        }
    }

    return references;
}
public IEnumerable<ReduceTypePerKey> GetReduceTypesPerKeys(string indexName, int limitOfItemsToReduceInSingleStep)
{
    var allKeysToReduce = new HashSet<string>(StringComparer.InvariantCultureIgnoreCase);

    foreach (var reduction in storage.ScheduleReductions["ByViewLevelReduceKeyAndBucket"]
        .SkipTo(new RavenJObject { { "view", indexName }, { "level", 0 } })
        .TakeWhile(x => string.Equals(indexName, x.Value<string>("view"), StringComparison.InvariantCultureIgnoreCase) &&
                        x.Value<int>("level") == 0))
    {
        allKeysToReduce.Add(reduction.Value<string>("reduceKey"));
    }

    var reduceTypesPerKeys = allKeysToReduce.ToDictionary(x => x, x => ReduceType.SingleStep);

    foreach (var reduceKey in allKeysToReduce)
    {
        var count = GetNumberOfMappedItemsPerReduceKey(indexName, reduceKey);
        if (count >= limitOfItemsToReduceInSingleStep)
        {
            reduceTypesPerKeys[reduceKey] = ReduceType.MultiStep;
        }
    }

    return reduceTypesPerKeys.Select(x => new ReduceTypePerKey(x.Key, x.Value));
}
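// Hedged, self-contained sketch of the "default everything, then selectively promote" pattern
// above, detached from the RavenDB storage types: every reduce key starts as SingleStep and is
// promoted to MultiStep once its mapped-item count reaches the limit. countMappedItems is a
// hypothetical stand-in for GetNumberOfMappedItemsPerReduceKey.
using System;
using System.Collections.Generic;
using System.Linq;

static class ReduceTypeSketch
{
    public enum ReduceType { SingleStep, MultiStep }

    public static Dictionary<string, ReduceType> Classify(
        IEnumerable<string> reduceKeys, Func<string, int> countMappedItems, int limitForSingleStep)
    {
        var result = new HashSet<string>(reduceKeys, StringComparer.InvariantCultureIgnoreCase)
            .ToDictionary(k => k, _ => ReduceType.SingleStep);

        foreach (var key in result.Keys.ToList()) // snapshot the keys so the values can be updated
        {
            if (countMappedItems(key) >= limitForSingleStep)
                result[key] = ReduceType.MultiStep;
        }
        return result;
    }
}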
/// <summary>
/// Moves items from the Stash to the Backpack
/// </summary>
/// <param name="itemIds">list of items to withdraw</param>
/// <param name="maxAmount">amount to withdraw up to (including counts already in backpack)</param>
/// <returns></returns>
public static async Task<bool> Execute(IEnumerable<int> itemIds, int maxAmount)
{
    Logger.Log("TakeItemsFromStash Started!");

    if (!ZetaDia.IsInGame || !ZetaDia.IsInTown)
        return true;

    if (Town.Locations.Stash.Distance(ZetaDia.Me.Position) > 3f)
    {
        await MoveToAndInteract.Execute(Town.Locations.Stash, Town.ActorIds.Stash, 8f);
    }

    var stash = Town.Actors.Stash;
    if (stash == null)
    {
        Logger.Log("Unable to find Stash");
        return false;
    }

    if (!UIElements.StashWindow.IsVisible && Town.Locations.Stash.Distance(ZetaDia.Me.Position) <= 10f)
    {
        Logger.Log("Stash window not open, interacting");
        stash.Interact();
    }

    var itemIdsHashSet = new HashSet<int>(itemIds);
    var amountWithdrawn = itemIdsHashSet.ToDictionary(k => k, v => (long)0);
    var overageTaken = itemIdsHashSet.ToDictionary(k => k, v => false);
    var lastStackTaken = itemIdsHashSet.ToDictionary(k => k, v => default(ACDItem));

    foreach (var item in ZetaDia.Me.Inventory.Backpack.Where(i => i.ACDGuid != 0 && i.IsValid && itemIdsHashSet.Contains(i.ActorSNO)).ToList())
    {
        amountWithdrawn[item.ActorSNO] += item.ItemStackQuantity;
        lastStackTaken[item.ActorSNO] = item;
    }

    foreach (var item in ZetaDia.Me.Inventory.StashItems.Where(i => i.ACDGuid != 0 && i.IsValid && itemIdsHashSet.Contains(i.ActorSNO)).ToList())
    {
        try
        {
            if (!item.IsValid || item.IsDisposed)
                continue;

            var stackSize = item.ItemStackQuantity;
            var numTakenAlready = amountWithdrawn[item.ActorSNO];

            // We have enough of this material already
            var alreadyTakenEnough = numTakenAlready >= maxAmount;
            if (alreadyTakenEnough)
                continue;

            // We have enough of everything already.
            if (amountWithdrawn.All(i => i.Value >= maxAmount))
                break;

            // Only take up to the required amount.
            var willBeOverMax = numTakenAlready + stackSize > maxAmount;

            if (!willBeOverMax || !overageTaken[item.ActorSNO])
            {
                var lastItem = lastStackTaken[item.ActorSNO];
                var amountRequiredToMax = maxAmount - numTakenAlready;

                if (willBeOverMax && lastItem != null && lastItem.IsValid && !lastItem.IsDisposed && stackSize > amountRequiredToMax)
                {
                    // Tried InventoryManager.SplitStack but it didnt work, reverting to moving onto existing stacks.
                    var amountToSplit = stackSize - lastItem.ItemStackQuantity;
                    Logger.Log("Merging Stash Stack {0} onto Backpack Stack. StackSize={1} WithdrawnAlready={2}", item.Name, amountToSplit, numTakenAlready);
                    ZetaDia.Me.Inventory.MoveItem(item.DynamicId, ZetaDia.Me.CommonData.DynamicId, InventorySlot.BackpackItems, lastItem.InventoryColumn, lastItem.InventoryRow);
                    amountWithdrawn[item.ActorSNO] += amountToSplit;
                    overageTaken[item.ActorSNO] = true;
                }
                else
                {
                    Logger.Log("Removing {0} ({3}) from stash. StackSize={1} WithdrawnAlready={2}", item.Name, stackSize, numTakenAlready, item.ActorSNO);
                    if (item.IsValid && !item.IsDisposed)
                    {
                        ZetaDia.Me.Inventory.QuickWithdraw(item);
                        amountWithdrawn[item.ActorSNO] += stackSize;
                        lastStackTaken[item.ActorSNO] = item;
                    }
                }

                await Coroutine.Sleep(25);
                await Coroutine.Yield();
            }
        }
        catch (Exception ex)
        {
            Logger.LogError(ex.ToString());
        }
    }

    await Coroutine.Sleep(1000);
    Logger.Log("TakeItemsFromStash Finished!");
    return true;
}