/// <summary>
/// Builds a multimap of SSE listener URLs to their associated values.
/// For each listener row retrieved from the data layer, the URL key maps to
/// both the OrgId and the ShortCode, in that order.
/// </summary>
/// <returns>The populated <see cref="MultiMap{T}"/> keyed by listener URL.</returns>
/// <exception cref="Exception">Rethrows any failure from the data layer after logging it.</exception>
public static MultiMap<string> CreateMap()
{
    _Log.Trace("Started");
    // Renamed from `MultiMap` — a local shadowing its own type name is confusing.
    MultiMap<string> map = new MultiMap<string>();
    try
    {
        List<DLSSEListener.SSEListenerData> sseListenerList = SSEListener.Retrieve();
        _Log.Debug(LogHelper.Get("SSEListenerList:=", new { SSEListenerList = sseListenerList }));
        foreach (DLSSEListener.SSEListenerData url in sseListenerList)
        {
            // Add OrgId and ShortCode values under the same URL key.
            map.Add(url.Url, url.OrgId);
            map.Add(url.Url, url.ShortCode);
        }
    }
    catch (Exception ex)
    {
        _Log.Error(ex.Message, ex);
        // Bare rethrow preserves the original stack trace; the previous
        // `throw ex;` reset it at this frame.
        throw;
    }
    _Log.Debug(LogHelper.Get("Final MultiMap Created:=", new { MultiMap = map }));
    return map;
}
/// <summary>
/// Indexes every file under the configured images and camera folders by file
/// name (case-insensitive), then stamps each row object with how many local
/// files share its device file name.
/// </summary>
private void LoadMetaData(object sender, RoutedEventArgs e)
{
    // Build a case-insensitive file-name -> local-path index for comparison.
    var nameIndex = new MultiMap<string, string>(StringComparer.CurrentCultureIgnoreCase);
    var allPaths = new List<string>();
    allPaths.AddRange(Directory.GetFiles(Configuration.ImagesFolder, "*.*", SearchOption.AllDirectories));
    allPaths.AddRange(Directory.GetFiles(Configuration.CameraFolder, "*.*", SearchOption.AllDirectories));
    foreach (var localPath in allPaths)
    {
        nameIndex.Add(System.IO.Path.GetFileName(localPath), localPath);
    }

    foreach (SynergyRowObject sro in rowObjects)
    {
        sro.FileName = System.IO.Path.GetFileName(sro.DevicePath);
        sro.FileNameMatchCount = nameIndex.ContainsKey(sro.FileName)
            ? nameIndex[sro.FileName].Count
            : 0;
    }
}
/// <summary>
/// Initializes the extension for the given TeamCity flow id, generating a
/// unique hash-based id when none is supplied.
/// </summary>
internal TeamCityExtension(string flowId)
{
    this.flowId = flowId ?? Hash64.CreateUniqueHash().ToString();
    this.currentStepStack = new Stack<string>();
    this.continuationMap = new MultiMap<string, Continuation>();
}
/// <summary>
/// Creates an empty registry with fresh lookup tables for plugins, services
/// and components. Components may implement several services, hence the
/// multimap for the service-id index.
/// </summary>
public RegistryData()
{
    this.pluginsByPluginId = new Dictionary<string, PluginDescriptor>();
    this.servicesByServiceId = new Dictionary<string, ServiceDescriptor>();
    this.servicesByServiceTypeName = new Dictionary<TypeName, ServiceDescriptor>();
    this.componentsByComponentId = new Dictionary<string, ComponentDescriptor>();
    this.componentsByServiceId = new MultiMap<string, ComponentDescriptor>();
}
/// <summary>
/// Enumerating an empty MultiMap must yield no pairs at all.
/// </summary>
public void Enumerate_Empty_Ok()
{
    var empty = new MultiMap<string, string>();
    foreach (var pair in empty)
    {
        // Any yielded element means enumeration of an empty map is broken.
        Assert.Fail();
    }
}
/// <summary>
/// Adding values under distinct keys stores each value in its own key's collection.
/// </summary>
public void Add_AddsItemToCorrectCollection()
{
    var multiMap = new MultiMap<int, string>();

    multiMap.Add(1, "1");
    multiMap.Add(42, "42");

    Assert.IsTrue(multiMap[1].SequenceEqual(new[] { "1" }));
    Assert.IsTrue(multiMap[42].SequenceEqual(new[] { "42" }));
}
/// <summary>
/// AddRange appends an entire sequence to the collection of its own key only.
/// </summary>
public void AddRange_AddsItemsToCorrectCollection()
{
    var multiMap = new MultiMap<int, string>();

    multiMap.AddRange(1, Enumerable.Range(1, 10).Select(n => n.ToString()));
    multiMap.AddRange(42, Enumerable.Repeat(42, 42).Select(n => n.ToString()));

    // Rebuild the expected sequences independently so deferred queries are not shared.
    var expectedForOne = Enumerable.Range(1, 10).Select(n => n.ToString());
    var expectedForFortyTwo = Enumerable.Repeat(42, 42).Select(n => n.ToString());
    Assert.IsTrue(multiMap[1].SequenceEqual(expectedForOne));
    Assert.IsTrue(multiMap[42].SequenceEqual(expectedForFortyTwo));
}
/// <summary>
/// Seeds the fixture map with two values under "Odd" (1, 3) and three under
/// "Eve" (4, 8, 6), preserving the original insertion order.
/// </summary>
public void SetUp()
{
    _mmp = new MultiMap<string, int>();
    foreach (var odd in new[] { 1, 3 })
    {
        _mmp.Add("Odd", odd);
    }
    foreach (var even in new[] { 4, 8, 6 })
    {
        _mmp.Add("Eve", even);
    }
}
/// <summary>
/// Remove takes a value out of the targeted key's collection only, leaving
/// every other key's collection untouched.
/// </summary>
public void Remove_RemovesItemFromTheCorrectCollection()
{
    var multiMap = new MultiMap<int, string>();
    multiMap.AddRange(1, new[] { "1", "2", "3" });
    multiMap.AddRange(42, new[] { "42", "42", "42" });

    multiMap.Remove(1, "2");

    Assert.IsTrue(multiMap[1].SequenceEqual(new[] { "1", "3" }));
    Assert.IsTrue(multiMap[42].SequenceEqual(new[] { "42", "42", "42" }));
}
/// <summary>
/// Lazily builds the shared URL multimap the first time a listener is constructed.
/// </summary>
public Listener()
{
    _Log.Trace("Started");
    // NOTE(review): this lazy init is not synchronized; assumes listeners are
    // constructed from a single thread — confirm at call sites.
    if (MultiMap is null)
    {
        MultiMap = CreateMap();
    }
    _Log.Trace("Ended");
}
/// <summary>
/// The indexer returns exactly the collection stored under the requested key.
/// </summary>
public void Indexer_ReturnsCorrectCollection()
{
    var multiMap = new MultiMap<int, string>();
    multiMap.AddRange(1, new[] { "1", "2", "3" });
    multiMap.AddRange(42, new[] { "42", "42", "42" });

    var firstCollection = multiMap[1];
    var secondCollection = multiMap[42];

    Assert.IsTrue(firstCollection.SequenceEqual(new[] { "1", "2", "3" }));
    Assert.IsTrue(secondCollection.SequenceEqual(new[] { "42", "42", "42" }));
}
/// <summary>
/// Copy/shadow constructor. A shadowed template shares the prototype's event
/// records; a true copy gets fresh, empty ones.
/// </summary>
protected DebugTemplate(DebugTemplate prototype, bool shadowLocals, Template enclosingInstance)
    : base(prototype, shadowLocals, enclosingInstance)
{
    if (!shadowLocals)
    {
        newSTEvent = new ConstructionEvent();
        addAttrEvents = new MultiMap<string, AddAttributeEvent>();
    }
    else
    {
        newSTEvent = prototype.newSTEvent;
        addAttrEvents = prototype.addAttrEvents;
    }
}
/// <summary>
/// Logs each registered interpolator's output over a half-step grid spanning
/// the sample range. (Smoke test: results are logged, not asserted.)
/// </summary>
public void InterpolationTest()
{
    double[] x = { 0, 1, 3, 6, 7 };
    double[] y = { 0.8, 3.1, 4.5, 3.9, 2.8 };
    // Evaluation points 0.0, 0.5, ..., 7.0.
    var t = new double[15];
    for (var i = 0; i < 15; i++)
    {
        t[i] = i / 2.0;
    }
    // Removed an unused MultiMap<string, double> local (`values`) that was
    // allocated but never written to or read.
    foreach (var method in _interpolators.Keys)
    {
        log.Debug("{0} : {1}", method, _interpolators[method].Interpolate(x, y, t).CollectionToString());
    }
}
/// <summary>
/// Creates a pattern evaluator.
/// </summary>
/// <param name="testModelBuilder">The test model builder.</param>
/// <param name="patternResolver">The pattern resolver.</param>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="testModelBuilder"/>
/// or <paramref name="patternResolver"/> is null.</exception>
public DefaultPatternEvaluator(ITestModelBuilder testModelBuilder, IPatternResolver patternResolver)
{
    if (testModelBuilder == null)
        throw new ArgumentNullException("testModelBuilder");
    if (patternResolver == null)
        throw new ArgumentNullException("patternResolver");

    this.testModelBuilder = testModelBuilder;
    this.patternResolver = patternResolver;

    registeredScopes = new MultiMap<ICodeElementInfo, IPatternScope>();

    // The root scope gets an implicit anonymous data source so every child
    // context has a data context chain to inherit from.
    var rootContextBuilder = new DefaultTestDataContextBuilder(testModelBuilder, new PatternTestDataContext(null));
    rootContextBuilder.DefineDataSource("");
    rootScope = new DefaultPatternScope(this, null, testModelBuilder.RootTestBuilder, null, rootContextBuilder, false);
}
/// <summary>
/// Enumeration yields every pair grouped by key: both "fruit" values first
/// (in insertion order), then the single "vegetable" value.
/// </summary>
public void Enumerate_KeyHasMultiple_ReturnsBoth()
{
    var map = new MultiMap<string, string>();
    map.Add("fruit", "apple");
    map.Add("vegetable", "broccoli");
    map.Add("fruit", "orange");

    var pairs = new List<KeyValuePair<string, string>>();
    foreach (var pair in map)
    {
        pairs.Add(pair);
    }

    Assert.AreEqual(3, pairs.Count);
    Assert.AreEqual("apple", pairs[0].Value);
    Assert.AreEqual("orange", pairs[1].Value);
    Assert.AreEqual("broccoli", pairs[2].Value);
    Assert.AreEqual("fruit", pairs[0].Key);
    Assert.AreEqual("fruit", pairs[1].Key);
    Assert.AreEqual("vegetable", pairs[2].Key);
}
/// <summary>
/// Ensures the shared multimap exists, then runs the main processing loop.
/// </summary>
/// <remarks>
/// NOTE(review): this is an <c>async void</c> method, so the exception rethrown
/// below is unobservable by callers. The signature is kept as-is because
/// changing it to <c>async Task</c> could break an existing caller or
/// event-handler contract — confirm call sites before changing.
/// </remarks>
public async void Start()
{
    _Log.Trace("Started");
    if (MultiMap == null)
    {
        MultiMap = CreateMap();
        _Log.Debug(LogHelper.Get("MultiMap Created:=", new { MultiMap = MultiMap }));
    }
    try
    {
        await StartProcess();
    }
    catch (Exception ex)
    {
        _Log.Error(ex.Message, ex);
        // Bare rethrow preserves the original stack trace; the previous
        // `throw ex;` reset it at this frame.
        throw;
    }
    _Log.Trace("Ended");
}
/// <summary>
/// Constructs a new <see cref="ParseTreeMatch"/> from the specified parse tree
/// and pattern.
/// </summary>
/// <param name="tree">The parse tree that was matched against the pattern.</param>
/// <param name="pattern">The parse tree pattern.</param>
/// <param name="labels">
/// A mapping from label names to collections of
/// <see cref="Antlr4.Runtime.Tree.IParseTree"/> objects located by the tree
/// pattern matching process.
/// </param>
/// <param name="mismatchedNode">
/// The first node which failed to match the tree pattern during the matching
/// process.
/// </param>
/// <exception cref="ArgumentException">
/// Thrown if <paramref name="tree"/>, <paramref name="pattern"/> or
/// <paramref name="labels"/> is <see langword="null"/>.
/// </exception>
public ParseTreeMatch(IParseTree tree, ParseTreePattern pattern, MultiMap<string, IParseTree> labels, IParseTree mismatchedNode)
{
    // Guard clauses: all three collaborators are mandatory.
    if (tree == null)
        throw new ArgumentException("tree cannot be null");
    if (pattern == null)
        throw new ArgumentException("pattern cannot be null");
    if (labels == null)
        throw new ArgumentException("labels cannot be null");

    this.tree = tree;
    this.pattern = pattern;
    this.labels = labels;
    this.mismatchedNode = mismatchedNode;
}
/// <summary>
/// Rebuilds the breakpoint tree view from the supplied key -> breakpoints map,
/// suspending layout and tree updates while the model is repopulated.
/// </summary>
public void OnBreakpointsChanged(MultiMap<string, Breakpoint> breakpoints)
{
    SuspendLayout();
    treeViewBreakpoints.BeginUpdate();
    _model.Nodes.Clear();
    foreach (string key in breakpoints.Keys)
    {
        foreach (Breakpoint bp in breakpoints[key])
        {
            var node = new BreakpointNode(bp.ClassName);
            node.Breakpoint = bp;
            _model.Nodes.Add(node);
        }
    }
    treeViewBreakpoints.EndUpdate();
    ResumeLayout();
}
/// <summary>
/// Builds an index that maps keys (produced by <paramref name="keyMakingFunction"/>)
/// to the annotations accepted by <paramref name="includeIndexPredicate"/>.
/// </summary>
public AnnotationIndex(Func<Annotation, bool> includeIndexPredicate, Func<Annotation, string> keyMakingFunction)
{
    _includeIndexPredicate = includeIndexPredicate;
    _keyMakingFunction = keyMakingFunction;
    _keyToObjectsMap = new MultiMap<string, Annotation>();
}
/// <summary>
/// Attempts to build an LL(1) lookahead DFA for the given decision.
/// Returns the DFA when the decision is LL(1) (directly, or after making the
/// edge sets disjoint under forced k=1 with auto-backtracking), otherwise null
/// so the caller can fall back to full LL(*) analysis.
/// </summary>
public virtual DFA CreateLL_1_LookaheadDFA( int decision )
{
    Decision d = GetDecision( decision );
    string enclosingRule = d.startState.enclosingRule.Name;
    Rule r = d.startState.enclosingRule;
    NFAState decisionStartState = GetDecisionNFAStartState( decision );
    if ( composite.WatchNFAConversion )
    {
        Console.Out.WriteLine( "--------------------\nattempting LL(1) DFA (d=" + decisionStartState.DecisionNumber + ") for " + decisionStartState.Description );
    }
    // Syntactic predicates not referenced by any DFA never need a lookahead DFA.
    if ( r.IsSynPred && !synPredNamesUsedInDFA.Contains( enclosingRule ) )
    {
        return null;
    }
    // Compute the lookahead set for each alternative (1-based; slot 0 unused).
    int numAlts = GetNumberOfAltsForDecisionNFA( decisionStartState );
    LookaheadSet[] altLook = new LookaheadSet[numAlts + 1];
    for ( int alt = 1; alt <= numAlts; alt++ )
    {
        int walkAlt = decisionStartState.TranslateDisplayAltToWalkAlt( alt );
        NFAState altLeftEdge = GetNFAStateForAltOfDecision( decisionStartState, walkAlt );
        NFAState altStartState = (NFAState)altLeftEdge.transition[0].Target;
        altLook[alt] = ll1Analyzer.Look( altStartState );
    }
    // Compare alt i with alt j for disjointness; any overlap means not plain LL(1).
    bool decisionIsLL_1 = true;
    for ( int i = 1; i <= numAlts; i++ )
    {
        for ( int j = i + 1; j <= numAlts; j++ )
        {
            LookaheadSet collision = altLook[i].Intersection( altLook[j] );
            if ( !collision.IsNil )
            {
                // Collision found (non-LL(1)); break out of both loops.
                decisionIsLL_1 = false;
                goto outer;
            }
        }
    }
outer:
    bool foundConfoundingPredicate = ll1Analyzer.DetectConfoundingPredicates( decisionStartState );
    if ( decisionIsLL_1 && !foundConfoundingPredicate )
    {
        // Build an LL(1) optimized DFA with an edge for each altLook[i].
        if ( NFAToDFAConverter.debug )
        {
            Console.Out.WriteLine( "decision " + decision + " is simple LL(1)" );
        }
        DFA lookaheadDFA2 = new LL1DFA( decision, decisionStartState, altLook );
        SetLookaheadDFA( decision, lookaheadDFA2 );
        UpdateLineColumnToLookaheadDFAMap( lookaheadDFA2 );
        return lookaheadDFA2;
    }
    // Not LL(1), but perhaps we can solve it with a simplified predicate search.
    // Even if k=1 was set manually, only resolve here if we have predicates.
    /*
    SemanticContext visiblePredicates = ll1Analyzer.getPredicates(decisionStartState);
    boolean foundConfoundingPredicate = ll1Analyzer.detectConfoundingPredicates(decisionStartState);
    */
    // Exit if not forced k=1, or we found a predicate situation we can't
    // handle: predicates in rules invoked from this decision.
    if ( GetUserMaxLookahead( decision ) != 1 || // not manually set to k=1
        !GetAutoBacktrackMode( decision ) ||
        foundConfoundingPredicate )
    {
        return null;
    }
    // Collect each alternative's token-type set as a candidate DFA edge.
    IList<IIntSet> edges = new List<IIntSet>();
    for ( int i = 1; i < altLook.Length; i++ )
    {
        LookaheadSet s = altLook[i];
        edges.Add( (IntervalSet)s.TokenTypeSet );
    }
    IList<IIntSet> disjoint = MakeEdgeSetsDisjoint( edges );
    // Map each disjoint edge set to every alternative whose lookahead overlaps it.
    MultiMap<IntervalSet, int> edgeMap = new MultiMap<IntervalSet, int>();
    for ( int i = 0; i < disjoint.Count; i++ )
    {
        IntervalSet ds = (IntervalSet)disjoint[i];
        for ( int alt = 1; alt < altLook.Length; alt++ )
        {
            LookaheadSet look = altLook[alt];
            if ( !ds.And( look.TokenTypeSet ).IsNil )
            {
                edgeMap.Map( ds, alt );
            }
        }
    }
    // TODO: how do we know we covered stuff?
    // Build an LL(1) optimized DFA with an edge for each disjoint set.
    DFA lookaheadDFA = new LL1DFA( decision, decisionStartState, edgeMap );
    SetLookaheadDFA( decision, lookaheadDFA );
    // Create map from line:col to decision DFA (for ANTLRWorks).
    UpdateLineColumnToLookaheadDFAMap( lookaheadDFA );
    return lookaheadDFA;
}
/// <summary>
/// Parses each child of <code>node</code> as an instruction and adds them to the
/// specified instruction list. "Call" references are resolved to production
/// lists using the specified multimap. Container instructions ("Child", "Maybe",
/// "RequireLevel") recurse into their own nested instruction lists.
/// Unrecognized element names are silently ignored.
/// </summary>
/// <param name="node">XML element whose children are instruction elements.</param>
/// <param name="instructions">Destination list for the parsed instructions.</param>
/// <param name="map">Production name -> production/node pairs, used to resolve "Call" refs.</param>
private static void ParseInstructionsFromXml(XmlNode node, List<TreeCrayonInstruction> instructions, MultiMap<string, ProductionNodePair> map)
{
    foreach (XmlNode child in node.ChildNodes)
    {
        switch (child.Name)
        {
            case "Call":
                // Resolve the named production(s); delta defaults to -1.
                String name = XmlUtil.GetString(child, "ref");
                List<Production> productions = GetProductionsByRef(name, map);
                instructions.Add(new Call(productions, XmlUtil.GetInt(child, "delta", -1)));
                break;
            case "Child":
                // Container: recurse into the nested instruction list.
                var ch = new Child();
                ParseInstructionsFromXml(child, ch.Instructions, map);
                instructions.Add(ch);
                break;
            case "Maybe":
                // Container executed with the given probability (default 50%).
                var maybe = new Maybe(XmlUtil.GetFloat(child, "chance", 0.50f));
                ParseInstructionsFromXml(child, maybe.Instructions, map);
                instructions.Add(maybe);
                break;
            case "Forward":
                instructions.Add(new Forward(XmlUtil.GetFloat(child, "distance"), XmlUtil.GetFloat(child, "variation", 0.0f), XmlUtil.GetFloat(child, "radius", 0.86f)));
                break;
            case "Backward":
                instructions.Add(new Backward(XmlUtil.GetFloat(child, "distance"), XmlUtil.GetFloat(child, "variation", 0.0f)));
                break;
            case "Pitch":
                instructions.Add(new Pitch(XmlUtil.GetFloat(child, "angle"), XmlUtil.GetFloat(child, "variation", 0.0f)));
                break;
            case "Scale":
                instructions.Add(new Scale(XmlUtil.GetFloat(child, "scale"), XmlUtil.GetFloat(child, "variation", 0.0f)));
                break;
            case "ScaleRadius":
                instructions.Add(new ScaleRadius(XmlUtil.GetFloat(child, "scale"), XmlUtil.GetFloat(child, "variation", 0.0f)));
                break;
            case "Twist":
                // Note: Twist's variation defaults to 360 degrees, unlike the others.
                instructions.Add(new Twist(XmlUtil.GetFloat(child, "angle", 0), XmlUtil.GetFloat(child, "variation", 360.0f)));
                break;
            case "Level":
                instructions.Add(new Level(XmlUtil.GetInt(child, "delta", -1)));
                break;
            case "Leaf":
                instructions.Add(ParseLeafFromXml(child));
                break;
            case "Bone":
                instructions.Add(new Bone(XmlUtil.GetInt(child, "delta", -1)));
                break;
            case "RequireLevel":
                // Conditional container; any type other than "less" (including
                // a missing attribute) means "greater".
                String type = XmlUtil.GetStringOrNull(child, "type");
                CompareType ctype = type == "less" ? CompareType.Less : CompareType.Greater;
                var req = new RequireLevel(XmlUtil.GetInt(child, "level"), ctype);
                ParseInstructionsFromXml(child, req.Instructions, map);
                instructions.Add(req);
                break;
            case "Align":
                instructions.Add(new Align());
                break;
        }
    }
}
/// <summary>
/// Loads all areatrigger data from the world database: template actions,
/// polygon vertices, splines, the templates themselves, and the per-spell misc
/// entries. Auxiliary rows are staged in multimaps first so each template can
/// collect its related rows by id. Invalid rows are logged and skipped.
/// </summary>
public void LoadAreaTriggerTemplates()
{
    uint oldMSTime = Time.GetMSTime();
    // Staging multimaps keyed by AreaTriggerId / SpellMiscId.
    MultiMap<uint, Vector2> verticesByAreaTrigger = new MultiMap<uint, Vector2>();
    MultiMap<uint, Vector2> verticesTargetByAreaTrigger = new MultiMap<uint, Vector2>();
    MultiMap<uint, Vector3> splinesBySpellMisc = new MultiMap<uint, Vector3>();
    MultiMap<uint, AreaTriggerAction> actionsByAreaTrigger = new MultiMap<uint, AreaTriggerAction>();

    //                                                  0              1          2            3
    SQLResult templateActions = DB.World.Query("SELECT AreaTriggerId, ActionType, ActionParam, TargetType FROM `areatrigger_template_actions`");
    if (!templateActions.IsEmpty())
    {
        do
        {
            uint areaTriggerId = templateActions.Read<uint>(0);
            AreaTriggerAction action;
            action.Param = templateActions.Read<uint>(2);
            action.ActionType = (AreaTriggerActionTypes)templateActions.Read<uint>(1);
            action.TargetType = (AreaTriggerActionUserTypes)templateActions.Read<uint>(3);
            // Reject out-of-range enum values coming from the DB.
            if (action.ActionType >= AreaTriggerActionTypes.Max)
            {
                Log.outError(LogFilter.Sql, "Table `areatrigger_template_actions` has invalid ActionType ({0}) for AreaTriggerId {1} and Param {2}", action.ActionType, areaTriggerId, action.Param);
                continue;
            }
            if (action.TargetType >= AreaTriggerActionUserTypes.Max)
            {
                Log.outError(LogFilter.Sql, "Table `areatrigger_template_actions` has invalid TargetType ({0}) for AreaTriggerId {1} and Param {2}", action.TargetType, areaTriggerId, action.Param);
                continue;
            }
            actionsByAreaTrigger.Add(areaTriggerId, action);
        }
        while (templateActions.NextRow());
    }
    else
    {
        Log.outInfo(LogFilter.ServerLoading, "Loaded 0 AreaTrigger templates actions. DB table `areatrigger_template_actions` is empty.");
    }

    //                                         0              1    2         3         4               5
    SQLResult vertices = DB.World.Query("SELECT AreaTriggerId, Idx, VerticeX, VerticeY, VerticeTargetX, VerticeTargetY FROM `areatrigger_template_polygon_vertices` ORDER BY `AreaTriggerId`, `Idx`");
    if (!vertices.IsEmpty())
    {
        do
        {
            uint areaTriggerId = vertices.Read<uint>(0);
            verticesByAreaTrigger.Add(areaTriggerId, new Vector2(vertices.Read<float>(2), vertices.Read<float>(3)));
            // Target vertices are optional, but must be either both present or both NULL.
            if (!vertices.IsNull(4) && !vertices.IsNull(5))
            {
                verticesTargetByAreaTrigger.Add(areaTriggerId, new Vector2(vertices.Read<float>(4), vertices.Read<float>(5)));
            }
            else if (vertices.IsNull(4) != vertices.IsNull(5))
            {
                Log.outError(LogFilter.Sql, "Table `areatrigger_template_polygon_vertices` has listed invalid target vertices (AreaTrigger: {0}, Index: {1}).", areaTriggerId, vertices.Read<uint>(1));
            }
        }
        while (vertices.NextRow());
    }
    else
    {
        Log.outInfo(LogFilter.ServerLoading, "Loaded 0 AreaTrigger templates polygon vertices. DB table `areatrigger_template_polygon_vertices` is empty.");
    }

    //                                        0            1  2  3
    SQLResult splines = DB.World.Query("SELECT SpellMiscId, X, Y, Z FROM `spell_areatrigger_splines` ORDER BY `SpellMiscId`, `Idx`");
    if (!splines.IsEmpty())
    {
        do
        {
            uint spellMiscId = splines.Read<uint>(0);
            Vector3 spline = new Vector3(splines.Read<float>(1), splines.Read<float>(2), splines.Read<float>(3));
            splinesBySpellMisc.Add(spellMiscId, spline);
        }
        while (splines.NextRow());
    }
    else
    {
        Log.outInfo(LogFilter.ServerLoading, "Loaded 0 AreaTrigger templates splines. DB table `spell_areatrigger_splines` is empty.");
    }

    //                                          0   1     2      3      4      5      6      7      8      9
    SQLResult templates = DB.World.Query("SELECT Id, Type, Flags, Data0, Data1, Data2, Data3, Data4, Data5, ScriptName FROM `areatrigger_template`");
    if (!templates.IsEmpty())
    {
        do
        {
            AreaTriggerTemplate areaTriggerTemplate = new AreaTriggerTemplate();
            areaTriggerTemplate.Id = templates.Read<uint>(0);
            AreaTriggerTypes type = (AreaTriggerTypes)templates.Read<byte>(1);
            if (type >= AreaTriggerTypes.Max)
            {
                Log.outError(LogFilter.Sql, "Table `areatrigger_template` has listed areatrigger (Id: {0}) with invalid type {1}.", areaTriggerTemplate.Id, type);
                continue;
            }
            areaTriggerTemplate.TriggerType = type;
            areaTriggerTemplate.Flags = (AreaTriggerFlags)templates.Read<uint>(2);
            // Copy the Data0..Data5 columns into the fixed-size native data array.
            unsafe
            {
                fixed (float* b = areaTriggerTemplate.DefaultDatas.Data)
                {
                    for (byte i = 0; i < SharedConst.MaxAreatriggerEntityData; ++i)
                    {
                        b[i] = templates.Read<float>(3 + i);
                    }
                }
            }
            areaTriggerTemplate.ScriptId = Global.ObjectMgr.GetScriptId(templates.Read<string>(9));
            // Attach the staged auxiliary rows collected above.
            areaTriggerTemplate.PolygonVertices = verticesByAreaTrigger[areaTriggerTemplate.Id];
            areaTriggerTemplate.PolygonVerticesTarget = verticesTargetByAreaTrigger[areaTriggerTemplate.Id];
            areaTriggerTemplate.Actions = actionsByAreaTrigger[areaTriggerTemplate.Id];
            areaTriggerTemplate.InitMaxSearchRadius();
            _areaTriggerTemplateStore[areaTriggerTemplate.Id] = areaTriggerTemplate;
        }
        while (templates.NextRow());
    }

    //                                                      0            1              2            3             4             5              6                  7             8
    SQLResult areatriggerSpellMiscs = DB.World.Query("SELECT SpellMiscId, AreaTriggerId, MoveCurveId, ScaleCurveId, MorphCurveId, FacingCurveId, DecalPropertiesId, TimeToTarget, TimeToTargetScale FROM `spell_areatrigger`");
    if (!areatriggerSpellMiscs.IsEmpty())
    {
        do
        {
            AreaTriggerMiscTemplate miscTemplate = new AreaTriggerMiscTemplate();
            miscTemplate.MiscId = areatriggerSpellMiscs.Read<uint>(0);
            uint areatriggerId = areatriggerSpellMiscs.Read<uint>(1);
            miscTemplate.Template = GetAreaTriggerTemplate(areatriggerId);
            if (miscTemplate.Template == null)
            {
                Log.outError(LogFilter.Sql, "Table `spell_areatrigger` reference invalid AreaTriggerId {0} for miscId {1}", areatriggerId, miscTemplate.MiscId);
                continue;
            }
            // Curve ids must exist in CliDB; unknown ids are logged and zeroed.
            Func<uint, uint> ValidateAndSetCurve = value =>
            {
                if (value != 0 && !CliDB.CurveStorage.ContainsKey(value))
                {
                    Log.outError(LogFilter.Sql, "Table `spell_areatrigger` has listed areatrigger (MiscId: {0}, Id: {1}) with invalid Curve ({2}), set to 0!", miscTemplate.MiscId, areatriggerId, value);
                    return (0);
                }
                return (value);
            };
            miscTemplate.MoveCurveId = ValidateAndSetCurve(areatriggerSpellMiscs.Read<uint>(2));
            miscTemplate.ScaleCurveId = ValidateAndSetCurve(areatriggerSpellMiscs.Read<uint>(3));
            miscTemplate.MorphCurveId = ValidateAndSetCurve(areatriggerSpellMiscs.Read<uint>(4));
            miscTemplate.FacingCurveId = ValidateAndSetCurve(areatriggerSpellMiscs.Read<uint>(5));
            miscTemplate.DecalPropertiesId = areatriggerSpellMiscs.Read<uint>(6);
            miscTemplate.TimeToTarget = areatriggerSpellMiscs.Read<uint>(7);
            miscTemplate.TimeToTargetScale = areatriggerSpellMiscs.Read<uint>(8);
            miscTemplate.SplinePoints = splinesBySpellMisc[miscTemplate.MiscId];
            _areaTriggerTemplateSpellMisc[miscTemplate.MiscId] = miscTemplate;
        }
        while (areatriggerSpellMiscs.NextRow());
    }
    else
    {
        Log.outInfo(LogFilter.ServerLoading, "Loaded 0 Spell AreaTrigger templates. DB table `spell_areatrigger` is empty.");
    }

    Log.outInfo(LogFilter.ServerLoading, "Loaded {0} spell areatrigger templates in {1} ms.", _areaTriggerTemplateStore.Count, Time.GetMSTimeDiffToNow(oldMSTime));
}
/// <summary>
/// Wraps the given map and positions the enumerator before the first element.
/// </summary>
public MultiMapEnumerator(MultiMap<TKey, TValue> map)
{
    this._map = map;
    this.Reset();
}
/// <summary>
/// Reloads character templates from the world database, replacing the
/// in-memory store. Class rows are staged in a multimap first so each template
/// can pick up its class list by template id; templates with no valid classes
/// are skipped.
/// </summary>
public void LoadCharacterTemplates()
{
    uint oldMSTime = Time.GetMSTime();
    _characterTemplateStore.Clear();

    MultiMap<uint, CharacterTemplateClass> characterTemplateClasses = new MultiMap<uint, CharacterTemplateClass>();
    SQLResult classesResult = DB.World.Query("SELECT TemplateId, FactionGroup, Class FROM character_template_class");
    if (!classesResult.IsEmpty())
    {
        do
        {
            uint templateId = classesResult.Read<uint>(0);
            FactionMasks factionGroup = (FactionMasks)classesResult.Read<byte>(1);
            byte classID = classesResult.Read<byte>(2);
            // The faction group must contain Player|Alliance or Player|Horde in full.
            if (!((factionGroup & (FactionMasks.Player | FactionMasks.Alliance)) == (FactionMasks.Player | FactionMasks.Alliance)) &&
                !((factionGroup & (FactionMasks.Player | FactionMasks.Horde)) == (FactionMasks.Player | FactionMasks.Horde)))
            {
                Log.outError(LogFilter.Sql, "Faction group {0} defined for character template {1} in `character_template_class` is invalid. Skipped.", factionGroup, templateId);
                continue;
            }
            // The class id must exist in the client DB storage.
            if (!CliDB.ChrClassesStorage.ContainsKey(classID))
            {
                Log.outError(LogFilter.Sql, "Class {0} defined for character template {1} in `character_template_class` does not exists, skipped.", classID, templateId);
                continue;
            }
            characterTemplateClasses.Add(templateId, new CharacterTemplateClass(factionGroup, classID));
        }
        while (classesResult.NextRow());
    }
    else
    {
        Log.outInfo(LogFilter.ServerLoading, "Loaded 0 character template classes. DB table `character_template_class` is empty.");
    }

    SQLResult templates = DB.World.Query("SELECT Id, Name, Description, Level FROM character_template");
    if (templates.IsEmpty())
    {
        Log.outInfo(LogFilter.ServerLoading, "Loaded 0 character templates. DB table `character_template` is empty.");
        return;
    }
    do
    {
        CharacterTemplate templ = new CharacterTemplate();
        templ.TemplateSetId = templates.Read<uint>(0);
        templ.Name = templates.Read<string>(1);
        templ.Description = templates.Read<string>(2);
        templ.Level = templates.Read<byte>(3);
        // Attach the staged class rows; a template without classes is unusable.
        templ.Classes = characterTemplateClasses[templ.TemplateSetId];
        if (templ.Classes.Empty())
        {
            Log.outError(LogFilter.Sql, "Character template {0} does not have any classes defined in `character_template_class`. Skipped.", templ.TemplateSetId);
            continue;
        }
        _characterTemplateStore[templ.TemplateSetId] = templ;
    }
    while (templates.NextRow());

    Log.outInfo(LogFilter.ServerLoading, "Loaded {0} character templates in {1} ms.", _characterTemplateStore.Count, Time.GetMSTimeDiffToNow(oldMSTime));
}
/// <summary>
/// Does <code>pattern</code> match <paramref name="tree"/>? Takes a compiled
/// pattern rather than a string representation; label captures produced during
/// matching are discarded.
/// </summary>
public virtual bool Matches(IParseTree tree, ParseTreePattern pattern)
{
    var scratchLabels = new MultiMap<string, IParseTree>();
    return MatchImpl(tree, pattern.PatternTree, scratchLabels) == null;
}
/// <summary>
/// Topologically sorts plugins so that every plugin appears after the plugins
/// it depends on (Kahn's algorithm over the reversed dependency edges).
/// Dependencies on plugins outside <paramref name="plugins"/> are ignored.
/// </summary>
/// <param name="plugins">The plugins to order.</param>
/// <returns>The plugins in dependency order.</returns>
/// <exception cref="RuntimeException">
/// Thrown when a cycle or duplicate dependency prevents a complete ordering.
/// </exception>
private static IList<PluginData> TopologicalSortByDependencies(IList<PluginData> plugins)
{
    Dictionary<string, PluginData> pluginsById = new Dictionary<string, PluginData>();
    // Number of not-yet-satisfied dependencies per plugin.
    Dictionary<PluginData, int> outgoingDependencyCounts = new Dictionary<PluginData, int>();
    // Reverse edges: dependency -> plugins that depend on it.
    MultiMap<PluginData, PluginData> incomingPluginDependencies = new MultiMap<PluginData, PluginData>();
    // Plugins whose dependencies are all satisfied and can be emitted next.
    Queue<PluginData> isolatedPlugins = new Queue<PluginData>();
    foreach (PluginData plugin in plugins)
    {
        pluginsById[plugin.Plugin.PluginId] = plugin;
        outgoingDependencyCounts[plugin] = 0;
    }
    foreach (PluginData plugin in plugins)
    {
        foreach (Dependency dependency in plugin.Plugin.Dependencies)
        {
            PluginData pluginDependency;
            if (pluginsById.TryGetValue(dependency.PluginId, out pluginDependency))
            {
                incomingPluginDependencies.Add(pluginDependency, plugin);
                outgoingDependencyCounts[plugin] += 1;
            }
        }
    }
    // Seed the queue with plugins that have no dependencies.
    foreach (var pair in outgoingDependencyCounts)
    {
        if (pair.Value == 0)
        {
            isolatedPlugins.Enqueue(pair.Key);
        }
    }
    List<PluginData> result = new List<PluginData>(plugins.Count);
    while (isolatedPlugins.Count != 0)
    {
        PluginData plugin = isolatedPlugins.Dequeue();
        result.Add(plugin);
        // Releasing this plugin satisfies one dependency of each dependent.
        foreach (PluginData incomingPluginDependency in incomingPluginDependencies[plugin])
        {
            int newCount = outgoingDependencyCounts[incomingPluginDependency] -= 1;
            if (newCount == 0)
            {
                isolatedPlugins.Enqueue(incomingPluginDependency);
            }
        }
    }
    // Any plugin with a remaining count was part of a cycle (or had duplicate edges).
    if (result.Count != plugins.Count)
    {
        StringBuilder message = new StringBuilder();
        message.Append("Could not topologically sort the following plugins either due to dependency cycles or duplicate dependencies: ");
        bool first = true;
        foreach (var pair in outgoingDependencyCounts)
        {
            if (pair.Value != 0)
            {
                if (first)
                {
                    first = false;
                }
                else
                {
                    message.Append(", ");
                }
                message.Append("'").Append(pair.Key.Plugin.PluginId).Append("'");
            }
        }
        message.Append(".");
        throw new RuntimeException(message.ToString());
    }
    return (result);
}
/// <summary>
/// Creates a repeat holder for the creature identified by
/// <paramref name="guid"/> with an empty group map.
/// </summary>
public CreatureTextRepeatHolder(ulong guid)
{
    this.Guid = guid;
    this.Groups = new MultiMap<byte, byte>();
}
/// <summary>
/// Creates a text holder for the given creature entry with an empty
/// group -> text-entry map.
/// </summary>
public CreatureTextHolder(uint entry)
{
    this.Entry = entry;
    this.Groups = new MultiMap<uint, CreatureTextEntry>();
}
/// <summary>
/// Captures the RNG and the room grid that spawning will operate on.
/// </summary>
public SpawnSpec(System.Random rand, MultiMap<GridSpace> room)
{
    this.Room = room;
    this.Random = rand;
}
/// <summary>
/// Forwards the supervoxel clusters and their adjacency multimap to the
/// native LCCP segmentation implementation behind this wrapper.
/// </summary>
public override void SetInputSupervoxels(SupervoxelClusters<PointXYZ> clusters, MultiMap<uint, uint> adjacency)
{
    Invoke.segmentation_lccpsegmentation_xyz_setInputSupervoxels(_ptr, clusters, adjacency);
}
/// <summary>
/// Drops the enumerator's references so the underlying map and its inner
/// enumerators become collectable.
/// </summary>
public void Dispose()
{
    _map = null;
    _keyEnumerator = null;
    _valueEnumerator = null;
}
/// <summary>
/// End-to-end MultiMap test: populates three keys with six values, verifies
/// keys/values/lookups, then round-trips the map through file persistence and
/// re-verifies the deserialized copy.
/// </summary>
public void TestCase()
{
    var map = new MultiMap<string, string>();
    // Populate: three values under key1, two under key2, one under key3.
    {
        map.Add("key1", "value11");
        map.Add("key1", "value12");
        map.Add("key1", "value13");
        map.Add("key2", "value21");
        map.Add("key2", "value22");
        map.Add("key3", "value31");
    }

    Assert.AreEqual(3, map.Keys.Count);
    CollectionAssert.AreEqual(new[] { "key1", "key2", "key3" }, map.Keys);
    // Values counts per-key collections (one per key), not individual values.
    Assert.AreEqual(3, map.Values.Count);
    Assert.IsTrue(map.ContainsKey("key1"));
    Assert.IsTrue(map.ContainsKey("key2"));
    Assert.IsTrue(map.ContainsKey("key3"));
    Assert.IsTrue(map.ContainsValue("value11"));
    Assert.IsTrue(map.ContainsValue("value12"));
    Assert.IsTrue(map.ContainsValue("value13"));
    Assert.IsTrue(map.ContainsValue("value21"));
    Assert.IsTrue(map.ContainsValue("value22"));
    Assert.IsTrue(map.ContainsValue("value31"));
    Assert.AreEqual(3, map["key1"].Count);
    CollectionAssert.AreEqual(new[] { "value11", "value12", "value13" }, map["key1"]);
    Assert.AreEqual(2, map["key2"].Count);
    CollectionAssert.AreEqual(new[] { "value21", "value22" }, map["key2"]);
    Assert.AreEqual(1, map["key3"].Count);
    CollectionAssert.AreEqual(new[] { "value31" }, map["key3"]);

    // Round-trip through persistence and verify the deserialized copy matches.
    using (var tmpfile = new TemporaryFile())
    {
        PersistenceHelper.SaveObj(map, tmpfile.FullName);
        var deserialized = PersistenceHelper.RestoreObj<MultiMap<string, string>>(tmpfile.FullName);
        Assert.AreEqual(3, deserialized.Keys.Count);
        CollectionAssert.AreEqual(new[] { "key1", "key2", "key3" }, deserialized.Keys);
        Assert.AreEqual(3, deserialized.Values.Count);
        Assert.IsTrue(deserialized.ContainsKey("key1"));
        Assert.IsTrue(deserialized.ContainsKey("key2"));
        Assert.IsTrue(deserialized.ContainsKey("key3"));
        Assert.IsTrue(deserialized.ContainsValue("value11"));
        Assert.IsTrue(deserialized.ContainsValue("value12"));
        Assert.IsTrue(deserialized.ContainsValue("value13"));
        Assert.IsTrue(deserialized.ContainsValue("value21"));
        Assert.IsTrue(deserialized.ContainsValue("value22"));
        Assert.IsTrue(deserialized.ContainsValue("value31"));
    }
}
/// <summary>
/// Persists the path -> tags associations for the local device: ensures the
/// device row exists, then for each path ensures the path and device-path rows
/// exist and links every tag to that device path.
/// </summary>
public void LinkTagsToCurrentDevicePaths(MultiMap<string, string> pathToTags, SQLiteCommand cmd)
{
    var deviceDescription = Configuration.GetLocalDeviceDescription();
    EnsureDevice(deviceDescription, cmd);
    foreach (var devicePath in pathToTags.Keys)
    {
        EnsurePath(devicePath, cmd);
        EnsureDevicePath(deviceDescription, devicePath, cmd);
        foreach (var tagName in pathToTags[devicePath])
        {
            InsertOrIgnoreTag(tagName, cmd);
            LinkDevicePathToTag(deviceDescription, devicePath, tagName, cmd);
        }
    }
}
/// <summary>
/// Creates a debug template with fresh construction and attribute event records.
/// </summary>
public DebugTemplate()
{
    this.newSTEvent = new ConstructionEvent();
    this.addAttrEvents = new MultiMap<string, AddAttributeEvent>();
}
/// <summary>
/// Recursively matches <paramref name="tree"/> against
/// <paramref name="patternTree"/>, recording tag/label captures in
/// <paramref name="labels"/>. Returns the first node that failed to match,
/// or <see langword="null"/> when the whole subtree matches.
/// </summary>
/// <exception cref="ArgumentException">
/// Thrown if <paramref name="tree"/> or <paramref name="patternTree"/> is null.
/// </exception>
protected internal virtual IParseTree MatchImpl(IParseTree tree, IParseTree patternTree, MultiMap<string, IParseTree> labels)
{
    if (tree == null)
    {
        throw new ArgumentException("tree cannot be null");
    }
    if (patternTree == null)
    {
        throw new ArgumentException("patternTree cannot be null");
    }
    // x and <ID>, x and y, or x and x; or could be mismatched types
    if (tree is ITerminalNode && patternTree is ITerminalNode)
    {
        ITerminalNode t1 = (ITerminalNode)tree;
        ITerminalNode t2 = (ITerminalNode)patternTree;
        IParseTree mismatchedNode = null;
        // Both are tokens and they have the same type.
        if (t1.Symbol.Type == t2.Symbol.Type)
        {
            if (t2.Symbol is TokenTagToken)
            {
                // x and <ID>: the pattern token is a tag, so capture the node.
                TokenTagToken tokenTagToken = (TokenTagToken)t2.Symbol;
                // Track label->list-of-nodes for both token name and label (if any).
                labels.Map(tokenTagToken.TokenName, tree);
                if (tokenTagToken.Label != null)
                {
                    labels.Map(tokenTagToken.Label, tree);
                }
            }
            else
            {
                if (t1.GetText().Equals(t2.GetText(), StringComparison.Ordinal))
                {
                    // x and x: same text — a match; nothing to record.
                }
                else
                {
                    // x and y: same token type but different text — mismatch.
                    if (mismatchedNode == null)
                    {
                        mismatchedNode = t1;
                    }
                }
            }
        }
        else
        {
            // Different token types — mismatch.
            if (mismatchedNode == null)
            {
                mismatchedNode = t1;
            }
        }
        return mismatchedNode;
    }
    if (tree is ParserRuleContext && patternTree is ParserRuleContext)
    {
        ParserRuleContext r1 = (ParserRuleContext)tree;
        ParserRuleContext r2 = (ParserRuleContext)patternTree;
        IParseTree mismatchedNode = null;
        // (expr ...) and <expr>
        RuleTagToken ruleTagToken = GetRuleTagToken(r2);
        if (ruleTagToken != null)
        {
            if (r1.RuleIndex == r2.RuleIndex)
            {
                // Track label->list-of-nodes for both rule name and label (if any).
                labels.Map(ruleTagToken.RuleName, tree);
                if (ruleTagToken.Label != null)
                {
                    labels.Map(ruleTagToken.Label, tree);
                }
            }
            else
            {
                if (mismatchedNode == null)
                {
                    mismatchedNode = r1;
                }
            }
            return mismatchedNode;
        }
        // (expr ...) and (expr ...): child counts must agree before recursing.
        if (r1.ChildCount != r2.ChildCount)
        {
            if (mismatchedNode == null)
            {
                mismatchedNode = r1;
            }
            return mismatchedNode;
        }
        // Recurse into children pairwise; the first child mismatch wins.
        int n = r1.ChildCount;
        for (int i = 0; i < n; i++)
        {
            IParseTree childMatch = MatchImpl(r1.GetChild(i), patternTree.GetChild(i), labels);
            if (childMatch != null)
            {
                return childMatch;
            }
        }
        return mismatchedNode;
    }
    // If nodes aren't both tokens or both rule nodes, they can't match.
    return tree;
}
/// <summary>
/// Copy constructor: clones <paramref name="prototype"/> while starting fresh
/// construction and attribute event records for the copy.
/// </summary>
public DebugTemplate(Template prototype)
    : base(prototype)
{
    this.newSTEvent = new ConstructionEvent();
    this.addAttrEvents = new MultiMap<string, AddAttributeEvent>();
}
/// <summary>
/// Attaches <paramref name="obj"/> to <paramref name="cluster"/>: flood-searches candidate
/// shift offsets where the two layouts overlap only along compatible edges, picks weighted
/// candidates at random, and connects the object via doors placed on the shared boundary.
/// Throws <see cref="ArgumentException"/> when no placement with doors can be found.
/// </summary>
protected void ClusterAround(LayoutObjectContainer cluster, LayoutObject obj)
{
    #region Debug
    if (BigBoss.Debug.logging(Logs.LevelGen))
    {
        BigBoss.Debug.printHeader("Cluster Around");
    }
    #endregion
    // Start from just outside the cluster, then pull back one cell so the grids overlap.
    obj.ShiftOutside(cluster, new Point(1, 0), null, false, false);
    obj.Shift(-1, 0); // Shift to overlapping slightly
    MultiMap<bool> visited = new MultiMap<bool>();
    visited[0, 0] = true;
    ProbabilityList<ClusterInfo> shiftOptions = new ProbabilityList<ClusterInfo>();
    Queue<Point> shiftQueue = new Queue<Point>();
    shiftQueue.Enqueue(new Point());
    Container2D<GenSpace> clusterGrid = cluster.GetGrid();
    Container2D<GenSpace> objGrid = obj.GetGrid();
    #region Debug
    if (BigBoss.Debug.logging(Logs.LevelGen))
    {
        var tmp = new MultiMap<GenSpace>();
        tmp.PutAll(obj.GetGrid());
        tmp.PutAll(cluster.GetGrid());
        tmp.ToLog(Logs.LevelGen, "Starting placement");
    }
    #endregion
    // BFS over shift offsets: a shift passes when all overlaps are edge-on-edge
    // and at least one intersection point exists.
    while (shiftQueue.Count > 0)
    {
        Point curShift = shiftQueue.Dequeue();
        #region Debug
        if (BigBoss.Debug.Flag(DebugManager.DebugFlag.FineSteps) && BigBoss.Debug.logging(Logs.LevelGen))
        {
            var tmpMap = new MultiMap<GenSpace>();
            tmpMap.PutAll(clusterGrid);
            tmpMap.PutAll(objGrid, curShift);
            tmpMap.ToLog(Logs.LevelGen, "Analyzing at shift " + curShift);
        }
        #endregion
        // Test if pass
        List<Point> intersectPoints = new List<Point>();
        if (objGrid.DrawAll((arr, x, y) =>
        {
            if (GridTypeEnum.EdgeType(arr[x, y].GetGridType()))
            {
                GridType clusterType = clusterGrid[x + curShift.x, y + curShift.y].GetGridType();
                if (clusterType == GridType.NULL)
                {
                    return (true);
                }
                intersectPoints.Add(new Point(x, y));
                return (GridTypeEnum.EdgeType(clusterType));
            }
            else
            {
                // Non-edge cells of the object must not overlap the cluster at all.
                return (!clusterGrid.Contains(x + curShift.x, y + curShift.y));
            }
        }) && intersectPoints.Count > 0)
        {
            // Passed test
            // queue surrounding points
            visited.DrawAround(curShift.x, curShift.y, true, Draw.Not(Draw.EqualTo(true)).IfThen(Draw.AddTo<bool>(shiftQueue).And(Draw.SetTo(true))));
            #region Debug
            if (BigBoss.Debug.Flag(DebugManager.DebugFlag.FineSteps) && BigBoss.Debug.logging(Logs.LevelGen))
            {
                BigBoss.Debug.w(Logs.LevelGen, "passed with " + intersectPoints.Count);
            }
            #endregion
            // Cubic weighting strongly favors shifts with more shared edge cells.
            shiftOptions.Add(new ClusterInfo() { Shift = curShift, Intersects = intersectPoints }, Math.Pow(intersectPoints.Count, 3));
        }
    }
    #region Debug
    if (BigBoss.Debug.logging(Logs.LevelGen))
    {
        shiftOptions.ToLog(Logs.LevelGen, "Shift options");
    }
    #endregion
    // NOTE(review): clusterDoorOptions is never cleared between Take() iterations, so door
    // candidates from a backed-out shift carry over to the next attempt — confirm intended.
    List<Point> clusterDoorOptions = new List<Point>();
    ClusterInfo info;
    var placed = new List<Value2D<GenSpace>>(0);
    while (shiftOptions.Take(Rand, out info))
    {
        clusterGrid.DrawPoints(info.Intersects, Draw.CanDrawDoor().IfThen(Draw.AddTo<GenSpace>(clusterDoorOptions)).Shift(info.Shift));
        #region Debug
        if (BigBoss.Debug.logging(Logs.LevelGen))
        {
            BigBoss.Debug.w(Logs.LevelGen, "selected " + info.Shift);
            var tmpMap = new MultiMap<GenSpace>();
            clusterGrid.DrawAll(Draw.CopyTo(tmpMap));
            objGrid.DrawAll(Draw.CopyTo(tmpMap, info.Shift));
            tmpMap.DrawPoints(info.Intersects, Draw.SetTo(GridType.INTERNAL_RESERVED_CUR, Theme).Shift(info.Shift));
            tmpMap.ToLog(Logs.LevelGen, "Intersect Points");
            tmpMap = new MultiMap<GenSpace>();
            clusterGrid.DrawAll(Draw.CopyTo(tmpMap));
            objGrid.DrawAll(Draw.CopyTo(tmpMap, info.Shift));
            tmpMap.DrawPoints(clusterDoorOptions, Draw.SetTo(GridType.Door, Theme));
            tmpMap.ToLog(Logs.LevelGen, "Cluster door options");
        }
        #endregion
        if (clusterDoorOptions.Count > 0)
        {
            // Cluster side has door options
            obj.Shift(info.Shift.x, info.Shift.y);
            placed = obj.PlaceSomeDoors(clusterDoorOptions, Theme, Rand);
            if (placed.Count != 0)
            {
                // Placed a door: wire up connections to the owning cluster objects and stop.
                foreach (Point p in placed)
                {
                    LayoutObject clusterObj;
                    cluster.GetObjAt(p, out clusterObj);
                    obj.Connect(clusterObj);
                }
                break;
            }
            else
            {
                #region Debug
                if (BigBoss.Debug.logging(Logs.LevelGen))
                {
                    BigBoss.Debug.w(Logs.LevelGen, "selected point failed to match " + info.Shift + ". Backing up");
                }
                #endregion
                // Undo the tentative shift and try the next candidate.
                obj.Shift(-info.Shift.x, -info.Shift.y);
            }
        }
    }
    if (placed.Count == 0)
    {
        throw new ArgumentException("Could not cluster rooms");
    }
    #region Debug
    if (BigBoss.Debug.logging(Logs.LevelGen))
    {
        var tmpMap = new MultiMap<GenSpace>();
        tmpMap.PutAll(clusterGrid);
        tmpMap.PutAll(obj.GetGrid());
        tmpMap.ToLog(Logs.LevelGen, "Final setup " + info.Shift);
        BigBoss.Debug.printFooter("Cluster Around");
    }
    #endregion
}
public static void Proto2CS(string protoName, string outputPath, int startOpcode, HeadFlag flag) { parentMsg = new MultiMap <string, string>(); string proto = Path.Combine(protoPath, protoName); string csPath = Path.Combine(outputPath, Path.GetFileNameWithoutExtension(proto) + ".cs"); string s = File.ReadAllText(proto); StringBuilder sb = new StringBuilder(); sb.Append("using ProtoBuf;\n"); sb.Append("using System.Collections.Generic;\n"); sb.Append("using MongoDB.Bson.Serialization.Attributes;\n"); sb.Append("namespace Model\n"); sb.Append("{\n"); bool isMsgStart = false; foreach (string line in s.Split('\n')) { string newline = line.Trim(); if (newline == "") { continue; } if (newline.StartsWith("//")) { sb.Append($"{newline}\n"); } if (newline.StartsWith("message")) { isMsgStart = true; string msgName = newline.Split(splitChars, StringSplitOptions.RemoveEmptyEntries)[1]; string[] ss = newline.Split(new [] { "//" }, StringSplitOptions.RemoveEmptyEntries); string parentClass = ""; if (ss.Length == 2) { parentClass = ss[1]; parentMsg.Add(parentClass, msgName); } msgOpcode.Add(new OpcodeInfo() { Name = msgName, Opcode = ++startOpcode }); sb.Append($"\t[Message(Opcode.{msgName})]\n"); sb.Append($"\t[ProtoContract]\n"); sb.Append($"\tpublic partial class {msgName}"); if (parentClass != "") { sb.Append($": {parentClass}\n"); } else { sb.Append("\n"); } } if (isMsgStart && newline == "{") { sb.Append("\t{\n"); } // 成员 if (newline.StartsWith("required")) { Members(sb, newline, true); } if (newline.StartsWith("optional")) { Members(sb, newline, false); } if (newline.StartsWith("repeated")) { Repeated(sb, newline); } if (isMsgStart && newline == "}") { isMsgStart = false; sb.Append("\t}\n\n"); } } GenerateHead(sb, flag); sb.Append("}\n"); File.WriteAllText(csPath, sb.ToString()); }
public void SetUp() { _multiMap = new MultiMap <string, string>(); }
public abstract void SetInputSupervoxels(SupervoxelClusters <PointT> clusters, MultiMap <uint, uint> adjacency);
/// <summary>
/// Translates the proto file <paramref name="protoName"/> into a C# message file in
/// <paramref name="outputPath"/> under namespace <paramref name="ns"/>, wiring opcodes
/// through <paramref name="opcodeClassName"/> and injecting RpcId/Error/ActorId/Id
/// members for the known framework interfaces (IRequest, IResponse, IActor*, IFrameMessage).
/// </summary>
public static void Proto2CS(string ns, string protoName, string outputPath, string opcodeClassName, int startOpcode, HeadFlag flag, bool isClient = true)
{
    msgOpcode.Clear();
    parentMsg = new MultiMap<string, string>();
    string proto = Path.Combine(protoPath, protoName);
    string csPath = Path.Combine(outputPath, Path.GetFileNameWithoutExtension(proto) + ".cs");
    string s = File.ReadAllText(proto);
    StringBuilder sb = new StringBuilder();
    sb.Append("using ProtoBuf;\n");
    sb.Append("using ETModel;\n");
    sb.Append("using System.Collections.Generic;\n");
    sb.Append("using MongoDB.Bson.Serialization.Attributes;\n");
    sb.Append($"namespace {ns}\n");
    sb.Append("{\n");
    bool isMsgStart = false;
    string parentClass = "";
    foreach (string line in s.Split('\n'))
    {
        string newline = line.Trim();
        if (newline == "")
        {
            continue;
        }
        if (newline.StartsWith("//"))
        {
            // Carry proto comments over into the generated file verbatim.
            sb.Append($"{newline}\n");
        }
        if (newline.StartsWith("message"))
        {
            parentClass = "";
            isMsgStart = true;
            string msgName = newline.Split(splitChars, StringSplitOptions.RemoveEmptyEntries)[1];
            // An optional parent class/interface is declared after "//" on the message line.
            string[] ss = newline.Split(new [] { "//" }, StringSplitOptions.RemoveEmptyEntries);
            if (ss.Length == 2)
            {
                parentClass = ss[1].Trim();
            }
            msgOpcode.Add(new OpcodeInfo() { Name = msgName, Opcode = ++startOpcode });
            sb.Append($"\t[Message({opcodeClassName}.{msgName})]\n");
            sb.Append($"\t[ProtoContract]\n");
            sb.Append($"\tpublic partial class {msgName}");
            if (parentClass == "IActorMessage" || parentClass == "IActorRequest" || parentClass == "IActorResponse" || parentClass == "IFrameMessage")
            {
                sb.Append($": {parentClass}\n");
            }
            else if (parentClass != "")
            {
                sb.Append($": {parentClass}\n");
            }
            else
            {
                sb.Append("\n");
            }
        }
        if (isMsgStart && newline == "{")
        {
            sb.Append("\t{\n");
            // Inject the framework-mandated members for the known interfaces.
            if (parentClass == "IRequest" || parentClass == "IActorRequest" || parentClass == "IActorMessage" || parentClass == "IFrameMessage")
            {
                sb.AppendLine("\t\t[ProtoMember(90, IsRequired = true)]");
                sb.AppendLine("\t\tpublic int RpcId { get; set; }\n");
            }
            if (parentClass == "IResponse" || parentClass == "IActorResponse")
            {
                sb.AppendLine("\t\t[ProtoMember(90, IsRequired = true)]");
                sb.AppendLine("\t\tpublic int RpcId { get; set; }\n");
                sb.AppendLine("\t\t[ProtoMember(91, IsRequired = true)]");
                sb.AppendLine("\t\tpublic int Error { get; set; }\n");
                sb.AppendLine("\t\t[ProtoMember(92, IsRequired = true)]");
                sb.AppendLine("\t\tpublic string Message { get; set; }\n");
            }
            if (parentClass == "IActorRequest" || parentClass == "IActorMessage")
            {
                sb.AppendLine("\t\t[ProtoMember(93, IsRequired = true)]");
                sb.AppendLine("\t\tpublic long ActorId { get; set; }\n");
            }
            if (parentClass == "IFrameMessage")
            {
                sb.AppendLine("\t\t[ProtoMember(94, IsRequired = true)]");
                sb.AppendLine("\t\tpublic long Id { get; set; }\n");
            }
        }
        // members
        if (newline.StartsWith("required"))
        {
            Members(sb, newline, true);
        }
        if (newline.StartsWith("optional"))
        {
            Members(sb, newline, false);
        }
        if (newline.StartsWith("repeated"))
        {
            Repeated(sb, ns, newline, isClient);
        }
        if (isMsgStart && newline == "}")
        {
            isMsgStart = false;
            sb.Append("\t}\n\n");
        }
    }
    sb.Append("}\n");
    //if (!isClient)
    //{
    //GenerateHead(sb, ns, flag, opcodeClassName);
    //}
    File.WriteAllText(csPath, sb.ToString());
}
/// <summary>
/// Draws the custom inspector: sprite/texture conversion tools, atlas search and
/// replacement, duplicate-sprite listing, per-target atlas membership, orphan texture
/// listing, and a sprite finder for a chosen atlas.
/// </summary>
public override void OnInspectorGUI()
{
    // --- Section: convert sprites under a folder into standalone textures ---
    if (EditorUI.DrawHeader("Sprite -> Texture"))
    {
        EditorUI.BeginContents();
        EditorGUILayout.BeginHorizontal();
        EditorGUIUtil.ObjectField<Object>("Folder", ref texFolder, false);
        if (GUILayout.Button("Sprite -> Texture") && EditorUtility.DisplayDialog("Warning", "BackUp?", "OK", "Cancel"))
        {
            ConvertToTexture(texFolder);
        }
        EditorGUILayout.EndHorizontal();
        EditorGUIUtil.ObjectFieldReorderList(texList);
        EditorUI.EndContents();
    }
    // --- Section: texture -> sprite tools, atlas search/replace, related utilities ---
    if (EditorUI.DrawHeader("Texture -> Sprite"))
    {
        EditorUI.BeginContents();
        EditorGUILayout.BeginHorizontal();
        ComponentSelector.Draw<UIAtlas>("Atlas", atlasToAdd, OnSelectAtlas, true, GUILayout.MinWidth(80f));
        if (GUILayout.Button("Add Selected"))
        {
            // Register every UIAtlas found on the currently selected GameObjects.
            foreach (Object o in Selection.objects)
            {
                if (o is GameObject)
                {
                    OnSelectAtlas((o as GameObject).GetComponent<UIAtlas>());
                }
            }
        }
        EditorGUILayout.EndHorizontal();
        EditorGUIUtil.TextField("Search sprite", ref searchSpriteName);
        if (searchSpriteName.IsNotEmpty())
        {
            // Show only atlases that actually contain the searched sprite.
            List<UIAtlas> filtered = new List<UIAtlas>();
            foreach (UIAtlas a in atlasRefs)
            {
                if (a.GetSprite(searchSpriteName) != null)
                {
                    filtered.Add(a);
                }
            }
            EditorGUIUtil.ObjectFieldReorderList(filtered);
            EditorGUILayout.BeginHorizontal();
            EditorGUIUtil.Popup("Change to", ref changeAtlas, filtered);
            if (GUILayout.Button("Apply"))
            {
                // Rewrite atlas references across all prefabs and all scenes.
                BuildScript.ForEachPrefab((path, prefab) =>
                {
                    ChangeAtlas(prefab, filtered, changeAtlas);
                    return (null);
                });
                BuildScript.ForEachScene(list =>
                {
                    foreach (Transform t in list)
                    {
                        ChangeAtlas(t.gameObject, filtered, changeAtlas);
                    }
                    return (null);
                });
            }
            EditorGUILayout.EndHorizontal();
            EditorGUIUtil.ObjectFieldReorderList(spriteList);
        }
        else
        {
            // No search term: show the full atlas list and persist reorderings.
            if (EditorGUIUtil.ObjectFieldReorderList(atlasRefs))
            {
                SaveAtlasRefs();
            }
        }
        if (dupSprites.IsNotEmpty())
        {
            if (EditorUI.DrawHeader("Duplicate sprites"))
            {
                EditorUI.BeginContents();
                // Lay out duplicate-sprite buttons in a grid sized to the inspector width.
                float cellWidth = 200f;
                float width = GetWidth();
                int column = Mathf.Max((int)(width / cellWidth), 1);
                int i = 0;
                foreach (string d in dupSprites)
                {
                    if (i == 0)
                    {
                        EditorGUILayout.BeginHorizontal();
                    }
                    if (GUILayout.Button(d, GUILayout.Width(200)))
                    {
                        // Clicking a duplicate name feeds it back into the search box.
                        searchSpriteName = d;
                    }
                    i = i + 1;
                    if (i == column)
                    {
                        EditorGUILayout.EndHorizontal();
                        i = 0;
                    }
                }
                if (i != 0)
                {
                    EditorGUILayout.EndHorizontal();
                }
                EditorUI.EndContents();
            }
        }
        EditorGUILayout.BeginHorizontal();
        EditorGUIUtil.ObjectField("Target", ref targetObj, true);
        GUI.enabled = targetObj != null;
        if (GUILayout.Button("Convert to Sprite") && EditorUtility.DisplayDialog("Warning", "BackUp?", "OK"))
        {
            ConvertToSprite();
        }
        if (GUILayout.Button("Set TexSetter"))
        {
            foreach (UITexture tex in targetObj.GetComponentsInChildren<UITexture>(true))
            {
                TexSetterInspector.SetIfCdn(tex);
            }
        }
        GUI.enabled = true;
        EditorGUILayout.EndHorizontal();
        // collect atlas
        GUI.enabled = targetObj != null;
        if (EditorUI.DrawHeader("Member Atlases"))
        {
            EditorUI.BeginContents();
            if (targetObj != null)
            {
                // Group the target's sprites by the atlas they belong to.
                MultiMap<UIAtlas, UISprite> collect = new MultiMap<UIAtlas, UISprite>();
                foreach (UISprite s in targetObj.GetComponentsInChildren<UISprite>(true))
                {
                    collect.Add(s.atlas, s);
                }
                foreach (KeyValuePair<UIAtlas, List<UISprite>> pair in collect)
                {
                    // Foldout state is tracked via membership in 'folding'.
                    if (EditorGUILayout.Foldout(folding.Contains(pair.Key), pair.Key.name))
                    {
                        folding.Add(pair.Key);
                        EditorGUI.indentLevel++;
                        foreach (UISprite s in pair.Value)
                        {
                            EditorGUILayout.ObjectField(s.gameObject, typeof(GameObject), true);
                        }
                        EditorGUI.indentLevel--;
                    }
                    else
                    {
                        folding.Remove(pair.Key);
                    }
                }
            }
            EditorUI.EndContents();
        }
        if (EditorUI.DrawHeader("Orphan Texture"))
        {
            EditorUI.BeginContents();
            if (targetObj != null)
            {
                // List UITextures under the target that have no TexLoader attached.
                foreach (UITexture tex in targetObj.GetComponentsInChildren<UITexture>(true))
                {
                    if (tex.GetComponent<TexLoader>() == null)
                    {
                        EditorGUILayout.BeginHorizontal();
                        EditorGUILayout.ObjectField(tex.gameObject, typeof(GameObject), true);
                        EditorGUILayout.ObjectField(tex.mainTexture, typeof(Texture), false);
                        EditorGUILayout.EndHorizontal();
                    }
                }
            }
            EditorUI.EndContents();
        }
        GUI.enabled = true;
        EditorUI.EndContents();
    }
    // --- Section: find all loaded UISprites that use a chosen atlas ---
    if (EditorUI.DrawHeader("Find All Sprites"))
    {
        EditorUI.BeginContents();
        EditorGUILayout.BeginHorizontal();
        ComponentSelector.Draw<UIAtlas>("Atlas", atlas4Sprite, OnSelectAtlasForSprite, true, GUILayout.MinWidth(80f));
        if (GUILayout.Button("Find"))
        {
            var list = Resources.FindObjectsOfTypeAll<UISprite>().ToList(i => i as UISprite);
            s4a.Clear();
            foreach (var s in list)
            {
                if (s.atlas == atlas4Sprite)
                {
                    s4a.Add(s);
                }
            }
        }
        EditorGUILayout.EndHorizontal();
        EditorUI.EndContents();
    }
    GUI.enabled = true;
}
public void InitializeMultiMaps() { _marshalledObjects = new MultiMap<Int32>(); _visitedElements = new MultiMap<Int32>(); _needsAttributeHashCode = new MultiMap<Int32>(); _unmarshalledObjects = new Dictionary<string, object>(); }
public void InitializeParentMapData(MultiMap <uint, uint> mapData) { _parentMapData = mapData; }
internal Block() { Siblings = new HashSet <string>(); Vars = new MultiMap <string, string>(); }
public AvailableHotfixes(int hotfixCacheVersion, uint hotfixCount, MultiMap <int, Tuple <uint, int> > hotfixes) : base(ServerOpcodes.AvailableHotfixes) { HotfixCacheVersion = hotfixCacheVersion; HotfixCount = hotfixCount; Hotfixes = hotfixes; }
/** Walk each NFA configuration in this DFA state looking for a conflict
 * where (s|i|ctx) and (s|j|ctx) exist, indicating that state s with
 * context conflicting ctx predicts alts i and j. Return an Integer set
 * of the alternative numbers that conflict. Two contexts conflict if
 * they are equal or one is a stack suffix of the other or one is
 * the empty context.
 *
 * Use a hash table to record the lists of configs for each state
 * as they are encountered. We need only consider states for which
 * there is more than one configuration. The configurations' predicted
 * alt must be different or must have different contexts to avoid a
 * conflict.
 *
 * Don't report conflicts for DFA states that have conflicting Tokens
 * rule NFA states; they will be resolved in favor of the first rule.
 */
protected virtual HashSet<int> GetConflictingAlts()
{
    // TODO this is called multiple times: cache result?
    HashSet<int> nondeterministicAlts = new HashSet<int>();
    // If only 1 NFA conf then no way it can be nondeterministic;
    // save the overhead. There are many o-a->o NFA transitions
    // and so we save a hash map and iterator creation for each
    // state.
    int numConfigs = nfaConfigurations.Size();
    if ( numConfigs <= 1 )
    {
        return null;
    }
    // First get a list of configurations for each state.
    // Most of the time, each state will have one associated configuration.
    MultiMap<int, NFAConfiguration> stateToConfigListMap = new MultiMap<int, NFAConfiguration>();
    for ( int i = 0; i < numConfigs; i++ )
    {
        NFAConfiguration configuration = (NFAConfiguration)nfaConfigurations.Get( i );
        int stateI = configuration.state;
        stateToConfigListMap.Map( stateI, configuration );
    }
    // potential conflicts are states with > 1 configuration and diff alts
    ICollection<int> states = stateToConfigListMap.Keys.ToArray();
    int numPotentialConflicts = 0;
    foreach ( int stateI in states )
    {
        bool thisStateHasPotentialProblem = false;
        var configsForState = stateToConfigListMap.get( stateI );
        int alt = 0;
        int numConfigsForState = configsForState.Count;
        for ( int i = 0; i < numConfigsForState && numConfigsForState > 1; i++ )
        {
            NFAConfiguration c = (NFAConfiguration)configsForState[i];
            if ( alt == 0 )
            {
                alt = c.alt;
            }
            else if ( c.alt != alt )
            {
                // 11/28/2005: don't report closures that pinch back
                // together in Tokens rule. We want to silently resolve
                // to the first token definition ala lex/flex by ignoring
                // these conflicts.
                // Also this ensures that lexers look for more and more
                // characters (longest match) before resorting to predicates.
                // TestSemanticPredicates.testLexerMatchesLongestThenTestPred()
                // for example would terminate at state s1 and test predicate
                // meaning input "ab" would test preds to decide what to
                // do but it should match rule C w/o testing preds.
                if ( dfa.nfa.grammar.type != GrammarType.Lexer || !dfa.NFADecisionStartState.enclosingRule.Name.Equals( Grammar.ArtificialTokensRuleName ) )
                {
                    numPotentialConflicts++;
                    thisStateHasPotentialProblem = true;
                }
            }
        }
        if ( !thisStateHasPotentialProblem )
        {
            // remove NFA state's configurations from
            // further checking; no issues with it
            // (can't remove as it's concurrent modification; set to null)
            stateToConfigListMap[stateI] = null;
        }
    }
    // a fast check for potential issues; most states have none
    if ( numPotentialConflicts == 0 )
    {
        return null;
    }
    // we have a potential problem, so now go through config lists again
    // looking for different alts (only states with potential issues
    // are left in the states set).  Now we will check context.
    // For example, the list of configs for NFA state 3 in some DFA
    // state might be:
    //   [3|2|[28 18 $], 3|1|[28 $], 3|1, 3|2]
    // I want to create a map from context to alts looking for overlap:
    //   [28 18 $] -> 2
    //   [28 $] -> 1
    //   [$] -> 1,2
    // Indeed a conflict exists as same state 3, same context [$], predicts
    // alts 1 and 2.
    // walk each state with potential conflicting configurations
    foreach ( int stateI in states )
    {
        var configsForState = stateToConfigListMap.get( stateI );
        // compare each configuration pair s, t to ensure:
        // s.ctx different than t.ctx if s.alt != t.alt
        int numConfigsForState = 0;
        if ( configsForState != null )
        {
            numConfigsForState = configsForState.Count;
        }
        for ( int i = 0; i < numConfigsForState; i++ )
        {
            NFAConfiguration s = configsForState[i];
            for ( int j = i + 1; j < numConfigsForState; j++ )
            {
                NFAConfiguration t = configsForState[j];
                // conflicts means s.ctx==t.ctx or s.ctx is a stack
                // suffix of t.ctx or vice versa (if alts differ).
                // Also a conflict if s.ctx or t.ctx is empty
                if ( s.alt != t.alt && s.context.ConflictsWith( t.context ) )
                {
                    nondeterministicAlts.Add( s.alt );
                    nondeterministicAlts.Add( t.alt );
                }
            }
        }
    }
    if ( nondeterministicAlts.Count == 0 )
    {
        return null;
    }
    return nondeterministicAlts;
}
/// <summary>
/// Recursively matches <paramref name="tree"/> against <paramref name="patternTree"/>.
/// Nodes captured by token tags (&lt;ID&gt;) or rule tags (&lt;expr&gt;) are recorded into
/// <paramref name="labels"/> under the tag's name and, when present, its explicit label.
/// </summary>
/// <param name="tree">Concrete parse tree to test; must not be null.</param>
/// <param name="patternTree">Pattern tree possibly containing tag tokens; must not be null.</param>
/// <param name="labels">Accumulates label -&gt; list-of-matched-nodes entries.</param>
/// <returns>The first mismatched node found, or null when the trees match.</returns>
protected internal virtual IParseTree MatchImpl(IParseTree tree, IParseTree patternTree, MultiMap<string, IParseTree> labels)
{
    if (tree == null)
    {
        // ArgumentNullException derives from ArgumentException, so callers that
        // catch the old type still work.
        throw new ArgumentNullException(nameof(tree));
    }
    if (patternTree == null)
    {
        throw new ArgumentNullException(nameof(patternTree));
    }
    // x and <ID>, x and y, or x and x; or could be mismatched types
    if (tree is ITerminalNode && patternTree is ITerminalNode)
    {
        ITerminalNode t1 = (ITerminalNode)tree;
        ITerminalNode t2 = (ITerminalNode)patternTree;
        // token types must agree at a minimum
        if (t1.Symbol.Type != t2.Symbol.Type)
        {
            return (t1);
        }
        if (t2.Symbol is TokenTagToken)
        {
            // x and <ID>: record under token name and optional label, always a match
            TokenTagToken tokenTagToken = (TokenTagToken)t2.Symbol;
            labels.Map(tokenTagToken.TokenName, tree);
            if (tokenTagToken.Label != null)
            {
                labels.Map(tokenTagToken.Label, tree);
            }
            return (null);
        }
        // x and x vs. x and y: literal texts must agree exactly
        if (!t1.GetText().Equals(t2.GetText(), StringComparison.Ordinal))
        {
            return (t1);
        }
        return (null);
    }
    if (tree is ParserRuleContext && patternTree is ParserRuleContext)
    {
        ParserRuleContext r1 = (ParserRuleContext)tree;
        ParserRuleContext r2 = (ParserRuleContext)patternTree;
        // (expr ...) and <expr>
        RuleTagToken ruleTagToken = GetRuleTagToken(r2);
        if (ruleTagToken != null)
        {
            if (r1.RuleIndex != r2.RuleIndex)
            {
                return (r1);
            }
            // track label->list-of-nodes for both rule name and label (if any)
            labels.Map(ruleTagToken.RuleName, tree);
            if (ruleTagToken.Label != null)
            {
                labels.Map(ruleTagToken.Label, tree);
            }
            return (null);
        }
        // (expr ...) and (expr ...): child counts must agree, then children match pairwise
        if (r1.ChildCount != r2.ChildCount)
        {
            return (r1);
        }
        int n = r1.ChildCount;
        for (int i = 0; i < n; i++)
        {
            IParseTree childMatch = MatchImpl(r1.GetChild(i), r2.GetChild(i), labels);
            if (childMatch != null)
            {
                return (childMatch);
            }
        }
        return (null);
    }
    // if nodes aren't both tokens or both rule nodes, can't match
    return (tree);
}
public void ClearBreakpoints() { /* lock (this) */ { _breakpoints = new MultiMap<string, Breakpoint>(); UnrealDebuggerIDE.Instance.RaiseBreakpointsChanged(_breakpoints); } }
private static void ExtractEffects(string path) { EffectStates state = EffectStates.Unknown; MultiMap <string, string> effects = new MultiMap <string, string>(); ClausewitzParser parser = new ClausewitzParser( name => { if (name == "effects") { if (state != EffectStates.Unknown) { throw new ClauzewitzSyntaxException("effects block inside " + state.ToString()); } state = EffectStates.Effects; } else if (name == "command") { if (state != EffectStates.Effects) { throw new ClauzewitzSyntaxException("command block inside " + state.ToString()); } state = EffectStates.Command; } }, () => { if (state == EffectStates.Command) { state = EffectStates.Effects; } else if (state == EffectStates.Effects) { state = EffectStates.Unknown; } }, (name, val) => { if (state != EffectStates.Command) { return; } effects.Add(name, val); }, val => { }); foreach (string filename in Directory.GetFiles(path)) { if (filename.EndsWith(@"\old_nuclear_tech.txt")) { continue; } parser.Parse(filename); } Console.WriteLine("Found the following effects:"); foreach (string key in effects.Keys) { Console.WriteLine(key + ":"); foreach (string val in effects.ValueList(key)) { Console.WriteLine("\t" + val); } } }
public void UpdateTagStringForCurrentDevicePath(string path, string tagString) { MultiMap<String, String> pathToTags = new MultiMap<string, string>(); pathToTags.PutCommaStringValues(path, tagString); try { using (var conn = new SQLiteConnection(@"Data Source=" + Configuration.GetSqliteDbPath(GetDbName()))) { conn.Open(); using (var cmd = new SQLiteCommand(conn)) { using (var transaction = conn.BeginTransaction()) { DeleteFileTagsForCurrentDevicePath(path, cmd); LinkTagsToCurrentDevicePaths(pathToTags, cmd); transaction.Commit(); } } conn.Close(); } } catch (Exception) { //do nothing } }
private static void MakeLeanMSG2(Store msg, ResSet predicates, StatementSink removed, ResSet nodesremoved, BNode startingnode) { // Find every pair of two distinct outgoing edges from startingnode // with the same predicate, targeting entities only. MultiMap edges = new MultiMap(); foreach (Statement s in msg.Select(new Statement(startingnode, null, null))) { if (s.Object is Entity) { edges.Put(new Edge(true, startingnode, s.Predicate, null), s.Object); } } foreach (Statement s in msg.Select(new Statement(null, null, startingnode))) { edges.Put(new Edge(false, startingnode, s.Predicate, null), s.Subject); } foreach (Edge e in edges.Keys) { // Make sure we have a distinct set of targets. ResSet targets_set = new ResSet(); foreach (Entity r in edges.Get(e)) { targets_set.Add(r); } if (targets_set.Count == 1) { continue; } IList targets = targets_set.ToEntityArray(); // Take every pair of targets, provided // one is a bnode that can be a variable. for (int i = 0; i < targets.Count; i++) { if (!(targets[i] is BNode) || predicates.Contains((BNode)targets[i])) { continue; } if (nodesremoved.Contains((BNode)targets[i])) { continue; } for (int j = 0; j < targets.Count; j++) { if (i == j) { continue; } // Create a new synchronous-path object. SyncPath p = new SyncPath(); p.FixedNodes.Add((Resource)targets[j]); p.FrontierVariables.Add((Resource)targets[i]); p.Mapping[targets[i]] = targets[j]; p.Path[new Edge(e.Direction, e.Start, e.Predicate, (BNode)targets[i])] = p.Path; if (MakeLeanMSG3(msg, predicates, removed, nodesremoved, p)) { break; // the target was removed } } } } }
public virtual ParseTreeMatch Match(IParseTree tree, ParseTreePattern pattern) { MultiMap<string, IParseTree> labels = new MultiMap<string, IParseTree>(); IParseTree mismatchedNode = MatchImpl(tree, pattern.PatternTree, labels); return new ParseTreeMatch(tree, pattern, labels, mismatchedNode); }
public void SetUp() { sample = new SampleWrapper(Activator.CreateInstance(sampleType)); interceptors = new MultiMap <MethodInfo, IInterceptor>(); }
/// <summary>
/// Emits the SMV "next" assignments for all internal data connections: for each data
/// buffer, builds either a sampling rule (plain source -> component destination) or an
/// event-gated copy (component source -> plain destination, gated by the destination's
/// sampling events resolved through the event-connection map).
/// </summary>
/// <returns>The generated SMV text fragment.</returns>
public static string InternalDataConnections(IEnumerable<Connection> internalBuffers, IEnumerable<WithConnection> withConnections, IEnumerable<Variable> allVariables, IEnumerable<FBInstance> instances)
{
    string dataConnections = "-- _internalDataConnections\n";
    foreach (Connection connection in internalBuffers.Where(conn => conn.Type == ConnectionType.Data))
    {
        bool srcComponent;
        bool dstComponent;
        string dstSmvVar = Smv.ConvertConnectionVariableName(connection.Destination, Smv.ModuleParameters.Variable, out dstComponent);
        string srcSmvVar = Smv.ConvertConnectionVariableName(connection.Source, Smv.ModuleParameters.Variable, out srcComponent);
        string srcString = "";
        // NOTE: a disabled component-to-component branch (direct connection without
        // double-buffering, via Smv.NextVarAssignment) used to live here; see VCS history.
        if (!srcComponent && dstComponent)
        {
            // Plain source feeding a component: destination samples the source.
            var dstVar = _findVariable(connection.Destination, allVariables, instances);
            if (dstVar.ArraySize == 0)
            {
                srcString = FbSmvCommon.VarSamplingRule(connection.Source, withConnections, false);
                dataConnections += String.Format(Smv.NextCaseBlock + "\n", dstSmvVar, srcString);
            }
            else
            {
                // Arrays get one case block per element.
                for (int i = 0; i < dstVar.ArraySize; i++)
                {
                    srcString = FbSmvCommon.VarSamplingRule(connection.Source, withConnections, false, i);
                    dataConnections += String.Format(Smv.NextCaseBlock + "\n", dstSmvVar + Smv.ArrayIndex(i), srcString);
                }
            }
        }
        else if (srcComponent && !dstComponent)
        {
            // Component source feeding a plain destination: the copy fires when any of
            // the destination's sampling events (resolved to their parent events) fires.
            IEnumerable<string> samplingEvents = _getSamplingEventNamesForVariable(connection.Destination, withConnections);
            MultiMap<string> eventConnectionsMap = _getEventConnectionsMap(internalBuffers);
            string eventSeed = "";
            foreach (string ev in samplingEvents)
            {
                string src = "";
                foreach (string parentEvent in eventConnectionsMap[ev])
                {
                    bool dontCare;
                    src += Smv.ConvertConnectionVariableName(parentEvent, Smv.ModuleParameters.Event, out dontCare) + Smv.Or;
                }
                // Each sampling event contributes one OR-ed disjunct.
                eventSeed += String.Format("({0}){1}", src.TrimEnd(Smv.OrTrimChars), Smv.Or);
            }
            var srcVar = _findVariable(connection.Source, allVariables, instances);
            if (srcVar.ArraySize == 0)
            {
                srcString = String.Format("\t{0} : {1};\n", eventSeed.TrimEnd(Smv.OrTrimChars), srcSmvVar);
                dataConnections += String.Format(Smv.NextCaseBlock + "\n", dstSmvVar, srcString);
            }
            else
            {
                for (int i = 0; i < srcVar.ArraySize; i++)
                {
                    srcString = String.Format("\t{0} : {1};\n", eventSeed.TrimEnd(Smv.OrTrimChars), srcSmvVar + Smv.ArrayIndex(i));
                    dataConnections += String.Format(Smv.NextCaseBlock + "\n", dstSmvVar + Smv.ArrayIndex(i), srcString);
                }
            }
        }
    }
    return (dataConnections);
}
/// <summary>
/// Loads per-instance and global instance reset times from the characters DB, normalizes
/// global reset times to the configured reset hour, computes fresh reset times for
/// expired or never-reset raid/heroic map difficulties, and schedules reset/warning events.
/// </summary>
void LoadResetTimes()
{
    long now = Time.UnixTime;
    long today = (now / Time.Day) * Time.Day;
    // NOTE: Use DirectPExecute for tables that will be queried later
    // get the current reset times for normal instances (these may need to be updated)
    // these are only kept in memory for InstanceSaves that are loaded later
    // resettime = 0 in the DB for raid/heroic instances so those are skipped
    Dictionary<uint, Tuple<uint, long>> instResetTime = new Dictionary<uint, Tuple<uint, long>>();
    // index instance ids by map/difficulty pairs for fast reset warning send
    MultiMap<uint, uint> mapDiffResetInstances = new MultiMap<uint, uint>();
    SQLResult result = DB.Characters.Query("SELECT id, map, difficulty, resettime FROM instance ORDER BY id ASC");
    if (!result.IsEmpty())
    {
        do
        {
            uint instanceId = result.Read<uint>(0);
            // Mark instance id as being used
            Global.MapMgr.RegisterInstanceId(instanceId);
            long resettime = result.Read<long>(3);
            if (resettime != 0)
            {
                uint mapid = result.Read<ushort>(1);
                uint difficulty = result.Read<byte>(2);
                instResetTime[instanceId] = Tuple.Create(MathFunctions.MakePair32(mapid, difficulty), resettime);
                mapDiffResetInstances.Add(MathFunctions.MakePair32(mapid, difficulty), instanceId);
            }
        }
        while (result.NextRow());
        // schedule the reset times
        foreach (var pair in instResetTime)
        {
            if (pair.Value.Item2 > now)
            {
                ScheduleReset(true, pair.Value.Item2, new InstResetEvent(0, MathFunctions.Pair32_LoPart(pair.Value.Item1), (Difficulty)MathFunctions.Pair32_HiPart(pair.Value.Item1), pair.Key));
            }
        }
    }
    // load the global respawn times for raid/heroic instances
    uint diff = (uint)(WorldConfig.GetIntValue(WorldCfg.InstanceResetTimeHour) * Time.Hour);
    result = DB.Characters.Query("SELECT mapid, difficulty, resettime FROM instance_reset");
    if (!result.IsEmpty())
    {
        do
        {
            uint mapid = result.Read<ushort>(0);
            Difficulty difficulty = (Difficulty)result.Read<byte>(1);
            long oldresettime = result.Read<long>(2);
            MapDifficultyRecord mapDiff = Global.DB2Mgr.GetMapDifficultyData(mapid, difficulty);
            if (mapDiff == null)
            {
                // Orphaned row: the map/difficulty pair no longer exists — purge it.
                Log.outError(LogFilter.Server, "InstanceSaveManager.LoadResetTimes: invalid mapid({0})/difficulty({1}) pair in instance_reset!", mapid, difficulty);
                PreparedStatement stmt = DB.Characters.GetPreparedStatement(CharStatements.DEL_GLOBAL_INSTANCE_RESETTIME);
                stmt.AddValue(0, mapid);
                stmt.AddValue(1, (byte)difficulty);
                DB.Characters.DirectExecute(stmt);
                continue;
            }
            // update the reset time if the hour in the configs changes
            long newresettime = (oldresettime / Time.Day) * Time.Day + diff;
            if (oldresettime != newresettime)
            {
                PreparedStatement stmt = DB.Characters.GetPreparedStatement(CharStatements.UPD_GLOBAL_INSTANCE_RESETTIME);
                stmt.AddValue(0, newresettime);
                stmt.AddValue(1, mapid);
                stmt.AddValue(2, (byte)difficulty);
                DB.Characters.DirectExecute(stmt);
            }
            InitializeResetTimeFor(mapid, difficulty, newresettime);
        } while (result.NextRow());
    }
    // calculate new global reset times for expired instances and those that have never been reset yet
    // add the global reset times to the priority queue
    foreach (var mapDifficultyPair in Global.DB2Mgr.GetMapDifficulties())
    {
        uint mapid = mapDifficultyPair.Key;
        foreach (var difficultyPair in mapDifficultyPair.Value)
        {
            Difficulty difficulty = (Difficulty)difficultyPair.Key;
            MapDifficultyRecord mapDiff = difficultyPair.Value;
            // Only raid/heroic difficulties (with a raid duration) use global resets.
            if (mapDiff.GetRaidDuration() == 0)
            {
                continue;
            }
            // the reset_delay must be at least one day
            uint period = (uint)(((mapDiff.GetRaidDuration() * WorldConfig.GetFloatValue(WorldCfg.RateInstanceResetTime)) / Time.Day) * Time.Day);
            if (period < Time.Day)
            {
                period = Time.Day;
            }
            long t = GetResetTimeFor(mapid, difficulty);
            if (t == 0)
            {
                // initialize the reset time
                t = today + period + diff;
                PreparedStatement stmt = DB.Characters.GetPreparedStatement(CharStatements.INS_GLOBAL_INSTANCE_RESETTIME);
                stmt.AddValue(0, mapid);
                stmt.AddValue(1, (byte)difficulty);
                stmt.AddValue(2, t);
                DB.Characters.DirectExecute(stmt);
            }
            if (t < now)
            {
                // assume that expired instances have already been cleaned
                // calculate the next reset time
                t = (t / Time.Day) * Time.Day;
                t += ((today - t) / period + 1) * period + diff;
                PreparedStatement stmt = DB.Characters.GetPreparedStatement(CharStatements.UPD_GLOBAL_INSTANCE_RESETTIME);
                stmt.AddValue(0, t);
                stmt.AddValue(1, mapid);
                stmt.AddValue(2, (byte)difficulty);
                DB.Characters.DirectExecute(stmt);
            }
            InitializeResetTimeFor(mapid, difficulty, t);
            // schedule the global reset/warning
            // NOTE(review): if no delay threshold is in the future, type ends at 4 and
            // ResetTimeDelay[type - 1] reads index 3 — assumes the array has >= 4 entries; confirm.
            byte type;
            for (type = 1; type < 4; ++type)
            {
                if (t - ResetTimeDelay[type - 1] > now)
                {
                    break;
                }
            }
            ScheduleReset(true, t - ResetTimeDelay[type - 1], new InstResetEvent(type, mapid, difficulty, 0));
            var range = mapDiffResetInstances.LookupByKey(MathFunctions.MakePair32(mapid, (uint)difficulty));
            foreach (var id in range)
            {
                ScheduleReset(true, t - ResetTimeDelay[type - 1], new InstResetEvent(type, mapid, difficulty, id));
            }
        }
    }
}
/// <summary>
/// Creates an index over annotations. Keys are produced by
/// <paramref name="keyMakingFunction"/>; presumably only annotations for which
/// <paramref name="includeIndexPredicate"/> returns true are added to the index
/// (the delegates are stored here and invoked elsewhere — confirm at the call sites).
/// </summary>
/// <param name="includeIndexPredicate">Filter deciding whether an annotation should be indexed.</param>
/// <param name="keyMakingFunction">Produces the string key under which an annotation is indexed.</param>
/// <exception cref="ArgumentNullException">Thrown when either delegate is null, so the failure
/// surfaces at construction time instead of as a NullReferenceException on first use.</exception>
public AnnotationIndex(Func<Annotation, bool> includeIndexPredicate, Func<Annotation, string> keyMakingFunction)
{
    if (includeIndexPredicate == null)
        throw new ArgumentNullException(nameof(includeIndexPredicate));
    if (keyMakingFunction == null)
        throw new ArgumentNullException(nameof(keyMakingFunction));

    _keyToObjectsMap = new MultiMap<string, Annotation>();
    _includeIndexPredicate = includeIndexPredicate;
    _keyMakingFunction = keyMakingFunction;
}
// Initializes empty storage: no calendar events and no invites are known
// until they are loaded or added later.
CalendarManager()
{
    // Invites are grouped per event id (ulong key) in a MultiMap.
    _invites = new MultiMap<ulong, CalendarInvite>();
    _events = new List<CalendarEvent>();
}
// Starts with an empty waypoint store; paths (keyed by uint id) are
// populated later by the load routines.
WaypointManager()
{
    _waypointStore = new MultiMap<uint, WaypointData>();
}
// Loads per-instance reset times and the global raid/heroic reset times from the
// characters database, normalizes them against the configured reset hour, persists
// any corrections, and schedules the reset/warning events.
//
// Fixes vs. previous revision: the DELETE/UPDATE/INSERT on instance_reset used
// string-formatted SQL and passed the Difficulty enum straight into '{n}', which
// formats the enum NAME (e.g. "Heroic") into the query instead of its numeric
// value. They now use the DEL/UPD/INS_GLOBAL_INSTANCE_RESETTIME prepared
// statements with (byte)difficulty, consistent with the rest of the codebase.
void LoadResetTimes()
{
    long now = Time.UnixTime;
    long today = (now / Time.Day) * Time.Day;

    // NOTE: Use DirectPExecute for tables that will be queried later

    // get the current reset times for normal instances (these may need to be updated)
    // these are only kept in memory for InstanceSaves that are loaded later
    // resettime = 0 in the DB for raid/heroic instances so those are skipped
    Dictionary<uint, Tuple<uint, long>> instResetTime = new Dictionary<uint, Tuple<uint, long>>();

    // index instance ids by map/difficulty pairs for fast reset warning send
    MultiMap<uint, uint> mapDiffResetInstances = new MultiMap<uint, uint>();

    SQLResult result = DB.Characters.Query("SELECT id, map, difficulty, resettime FROM instance ORDER BY id ASC");
    if (!result.IsEmpty())
    {
        do
        {
            uint instanceId = result.Read<uint>(0);

            // Instances are pulled in ascending order from db and nextInstanceId is initialized with 1,
            // so if the instance id is used, increment until we find the first unused one for a potential new instance
            if (Global.MapMgr.GetNextInstanceId() == instanceId)
                Global.MapMgr.SetNextInstanceId(instanceId + 1);

            // Mark instance id as being used
            Global.MapMgr.RegisterInstanceId(instanceId);

            long resettime = result.Read<uint>(3);
            if (resettime != 0)
            {
                uint mapid = result.Read<ushort>(1);
                uint difficulty = result.Read<byte>(2);

                instResetTime[instanceId] = Tuple.Create(MathFunctions.MakePair32(mapid, difficulty), resettime);
                mapDiffResetInstances.Add(MathFunctions.MakePair32(mapid, difficulty), instanceId);
            }
        }
        while (result.NextRow());

        // update reset time for normal instances with the max creature respawn time + X hours
        SQLResult result2 = DB.Characters.Query(DB.Characters.GetPreparedStatement(CharStatements.SEL_MAX_CREATURE_RESPAWNS));
        if (!result2.IsEmpty())
        {
            do
            {
                uint instance = result2.Read<uint>(1);
                long resettime = result2.Read<uint>(0) + 2 * Time.Hour;
                var pair = instResetTime.LookupByKey(instance);
                if (pair != null && pair.Item2 != resettime)
                {
                    // numeric-only values, but NOTE(review): a dedicated prepared
                    // statement would be preferable here too once one exists
                    DB.Characters.DirectExecute("UPDATE instance SET resettime = '{0}' WHERE id = '{1}'", resettime, instance);
                    instResetTime[instance] = Tuple.Create(pair.Item1, resettime);
                }
            }
            while (result2.NextRow());
        }

        // schedule the reset times
        foreach (var pair in instResetTime)
        {
            if (pair.Value.Item2 > now)
                ScheduleReset(true, pair.Value.Item2, new InstResetEvent(0, MathFunctions.Pair32_LoPart(pair.Value.Item1), (Difficulty)MathFunctions.Pair32_HiPart(pair.Value.Item1), pair.Key));
        }
    }

    // load the global respawn times for raid/heroic instances
    uint diff = (uint)(WorldConfig.GetIntValue(WorldCfg.InstanceResetTimeHour) * Time.Hour);
    result = DB.Characters.Query("SELECT mapid, difficulty, resettime FROM instance_reset");
    if (!result.IsEmpty())
    {
        do
        {
            uint mapid = result.Read<ushort>(0);
            Difficulty difficulty = (Difficulty)result.Read<byte>(1);
            long oldresettime = result.Read<uint>(2);

            MapDifficultyRecord mapDiff = Global.DB2Mgr.GetMapDifficultyData(mapid, difficulty);
            if (mapDiff == null)
            {
                Log.outError(LogFilter.Server, "InstanceSaveManager.LoadResetTimes: invalid mapid({0})/difficulty({1}) pair in instance_reset!", mapid, difficulty);

                PreparedStatement stmt = DB.Characters.GetPreparedStatement(CharStatements.DEL_GLOBAL_INSTANCE_RESETTIME);
                stmt.AddValue(0, mapid);
                stmt.AddValue(1, (byte)difficulty);
                DB.Characters.DirectExecute(stmt);
                continue;
            }

            // update the reset time if the hour in the configs changes
            long newresettime = (oldresettime / Time.Day) * Time.Day + diff;
            if (oldresettime != newresettime)
            {
                PreparedStatement stmt = DB.Characters.GetPreparedStatement(CharStatements.UPD_GLOBAL_INSTANCE_RESETTIME);
                stmt.AddValue(0, newresettime);
                stmt.AddValue(1, mapid);
                stmt.AddValue(2, (byte)difficulty);
                DB.Characters.DirectExecute(stmt);
            }

            InitializeResetTimeFor(mapid, difficulty, newresettime);
        }
        while (result.NextRow());
    }

    // calculate new global reset times for expired instances and those that have never been reset yet
    // add the global reset times to the priority queue
    foreach (var mapDifficultyPair in Global.DB2Mgr.GetMapDifficulties())
    {
        uint mapid = mapDifficultyPair.Key;
        foreach (var difficultyPair in mapDifficultyPair.Value)
        {
            Difficulty difficulty = (Difficulty)difficultyPair.Key;
            MapDifficultyRecord mapDiff = difficultyPair.Value;
            if (mapDiff.GetRaidDuration() == 0)
                continue;

            // the reset_delay must be at least one day
            uint period = (uint)(((mapDiff.GetRaidDuration() * WorldConfig.GetFloatValue(WorldCfg.RateInstanceResetTime)) / Time.Day) * Time.Day);
            if (period < Time.Day)
                period = Time.Day;

            long t = GetResetTimeFor(mapid, difficulty);
            if (t == 0)
            {
                // initialize the reset time
                t = today + period + diff;
                PreparedStatement stmt = DB.Characters.GetPreparedStatement(CharStatements.INS_GLOBAL_INSTANCE_RESETTIME);
                stmt.AddValue(0, mapid);
                stmt.AddValue(1, (byte)difficulty);
                stmt.AddValue(2, t);
                DB.Characters.DirectExecute(stmt);
            }

            if (t < now)
            {
                // assume that expired instances have already been cleaned
                // calculate the next reset time
                t = (t / Time.Day) * Time.Day;
                t += ((today - t) / period + 1) * period + diff;

                PreparedStatement stmt = DB.Characters.GetPreparedStatement(CharStatements.UPD_GLOBAL_INSTANCE_RESETTIME);
                stmt.AddValue(0, t);
                stmt.AddValue(1, mapid);
                stmt.AddValue(2, (byte)difficulty);
                DB.Characters.DirectExecute(stmt);
            }

            InitializeResetTimeFor(mapid, difficulty, t);

            // schedule the global reset/warning; pick the first warning stage
            // whose fire time is still in the future
            byte type;
            for (type = 1; type < 4; ++type)
            {
                if (t - ResetTimeDelay[type - 1] > now)
                    break;
            }

            ScheduleReset(true, t - ResetTimeDelay[type - 1], new InstResetEvent(type, mapid, difficulty, 0));

            // also schedule per-instance events for every live instance of this map/difficulty
            var range = mapDiffResetInstances.LookupByKey(MathFunctions.MakePair32(mapid, (uint)difficulty));
            foreach (var id in range)
                ScheduleReset(true, t - ResetTimeDelay[type - 1], new InstResetEvent(type, mapid, difficulty, id));
        }
    }
}