/// <summary>
/// Runs the SOSIEL decision-making algorithm for <c>numberOfIterations</c> iterations.
/// Each iteration builds a per-agent state snapshot, then executes the enabled
/// cognitive rounds in sequence: anticipatory learning / counterfactual thinking /
/// innovation (round 1), social learning (round 2), decision-option selection via
/// satisficing parts I and II (rounds 3-4) and action taking (round 5), followed by
/// post-iteration statistics, agent deactivation, reproduction and maintenance.
/// The loop exits early once <c>algorithmStoppage</c> is set or no active agents remain.
/// </summary>
/// <param name="activeSites">Sites available to site-oriented agents; shuffled each
/// iteration. Agents whose prototype is not site-oriented act on <c>Site.DefaultSite</c>
/// instead.</param>
protected void RunSosiel(IEnumerable<Site> activeSites)
{
    for (int i = 1; i <= numberOfIterations; i++)
    {
        // iterationCounter is a field and persists across calls; i only bounds this run.
        iterationCounter++;

        // Append this iteration's per-agent state to the linked list of iterations.
        // Iteration 1 is seeded by InitializeFirstIterationState(); later iterations
        // start empty and are populated per agent below from the prior iteration.
        Dictionary<IAgent, AgentState> currentIteration;

        if (iterationCounter > 1)
        {
            currentIteration = iterations.AddLast(new Dictionary<IAgent, AgentState>()).Value;
        }
        else
        {
            currentIteration = iterations.AddLast(InitializeFirstIterationState()).Value;
        }

        // Null on the very first iteration (no previous node exists yet).
        Dictionary<IAgent, AgentState> priorIteration = iterations.Last.Previous?.Value;

        // rankedGoals is a field: per-agent goals ordered by importance, rebuilt each iteration.
        rankedGoals = new Dictionary<IAgent, Goal[]>(agentList.Agents.Count);

        // Optionally shuffle agent processing order, then group by agent type so every
        // round below processes whole type-groups in deterministic key order.
        IAgent[] orderedAgents = agentList.ActiveAgents.Randomize(processConfiguration.AgentRandomizationEnabled).ToArray();

        var agentGroups = orderedAgents.GroupBy(a => a[SosielVariables.AgentType]).OrderBy(group => group.Key).ToArray();

        orderedAgents.ForEach(a =>
        {
            // Seed the ranking with the assigned goals; re-ranked by gs below.
            rankedGoals.Add(a, a.AssignedGoals.ToArray());

            if (iterationCounter > 1)
            {
                // Carry each agent's state forward from the prior iteration.
                currentIteration.Add(a, priorIteration[a].CreateForNextIteration());
            }
        });

        if (processConfiguration.UseDemographicProcesses && iterationCounter > 1)
        {
            demographic.ChangeDemographic(iterationCounter, currentIteration, agentList);
        }

        PreIterationCalculations(iterationCounter);
        PreIterationStatistic(iterationCounter);

        Site[] orderedSites = activeSites.Randomize().ToArray();

        // Single-element stand-in used for agents that are not site-oriented.
        Site[] notSiteOriented = new Site[] { Site.DefaultSite };

        if (iterationCounter == 1)
        {
            // The first iteration has no history for anticipatory learning, so only
            // sort each agent's goals by importance here.
            foreach (var agentGroup in agentGroups)
            {
                foreach (IAgent agent in agentGroup)
                {
                    rankedGoals[agent] = gs.SortByImportance(agent, currentIteration[agent].GoalsState)
                        .ToArray();
                }
            }
        }

        if (processConfiguration.AnticipatoryLearningEnabled && iterationCounter > 1)
        {
            //1st round: anticipatory learning (AL), counterfactual thinking (CT), innovation (IR)
            foreach (var agentGroup in agentGroups)
            {
                foreach (IAgent agent in agentGroup)
                {
                    //anticipatory learning process
                    al.Execute(agent, iterations.Last);

                    var agentGoalState = currentIteration[agent].GoalsState;

                    //goal prioritizing
                    gp.Prioritize(agent, agentGoalState);

                    //goal selecting
                    rankedGoals[agent] = gs.SortByImportance(agent, agentGoalState).ToArray();

                    if (processConfiguration.CounterfactualThinkingEnabled)
                    {
                        // NOTE(review): the lambda parameter g is unused, so this reduces to
                        // "agent has goals AND any goal state lacks confidence" — confirm it
                        // wasn't meant to test g's own goal state.
                        if (rankedGoals[agent].Any(g => currentIteration[agent].GoalsState.Any(kvp => kvp.Value.Confidence == false)))
                        {
                            foreach (Site site in agent.Prototype.IsSiteOriented ? orderedSites : notSiteOriented)
                            {
                                BeforeCounterfactualThinking(agent, site);

                                // Walk decision-option sets in declared position order.
                                foreach (var set in agent.AssignedDecisionOptions.GroupBy(h => h.Layer.Set).OrderBy(g => g.Key.PositionNumber))
                                {
                                    //optimization: only the most important goal associated with this set
                                    Goal selectedGoal = rankedGoals[agent].First(g => set.Key.AssociatedWith.Contains(g));

                                    GoalState selectedGoalState = currentIteration[agent].GoalsState[selectedGoal];

                                    if (selectedGoalState.Confidence == false)
                                    {
                                        foreach (var layer in set.GroupBy(h => h.Layer).OrderBy(g => g.Key.PositionNumber))
                                        {
                                            // NOTE(review): A || (!A && B) is logically A || B.
                                            if (layer.Key.LayerConfiguration.Modifiable || (!layer.Key.LayerConfiguration.Modifiable && layer.Any(r => r.IsModifiable)))
                                            {
                                                //looking for decision options matched in the prior period for this site/layer
                                                DecisionOption[] matchedDecisionOptions = priorIteration[agent].DecisionOptionsHistories[site]
                                                    .Matched.Where(h => h.Layer == layer.Key).ToArray();

                                                bool? CTResult = null;

                                                //counterfactual thinking needs at least two matched options to compare
                                                if (matchedDecisionOptions.Length >= 2)
                                                {
                                                    CTResult = ct.Execute(agent, iterations.Last, selectedGoal, matchedDecisionOptions, layer.Key, site);
                                                }

                                                if (processConfiguration.InnovationEnabled)
                                                {
                                                    //innovate when CT failed or there was nothing to compare
                                                    if (CTResult == false || matchedDecisionOptions.Length < 2)
                                                    {
                                                        innovation.Execute(agent, iterations.Last, selectedGoal, layer.Key, site, probabilities);
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }

        if (processConfiguration.SocialLearningEnabled && iterationCounter > 1)
        {
            //2nd round: social learning (SL)
            foreach (var agentGroup in agentGroups)
            {
                foreach (IAgent agent in agentGroup)
                {
                    foreach (var set in agent.AssignedDecisionOptions.GroupBy(h => h.Layer.Set).OrderBy(g => g.Key.PositionNumber))
                    {
                        foreach (var layer in set.GroupBy(h => h.Layer).OrderBy(g => g.Key.PositionNumber))
                        {
                            //social learning process
                            sl.ExecuteLearning(agent, iterations.Last, layer.Key);
                        }
                    }
                }
            }
        }

        if (processConfiguration.DecisionOptionSelectionEnabled)
        {
            //3rd round: AS part I (satisficing); note this round is NOT gated on iterationCounter > 1
            foreach (var agentGroup in agentGroups)
            {
                foreach (IAgent agent in agentGroup)
                {
                    foreach (Site site in agent.Prototype.IsSiteOriented ? orderedSites : notSiteOriented)
                    {
                        foreach (var set in agent.AssignedDecisionOptions.GroupBy(h => h.Layer.Set).OrderBy(g => g.Key.PositionNumber))
                        {
                            foreach (var layer in set.GroupBy(h => h.Layer).OrderBy(g => g.Key.PositionNumber))
                            {
                                BeforeActionSelection(agent, site);

                                //satisficing
                                satisficing.ExecutePartI(agent, iterations.Last, rankedGoals, layer.ToArray(), site);
                            }
                        }
                    }
                }
            }

            if (processConfiguration.DecisionOptionSelectionPart2Enabled && iterationCounter > 1)
            {
                //4th round: AS part II
                foreach (var agentGroup in agentGroups)
                {
                    foreach (IAgent agent in agentGroup)
                    {
                        foreach (Site site in agent.Prototype.IsSiteOriented ? orderedSites : notSiteOriented)
                        {
                            foreach (var set in agent.AssignedDecisionOptions.GroupBy(r => r.Layer.Set).OrderBy(g => g.Key.PositionNumber))
                            {
                                foreach (var layer in set.GroupBy(h => h.Layer).OrderBy(g => g.Key.PositionNumber))
                                {
                                    BeforeActionSelection(agent, site);

                                    //action selection process part II
                                    satisficing.ExecutePartII(agent, iterations.Last, rankedGoals, layer.ToArray(), site);
                                }
                            }
                        }
                    }
                }
            }
        }

        if (processConfiguration.ActionTakingEnabled)
        {
            //5th round: taking actions (TA)
            foreach (var agentGroup in agentGroups)
            {
                foreach (IAgent agent in agentGroup)
                {
                    foreach (Site site in agent.Prototype.IsSiteOriented ? orderedSites : notSiteOriented)
                    {
                        at.Execute(agent, currentIteration[agent], site);

                        AfterActionTaking(agent, site);
                    }
                }
            }
        }

        if (processConfiguration.AlgorithmStopIfAllAgentsSelectDoNothing && iterationCounter > 1)
        {
            // Stop when no agent activated any decision option anywhere this iteration.
            if (!currentIteration.SelectMany(kvp => kvp.Value.DecisionOptionsHistories.Values.SelectMany(rh => rh.Activated)).Any())
            {
                algorithmStoppage = true;
            }
        }

        PostIterationCalculations(iterationCounter);
        PostIterationStatistic(iterationCounter);

        if (processConfiguration.AgentsDeactivationEnabled && iterationCounter > 1)
        {
            AgentsDeactivation();
        }

        AfterDeactivation(iterationCounter);

        if (processConfiguration.ReproductionEnabled && iterationCounter > 1)
        {
            Reproduction(0);
        }

        // Early exit: stop flag raised above, or every agent has been deactivated.
        if (algorithmStoppage || agentList.ActiveAgents.Length == 0)
        {
            break;
        }

        Maintenance();
    }
}
/// <summary>
/// Rule-based overload of the SOSIEL algorithm loop, operating on
/// <c>ActiveSite</c> and <c>Rule</c>/<c>RuleSet</c>/<c>RuleLayer</c> types.
/// Per iteration: build the per-agent state, then run the enabled rounds —
/// anticipatory learning / counterfactual thinking / innovation, social learning,
/// rule selection parts I and II, action taking — followed by statistics,
/// deactivation, reproduction and maintenance. Exits early once
/// <c>algorithmStoppage</c> is set or no active agents remain.
/// </summary>
/// <param name="activeSites">Sites available to site-oriented agents; shuffled each
/// iteration. Non-site-oriented agents use a single default(ActiveSite) placeholder.</param>
protected void RunSosiel(IEnumerable<ActiveSite> activeSites)
{
    // NOTE(review): near-duplicate of the decision-option-based
    // RunSosiel(IEnumerable<Site>) overload in this file; behavioral differences
    // are flagged inline. Consider unifying the two.
    for (int i = 1; i <= numberOfIterations; i++)
    {
        // iterationCounter is a field and persists across calls; i only bounds this run.
        iterationCounter++;

        // Optional shuffle, then group agents by type; groups are processed in key order.
        IAgent[] orderedAgents = agentList.ActiveAgents.Randomize(processConfiguration.AgentRandomizationEnabled).ToArray();

        var agentGroups = orderedAgents.GroupBy(a => a[VariablesUsedInCode.AgentType]).OrderBy(group => group.Key).ToArray();

        // NOTE(review): the pre-iteration hooks run BEFORE the iteration state is
        // created here, unlike the Site-based overload — confirm ordering is intended.
        PreIterationCalculations(iterationCounter);
        PreIterationStatistic(iterationCounter);

        // Append this iteration's per-agent state; iteration 1 is seeded by
        // InitializeFirstIterationState(), later iterations start empty and are
        // populated per agent below.
        Dictionary<IAgent, AgentState> currentIteration;

        if (iterationCounter > 1)
        {
            currentIteration = iterations.AddLast(new Dictionary<IAgent, AgentState>()).Value;
        }
        else
        {
            currentIteration = iterations.AddLast(InitializeFirstIterationState()).Value;
        }

        // Null on the very first iteration (no previous node exists yet).
        Dictionary<IAgent, AgentState> priorIteration = iterations.Last.Previous?.Value;

        // Local here (the other overload uses a field): per-agent goals ordered by
        // importance for this iteration.
        Dictionary<IAgent, Goal[]> rankedGoals = new Dictionary<IAgent, Goal[]>(agentList.Agents.Count);

        orderedAgents.ForEach(a =>
        {
            // Seed the ranking with the assigned goals; replaced by al.Execute below.
            rankedGoals.Add(a, a.AssignedGoals.ToArray());

            if (iterationCounter > 1)
            {
                // Carry each agent's state forward from the prior iteration.
                currentIteration.Add(a, priorIteration[a].CreateForNextIteration());
            }
        });

        ActiveSite[] orderedSites = activeSites.Randomize().ToArray();

        // Single placeholder entry for agents that are not site-oriented.
        ActiveSite[] notSiteOriented = new ActiveSite[] { default(ActiveSite) };

        if (processConfiguration.AnticipatoryLearningEnabled && iterationCounter > 1)
        {
            //1st round: anticipatory learning (AL), counterfactual thinking (CT), innovation (IR)
            foreach (var agentGroup in agentGroups)
            {
                foreach (IAgent agent in agentGroup)
                {
                    //anticipatory learning process; in this variant al returns the ranked goals directly
                    rankedGoals[agent] = al.Execute(agent, iterations.Last);

                    if (processConfiguration.CounterfactualThinkingEnabled == true)
                    {
                        // NOTE(review): the lambda parameter g is unused, so this reduces to
                        // "agent has goals AND any goal state lacks confidence".
                        if (rankedGoals[agent].Any(g => currentIteration[agent].GoalsState.Any(kvp => kvp.Value.Confidence == false)))
                        {
                            foreach (ActiveSite site in agent.Prototype.IsSiteOriented ? orderedSites : notSiteOriented)
                            {
                                BeforeCounterfactualThinking(agent, site);

                                // Walk rule sets in declared position order.
                                foreach (var set in agent.AssignedRules.GroupBy(h => h.Layer.Set).OrderBy((IGrouping<RuleSet, Rule> g) => g.Key.PositionNumber))
                                {
                                    //optimization: only the most important goal associated with this rule set
                                    Goal selectedGoal = rankedGoals[agent].First(g => set.Key.AssociatedWith.Contains(g));

                                    GoalState selectedGoalState = currentIteration[agent].GoalsState[selectedGoal];

                                    if (selectedGoalState.Confidence == false)
                                    {
                                        foreach (var layer in set.GroupBy(h => h.Layer).OrderBy((IGrouping<RuleLayer, Rule> g) => g.Key.PositionNumber))
                                        {
                                            // NOTE(review): A || (!A && B) is logically A || B.
                                            if (layer.Key.LayerConfiguration.Modifiable || (!layer.Key.LayerConfiguration.Modifiable && layer.Any(r => r.IsModifiable)))
                                            {
                                                //looking for rules matched in the prior period for this site/layer
                                                Rule[] matchedPriorPeriodHeuristics = priorIteration[agent].RuleHistories[site]
                                                    .Matched.Where(h => h.Layer == layer.Key).ToArray();

                                                bool? CTResult = null;

                                                //counterfactual thinking needs at least two matched rules to compare
                                                if (matchedPriorPeriodHeuristics.Length >= 2)
                                                {
                                                    CTResult = ct.Execute(agent, iterations.Last, selectedGoal, matchedPriorPeriodHeuristics, layer.Key, site);
                                                }

                                                if (processConfiguration.InnovationEnabled == true)
                                                {
                                                    //innovate when CT failed or there was nothing to compare
                                                    if (CTResult == false || matchedPriorPeriodHeuristics.Length < 2)
                                                    {
                                                        it.Execute(agent, iterations.Last, selectedGoal, layer.Key, site);
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }

        if (processConfiguration.SocialLearningEnabled && iterationCounter > 1)
        {
            //2nd round: social learning (SL)
            foreach (var agentGroup in agentGroups)
            {
                foreach (IAgent agent in agentGroup)
                {
                    foreach (var set in agent.AssignedRules.GroupBy(h => h.Layer.Set).OrderBy(g => g.Key.PositionNumber))
                    {
                        foreach (var layer in set.GroupBy(h => h.Layer).OrderBy(g => g.Key.PositionNumber))
                        {
                            //social learning process
                            sl.ExecuteLearning(agent, iterations.Last, layer.Key);
                        }
                    }
                }
            }
        }

        if (processConfiguration.RuleSelectionEnabled && iterationCounter > 1)
        {
            //3rd round: AS part I
            // NOTE(review): unlike the Site-based overload, part I is skipped on the
            // first iteration here — confirm that is intended.
            foreach (var agentGroup in agentGroups)
            {
                foreach (IAgent agent in agentGroup)
                {
                    foreach (ActiveSite site in agent.Prototype.IsSiteOriented ? orderedSites : notSiteOriented)
                    {
                        foreach (var set in agent.AssignedRules.GroupBy(h => h.Layer.Set).OrderBy((IGrouping<RuleSet, Rule> g) => g.Key.PositionNumber))
                        {
                            foreach (var layer in set.GroupBy(h => h.Layer).OrderBy((IGrouping<RuleLayer, Rule> g) => g.Key.PositionNumber))
                            {
                                BeforeActionSelection(agent, site);

                                //action selection process part I
                                acts.ExecutePartI(agent, iterations.Last, rankedGoals[agent], layer.ToArray(), site);
                            }
                        }
                    }
                }
            }

            if (processConfiguration.RuleSelectionPart2Enabled && iterationCounter > 1)
            {
                //4th round: AS part II
                foreach (var agentGroup in agentGroups)
                {
                    foreach (IAgent agent in agentGroup)
                    {
                        foreach (ActiveSite site in agent.Prototype.IsSiteOriented ? orderedSites : notSiteOriented)
                        {
                            foreach (var set in agent.AssignedRules.GroupBy(r => r.Layer.Set).OrderBy((IGrouping<RuleSet, Rule> g) => g.Key.PositionNumber))
                            {
                                foreach (var layer in set.GroupBy(h => h.Layer).OrderBy((IGrouping<RuleLayer, Rule> g) => g.Key.PositionNumber))
                                {
                                    BeforeActionSelection(agent, site);

                                    //action selection process part II
                                    acts.ExecutePartII(agent, iterations.Last, rankedGoals[agent], layer.ToArray(), site);
                                }
                            }
                        }
                    }
                }
            }
        }

        if (processConfiguration.ActionTakingEnabled)
        {
            //5th round: taking actions (TA)
            foreach (var agentGroup in agentGroups)
            {
                foreach (IAgent agent in agentGroup)
                {
                    foreach (ActiveSite site in agent.Prototype.IsSiteOriented ? orderedSites : notSiteOriented)
                    {
                        at.Execute(agent, currentIteration[agent], site);

                        AfterActionTaking(agent, site);
                    }
                }
            }
        }

        if (processConfiguration.AlgorithmStopIfAllAgentsSelectDoNothing && iterationCounter > 1)
        {
            // Stop when every activated rule this iteration is a non-action ("do nothing").
            // NOTE(review): All(...) is true for an empty sequence, so zero activations
            // also triggers stoppage — confirm that matches the intent.
            if (currentIteration.SelectMany(kvp => kvp.Value.RuleHistories.Values.SelectMany(rh => rh.Activated)).All(r => r.IsAction == false))
            {
                algorithmStoppage = true;
            }
        }

        PostIterationCalculations(iterationCounter);
        PostIterationStatistic(iterationCounter);

        if (processConfiguration.AgentsDeactivationEnabled && iterationCounter > 1)
        {
            AgentsDeactivation();
        }

        AfterDeactivation(iterationCounter);

        if (processConfiguration.ReproductionEnabled && iterationCounter > 1)
        {
            Reproduction(0);
        }

        // Early exit: stop flag raised above, or every agent has been deactivated.
        if (algorithmStoppage || agentList.ActiveAgents.Length == 0)
        {
            break;
        }

        Maintenance();
    }
}