/// <summary>
/// Builds the list of response texts for the currently operating decision.
/// Options whose requirements are not met are filtered out; when nothing
/// remains (and a silent decision was not intended) the default response is
/// offered so the player always has something to select.
/// </summary>
/// <returns>Array of selectable response texts; possibly empty.</returns>
public override string[] getResponses()
{
    // No options configured at all.
    if (operatingDecision.decisionOptions == null)
    {
        // Fixed: the original placed the statement terminator outside the block
        // ("return new string[] { } } ;"), which does not compile.
        return new string[] { };
    }

    List<string> retval = new List<string>();
    foreach (GameObject go in operatingDecision.decisionOptions)
    {
        DecisionOption o = go.GetComponent<DecisionOption>();
        // Hide options whose requirements the player has not satisfied yet.
        if (!GameData.RequirementsMet(o.requirements))
        {
            continue;
        }
        retval.Add(o.text);
    }

    // Guarantee at least one response unless a no-response decision was intended.
    if (!intentionalNoResponse && retval.Count == 0)
    {
        retval.Add(DEFAULT_RESPONSE());
    }
    return retval.ToArray();
}
/// <summary>
/// Inserts a decision option row into dm_options and returns the generated id.
/// </summary>
/// <param name="option">Option carrying the Description and DecisionID to persist.</param>
/// <returns>The id produced by the RETURNING clause.</returns>
/// <exception cref="Exception">Wraps any database failure; the original error is kept as InnerException.</exception>
public int SaveOptionsToDecision(DecisionOption option)
{
    using (OracleConnection cn = new OracleConnection(ConfigurationManager.ConnectionStrings["OracleDatabase"].ConnectionString))
    using (OracleCommand cmd = new OracleCommand())
    {
        cmd.Connection = cn;
        cmd.InitialLONGFetchSize = 1000;
        cmd.CommandType = CommandType.Text;
        // Parameterized (was String.Format) to prevent SQL injection via option.Description
        // and to let Oracle reuse the cached statement plan.
        cmd.CommandText = "INSERT INTO dm_options (Description,DecisionID) VALUES (:description,:decisionId) returning id into :myOutputParameter";
        // ODP.NET binds by position by default, so parameters are added in
        // placeholder order: :description, :decisionId, :myOutputParameter.
        cmd.Parameters.Add("description", OracleDbType.Varchar2).Value = option.Description;
        cmd.Parameters.Add("decisionId", OracleDbType.Int32).Value = option.DecisionID;
        cmd.Parameters.Add("myOutputParameter", OracleDbType.Decimal, ParameterDirection.ReturnValue);

        cn.Open();
        try
        {
            cmd.ExecuteNonQuery();
            var result = (OracleDecimal)cmd.Parameters["myOutputParameter"].Value;
            return Convert.ToInt32(result.Value);
        }
        catch (Exception ex)
        {
            // Preserve the original failure as InnerException instead of discarding it.
            throw new Exception("##Error save to DB## : " + cmd.CommandText, ex);
        }
        // Connection/command are closed by the using blocks, even on failure.
    }
}
/// <summary>
/// Executes counterfactual thinking about most important agent goal for specific site.
/// Confidence is pessimistically reset to false up front; the tendency-specific
/// logic may restore it, and the resulting flag is returned.
/// </summary>
/// <param name="agent">Agent to reason about.</param>
/// <param name="lastIteration">Iteration-history node; its Previous is dereferenced, so a prior iteration must exist — TODO confirm callers guarantee this.</param>
/// <param name="goal">Most important goal being evaluated.</param>
/// <param name="matched">Decision options matched in the prior period for this layer.</param>
/// <param name="layer">Decision option layer under evaluation.</param>
/// <param name="site">Site the reasoning applies to.</param>
/// <returns>True when SpecificLogic restored confidence in the selected goal state.</returns>
public bool Execute(IAgent agent, LinkedListNode <Dictionary <IAgent, AgentState <TSite> > > lastIteration, Goal goal, DecisionOption[] matched, DecisionOptionLayer layer, TSite site)
{
    //Period currentPeriod = periodModel.Value;
    AgentState <TSite> priorIterationAgentState = lastIteration.Previous.Value[agent];

    // Stash the goal under evaluation on the instance and clear confidence;
    // SpecificLogic decides whether it can be regained.
    selectedGoal = goal;
    selectedGoalState = lastIteration.Value[agent].GoalsState[selectedGoal];
    selectedGoalState.Confidence = false;

    // Prior-period activation for this layer drives the counterfactual comparison.
    DecisionOptionsHistory history = priorIterationAgentState.DecisionOptionsHistories[site];
    activatedDecisionOption = history.Activated.FirstOrDefault(r => r.Layer == layer);

    anticipatedInfluences = agent.AnticipationInfluence;
    matchedDecisionOptions = matched;

    SpecificLogic(selectedGoal.Tendency);

    return(selectedGoalState.Confidence);
}
/// <summary>
/// Converts raw decision-option attribute rows (plus their antecedent rows)
/// into DecisionOption instances, keyed by the option's source name.
/// </summary>
/// <param name="decisionOptionAttributes">One row per decision option.</param>
/// <param name="decisionOptionAntecedentAttributes">Antecedent rows; matched to options by name.</param>
/// <returns>Dictionary from option name to the parsed DecisionOption.</returns>
private static Dictionary <string, DecisionOption> ParseDecisionOptions(
    List <DecisionOptionAttribute> decisionOptionAttributes,
    List <DecisionOptionAntecedentAttribute> decisionOptionAntecedentAttributes)
{
    var parsedOptions = new Dictionary<string, DecisionOption>();

    foreach (var attribute in decisionOptionAttributes)
    {
        var nameParts = ParseDecisionOptionName(attribute.DecisionOption);

        // Antecedent rows belonging to this option, in declaration order.
        var antecedentParts = decisionOptionAntecedentAttributes
            .Where(a => a.DecisionOption == attribute.DecisionOption)
            .Select(a => new DecisionOptionAntecedentPart(
                a.AntecedentVariable,
                a.AntecedentOperator,
                ParseDynamicValue(a.AntecedentValueType, a.AntecedentValue),
                a.AntecedentReference))
            .ToArray();

        var option = new DecisionOption
        {
            ParentMentalModelId = nameParts.MentalModel,
            ParentDecisionOptionLayerId = nameParts.MentalSubModel,
            Id = nameParts.DecisionOptionNumber,
            RequiredParticipants = attribute.RequiredParticipants,
            Antecedent = antecedentParts,
            Consequent = new DecisionOptionConsequent(
                attribute.ConsequentVariable,
                ParseDynamicValue(attribute.ConsequentValueType, attribute.ConsequentValue),
                attribute.ConsequentValueReference)
        };

        parsedOptions.Add(attribute.DecisionOption, option);
    }

    return parsedOptions;
}
/// <summary>
/// Picks the decision option to activate for a minimizing goal tendency:
/// among the matched options, the group with the smallest anticipated
/// influence on the processed goal is taken and one member is chosen at random.
/// </summary>
protected override void Minimize()
{
    // Nothing matched — leave decisionOptionForActivating untouched.
    if (matchedDecisionOptions.Length == 0)
    {
        return;
    }

    var lowestInfluenceGroup = matchedDecisionOptions
        .GroupBy(r => anticipatedInfluence[r][processedGoal])
        .OrderBy(group => group.Key)
        .First()
        .ToArray();

    decisionOptionForActivating = lowestInfluenceGroup.RandomizeOne();
}
/// <summary>
/// Picks the decision option to activate for a maintain-at-value goal tendency.
/// Does nothing when the goal already sits at its focal value; otherwise the
/// matched options are grouped by anticipated influence offset by the current
/// distance to the focal value, the lowest-keyed group wins, and one member is
/// chosen at random.
/// </summary>
protected override void MaintainAtValue()
{
    // Already at the focal value — keep the current state.
    if (goalState.Value == goalState.FocalValue)
    {
        return;
    }

    var candidates = matchedDecisionOptions;
    if (candidates.Length > 1)
    {
        double distanceToFocal = Math.Abs(goalState.FocalValue - goalState.Value);
        candidates = candidates
            .GroupBy(r => anticipatedInfluence[r][processedGoal] - distanceToFocal)
            .OrderBy(group => group.Key)
            .First()
            .ToArray();
    }

    decisionOptionForActivating = candidates.RandomizeOne();
}
/// <summary>
/// Shares collective action among same household agents:
/// every connected agent in the same scope that does not yet have the
/// decision option gets it assigned, together with the current agent's
/// anticipated influence for it.
/// </summary>
/// <param name="currentAgent">Agent initiating the collective action.</param>
/// <param name="decisionOption">Collective decision option being shared.</param>
/// <param name="agentStates">Per-agent state of the current iteration (unused here, kept for signature compatibility).</param>
/// <returns>The neighbours that newly received the decision option.</returns>
List <IAgent> SignalingInterest(IAgent currentAgent, DecisionOption decisionOption, Dictionary <IAgent, AgentState> agentStates)
{
    var scope = decisionOption.Scope;
    var informedAgents = new List<IAgent>();

    var sameScopeNeighbours = currentAgent.ConnectedAgents
        .Where(connected => connected[scope] == currentAgent[scope] || scope == null);

    foreach (IAgent neighbour in sameScopeNeighbours)
    {
        // Skip neighbours that already know this option.
        if (neighbour.AssignedDecisionOptions.Contains(decisionOption))
        {
            continue;
        }

        neighbour.AssignNewDecisionOption(decisionOption, currentAgent.AnticipationInfluence[decisionOption]);
        informedAgents.Add(neighbour);
    }

    return informedAgents;
}
/// <summary>
/// Records a newly innovated decision option into the algorithm model output.
/// </summary>
/// <param name="agent">Agent that performed the innovation.</param>
/// <param name="dataSet">Data set (management area) innovation ran against.</param>
/// <param name="newDecisionOption">Freshly generated decision option; null means nothing was created.</param>
protected override void AfterInnovation(IAgent agent, IDataSet dataSet, DecisionOption newDecisionOption)
{
    base.AfterInnovation(agent, dataSet, newDecisionOption);

    // Nothing was generated — nothing to record.
    if (newDecisionOption == null)
    {
        return;
    }

    var consequent = newDecisionOption.Consequent;
    var model = new NewDecisionOptionModel()
    {
        ManagementArea = dataSet.Name,
        Name = newDecisionOption.Name,
        ConsequentVariable = consequent.Param,
        // Resolve the consequent: a literal value, or the agent variable it references.
        ConsequentValue = string.IsNullOrEmpty(consequent.VariableValue)
            ? consequent.Value
            : agent[consequent.VariableValue],
        BasedOn = newDecisionOption.Origin
    };

    _algorithmModel.NewDecisionOptions.Add(model);
}
/// <summary>
/// Executes second part of action selection for specific site.
/// Verifies that an activated collective action has enough participants; when
/// it does not, the option is blocked and selection is re-run for this layer.
/// </summary>
/// <param name="agent">Agent whose activated option is being verified.</param>
/// <param name="lastIteration">Node of the iteration history.</param>
/// <param name="rankedGoals">Goals ranked by importance, per agent.</param>
/// <param name="processedDecisionOptions">Decision options of the layer being processed; must be non-empty (First() below).</param>
/// <param name="site">Site the selection applies to.</param>
public void ExecutePartII(IAgent agent, LinkedListNode <Dictionary <IAgent, AgentState> > lastIteration, Dictionary <IAgent, Goal[]> rankedGoals, DecisionOption[] processedDecisionOptions, Site site)
{
    AgentState agentState = lastIteration.Value[agent];
    DecisionOptionsHistory history = agentState.DecisionOptionsHistories[site];
    DecisionOptionLayer layer = processedDecisionOptions.First().Layer;

    // At most one option per layer may be activated (SingleOrDefault throws otherwise).
    DecisionOption selectedDecisionOptions = history.Activated.SingleOrDefault(r => r.Layer == layer);
    if (selectedDecisionOptions == null)
    {
        return;
    }

    if (selectedDecisionOptions.IsCollectiveAction)
    {
        var scope = selectedDecisionOptions.Scope;
        //counting agents which selected this decision option
        int numberOfInvolvedAgents = agent.ConnectedAgents.Where(connected => agent[scope] == connected[scope] || scope == null)
            .Count(a => lastIteration.Value[a].DecisionOptionsHistories[site].Activated.Any(decisionOption => decisionOption == selectedDecisionOptions));

        // Minus one: the current agent itself already participates.
        int requiredParticipants = selectedDecisionOptions.RequiredParticipants - 1;

        //add decision option to blocked
        if (numberOfInvolvedAgents < requiredParticipants)
        {
            history.Blocked.Add(selectedDecisionOptions);
            history.Activated.Remove(selectedDecisionOptions);

            // Re-run selection without the blocked option, then re-verify the new choice.
            // NOTE(review): recursion relies on each pass blocking one more option;
            // confirm blocked options can never be re-activated, or this could loop.
            ExecutePartI(agent, lastIteration, rankedGoals, processedDecisionOptions, site);
            ExecutePartII(agent, lastIteration, rankedGoals, processedDecisionOptions, site);
        }
    }
}
/// <summary>
/// Loads all dm_options rows belonging to the given decision id.
/// (Method name keeps its historical spelling for caller compatibility.)
/// </summary>
/// <param name="documentDecisionID">Decision id to filter by.</param>
/// <returns>Materialized list of options; empty when none match.</returns>
public IEnumerable <DecisionOption> GetDesicionOptions(int documentDecisionID)
{
    List<DecisionOption> data = new List<DecisionOption>();
    using (OracleConnection cn = new OracleConnection(ConfigurationManager.ConnectionStrings["OracleDatabase"].ConnectionString))
    {
        cn.Open();
        using (OracleCommand cmd = new OracleCommand())
        {
            cmd.Connection = cn;
            cmd.InitialLONGFetchSize = 1000;
            // Parameterized (was String.Format) so the value is bound, not inlined;
            // also lets Oracle reuse the cached statement plan.
            cmd.CommandText = "select * from dm_options t WHERE decisionid = :decisionId";
            cmd.CommandType = CommandType.Text;
            cmd.Parameters.Add("decisionId", OracleDbType.Int32).Value = documentDecisionID;

            // Dispose the reader (was leaked before) so the cursor is released.
            using (var reader = cmd.ExecuteReader())
            {
                while (reader.Read())
                {
                    var option = new DecisionOption();
                    option.ID = Convert.ToInt32(reader["ID"]);
                    option.Description = Convert.ToString(reader["Description"]);
                    option.DecisionID = Convert.ToInt32(reader["DecisionID"]);
                    option.Status = Convert.ToString(reader["Status"]);
                    // MANDATORY / WEIGHT / expresion / expresionfeild columns are not
                    // mapped yet — pending a new implementation.
                    data.Add(option);
                }
            }
        }
        // Connection is closed by the using block, even when reading throws.
    }
    return data;
}
/// <summary>
/// Applies a chosen decision option to the game state:
/// first accumulates per-faction multipliers from impactors whose required id
/// is present, then applies each impact (magnitude x multiplier) to its faction.
/// Impacts naming unknown factions are logged and skipped.
/// </summary>
/// <param name="chosen">The decision option the player selected.</param>
private void ApplyDecision(DecisionOption chosen)
{
    // Phase 1: per-faction multipliers from active impactors.
    var impactMultipliers = new Dictionary<string, float>();
    foreach (Impactor impactor in chosen.impactors)
    {
        // Impactors only fire when their prerequisite id has been collected.
        if (!GameData.HasID(impactor.requiredId))
        {
            continue;
        }
        foreach (string applicant in impactor.applicants)
        {
            // Multipliers compose multiplicatively, starting from 1.
            if (impactMultipliers.TryGetValue(applicant, out float current))
            {
                impactMultipliers[applicant] = current * impactor.multiplier;
            }
            else
            {
                impactMultipliers[applicant] = impactor.multiplier;
            }
        }
    }

    // Phase 2: apply each impact, scaled by the faction's multiplier (default 1).
    foreach (Impact impact in chosen.impacts)
    {
        if (!validFactions.Contains(impact.applicant))
        {
            Debug.Log("DecisionController WARNING: XML specifies invalid faction: " + impact.applicant);
            continue;
        }
        float multiplier = impactMultipliers.TryGetValue(impact.applicant, out float m) ? m : 1;
        uiManager.changeFactionValue(impact.applicant, impact.magnitude * multiplier);
    }
}
/// <summary>
/// Handles the player's selection: finds the first decision option whose text
/// matches and whose requirements are met, records its id, applies its effects,
/// then refreshes the decision state. The default response only refreshes.
/// </summary>
/// <param name="responseText">Text of the response the player picked.</param>
public override void select(string responseText)
{
    // The fallback response carries no effects — just refresh.
    if (responseText == DEFAULT_RESPONSE())
    {
        this.update();
        return;
    }

    DecisionOption chosen = null;
    foreach (GameObject option in operatingDecision.decisionOptions)
    {
        DecisionOption candidate = option.GetComponent<DecisionOption>();
        bool isSelectable = candidate.text == responseText && GameData.RequirementsMet(candidate.requirements);
        if (isSelectable)
        {
            chosen = candidate;
            break;
        }
    }

    if (chosen != null)
    {
        GameData.AddID(chosen.id);
        ApplyDecision(chosen);
    }
    this.update();
}
/// <summary>
/// Executes agent innovation process for specific site
/// </summary>
/// <param name="agent">The agent.</param>
/// <param name="lastIteration">The last iteration.</param>
/// <param name="goal">The goal.</param>
/// <param name="layer">The layer.</param>
/// <param name="site">The site.</param>
/// <param name="probabilities">The probabilities.</param>
/// <exception cref="Exception">Not implemented for AnticipatedDirection == 'stay'</exception>
public void Execute(IAgent agent, LinkedListNode <Dictionary <IAgent, AgentState <TSite> > > lastIteration, Goal goal, DecisionOptionLayer layer, TSite site, Probabilities probabilities)
{
    Dictionary <IAgent, AgentState <TSite> > currentIteration = lastIteration.Value;
    // NOTE(review): assumes a prior iteration exists (lastIteration.Previous != null) — TODO confirm.
    Dictionary <IAgent, AgentState <TSite> > priorIteration = lastIteration.Previous.Value;

    //gets prior period activated decision options
    DecisionOptionsHistory history = priorIteration[agent].DecisionOptionsHistories[site];
    DecisionOption protDecisionOption = history.Activated.FirstOrDefault(r => r.Layer == layer);

    LinkedListNode <Dictionary <IAgent, AgentState <TSite> > > tempNode = lastIteration.Previous;

    //if prior period decision option is do nothing then looking for any do something decision option
    // NOTE(review): Single throws when an earlier period has zero (or multiple)
    // activations for this layer, unlike the FirstOrDefault above — confirm intended.
    while (protDecisionOption == null && tempNode.Previous != null)
    {
        tempNode = tempNode.Previous;
        history = tempNode.Value[agent].DecisionOptionsHistories[site];
        protDecisionOption = history.Activated.Single(r => r.Layer == layer);
    }

    //if activated DO is missed, then select random DO
    // NOTE(review): if protDecisionOption is still null here, the Layer access
    // below throws NullReferenceException — confirm callers guarantee at least
    // one activation exists in some prior period.
    if (!agent.AssignedDecisionOptions.Contains(protDecisionOption))
    {
        protDecisionOption = agent.AssignedDecisionOptions.Where(a => a.Layer == protDecisionOption.Layer)
            .RandomizeOne();
    }

    //if the layer or prior period decision option are modifiable then generate new decision option
    if (layer.LayerConfiguration.Modifiable || (!layer.LayerConfiguration.Modifiable && protDecisionOption.IsModifiable))
    {
        DecisionOptionLayerConfiguration parameters = layer.LayerConfiguration;

        Goal selectedGoal = goal;
        GoalState selectedGoalState = lastIteration.Value[agent].GoalsState[selectedGoal];

        #region Generating consequent
        int min = parameters.MinValue(agent);
        int max = parameters.MaxValue(agent);

        // Consequent value is either a literal or read from an agent variable.
        double consequentValue = string.IsNullOrEmpty(protDecisionOption.Consequent.VariableValue)
            ? protDecisionOption.Consequent.Value
            : agent[protDecisionOption.Consequent.VariableValue];
        double newConsequent = consequentValue;

        ExtendedProbabilityTable <int> probabilityTable =
            probabilities.GetExtendedProbabilityTable <int>(SosielProbabilityTables.GeneralProbabilityTable);

        // Smallest step representable at the configured precision.
        double minStep = Math.Pow(0.1d, parameters.ConsequentPrecisionDigitsAfterDecimalPoint);

        // Move the consequent in the anticipated direction, honoring the sign of
        // the consequent/goal relationship and the [min, max] bounds. When the
        // value already sits at the relevant bound, no new option can be generated.
        switch (selectedGoalState.AnticipatedDirection)
        {
            case AnticipatedDirection.Up:
            {
                if (DecisionOptionLayerConfiguration.ConvertSign(parameters.ConsequentRelationshipSign[goal.Name]) == ConsequentRelationship.Positive)
                {
                    if (consequentValue == max) { return; }
                    newConsequent = probabilityTable.GetRandomValue(consequentValue + minStep, max, false);
                }
                if (DecisionOptionLayerConfiguration.ConvertSign(parameters.ConsequentRelationshipSign[goal.Name]) == ConsequentRelationship.Negative)
                {
                    if (consequentValue == min) { return; }
                    newConsequent = probabilityTable.GetRandomValue(min, consequentValue - minStep, true);
                }
                break;
            }
            case AnticipatedDirection.Down:
            {
                if (DecisionOptionLayerConfiguration.ConvertSign(parameters.ConsequentRelationshipSign[goal.Name]) == ConsequentRelationship.Positive)
                {
                    if (consequentValue == min) { return; }
                    newConsequent = probabilityTable.GetRandomValue(min, consequentValue - minStep, true);
                }
                if (DecisionOptionLayerConfiguration.ConvertSign(parameters.ConsequentRelationshipSign[goal.Name]) == ConsequentRelationship.Negative)
                {
                    if (consequentValue == max) { return; }
                    newConsequent = probabilityTable.GetRandomValue(consequentValue + minStep, max, false);
                }
                break;
            }
            default:
            {
                throw new Exception("Not implemented for AnticipatedDirection == 'stay'");
            }
        }

        newConsequent =
            Math.Round(newConsequent, parameters.ConsequentPrecisionDigitsAfterDecimalPoint);

        DecisionOptionConsequent consequent = DecisionOptionConsequent.Renew(protDecisionOption.Consequent, newConsequent);
        #endregion

        #region Generating antecedent
        List <DecisionOptionAntecedentPart> antecedentList = new List <DecisionOptionAntecedentPart>(protDecisionOption.Antecedent.Length);

        bool isTopLevelDO = protDecisionOption.Layer.PositionNumber == 1;
        foreach (DecisionOptionAntecedentPart antecedent in protDecisionOption.Antecedent)
        {
            // Top-level options keep the literal antecedent constant; deeper
            // layers re-read it from the agent's variables.
            dynamic newConst = isTopLevelDO ? antecedent.Value : agent[antecedent.Param];
            DecisionOptionAntecedentPart newAntecedent = DecisionOptionAntecedentPart.Renew(antecedent, newConst);
            antecedentList.Add(newAntecedent);
        }
        #endregion

        // NOTE(review): agentState is assigned but never used below.
        AgentState <TSite> agentState = currentIteration[agent];
        DecisionOption newDecisionOption = DecisionOption.Renew(protDecisionOption, antecedentList.ToArray(), consequent);

        //change base ai values for the new decision option
        double consequentChangeProportion;
        if (consequentValue == 0)
        {
            consequentChangeProportion = 0;
        }
        else
        {
            consequentChangeProportion = Math.Abs(newDecisionOption.Consequent.Value - consequentValue) / consequentValue;
        }

        Dictionary <Goal, double> baseAI = agent.AnticipationInfluence[protDecisionOption];
        Dictionary <Goal, double> proportionalAI = new Dictionary <Goal, double>();

        // Scale each goal's anticipated influence by the proportional change of
        // the consequent, pushing it away from zero for "Up" and toward zero for "Down".
        agent.AssignedGoals.ForEach(g =>
        {
            double ai = baseAI[g];
            // ConsequentRelationship relationship = DecisionOptionLayerConfiguration.ConvertSign(protDecisionOption.Layer.LayerConfiguration.ConsequentRelationshipSign[g.Name]);
            double difference = ai * consequentChangeProportion;
            switch (selectedGoalState.AnticipatedDirection)
            {
                case AnticipatedDirection.Up:
                {
                    if (ai >= 0) { ai += difference; } else { ai -= difference; }
                    break;
                }
                case AnticipatedDirection.Down:
                {
                    if (ai >= 0) { ai -= difference; } else { ai += difference; }
                    break;
                }
            }
            proportionalAI.Add(g, ai);
        });

        //add the generated decision option to the prototype's mental model and assign
        //one to the agent's mental model
        if (agent.Prototype.IsSimilarDecisionOptionExists(newDecisionOption) == false)
        {
            //add to the prototype and assign to current agent
            agent.AddDecisionOption(newDecisionOption, layer, proportionalAI);
        }
        else if (agent.AssignedDecisionOptions.Any(decisionOption => decisionOption == newDecisionOption) == false)
        {
            var kh = agent.Prototype.DecisionOptions.FirstOrDefault(h => h == newDecisionOption);
            //assign to current agent only
            agent.AssignNewDecisionOption(kh, proportionalAI);
        }

        if (layer.Set.Layers.Count > 1)
        {
            //set consequent to actor's variables for next layers
            newDecisionOption.Apply(agent);
        }
    }
}
/// <summary>
/// Executes first part of action selection for specific agent and site.
/// Filters the layer's decision options by match and blocked state, picks one
/// (directly, or via tendency-specific logic when several match), records it
/// in the site history, and propagates collective actions to connected agents.
/// </summary>
/// <param name="agent">Agent making the selection.</param>
/// <param name="lastIteration">Node of the iteration history.</param>
/// <param name="rankedGoals">Goals ranked by importance, per agent.</param>
/// <param name="processedDecisionOptions">Decision options of the layer being processed; must be non-empty.</param>
/// <param name="site">Site the selection applies to.</param>
public void ExecutePartI(IAgent agent, LinkedListNode <Dictionary <IAgent, AgentState> > lastIteration, Dictionary <IAgent, Goal[]> rankedGoals, DecisionOption[] processedDecisionOptions, Site site)
{
    decisionOptionForActivating = null;

    AgentState agentState = lastIteration.Value[agent];
    AgentState priorPeriod = lastIteration.Previous?.Value[agent];

    //adds new decisionOption history for specific site if it doesn't exist
    if (agentState.DecisionOptionsHistories.ContainsKey(site) == false)
    {
        agentState.DecisionOptionsHistories.Add(site, new DecisionOptionsHistory());
    }
    DecisionOptionsHistory history = agentState.DecisionOptionsHistories[site];

    processedGoal = rankedGoals[agent].First(g => processedDecisionOptions.First().Layer.Set.AssociatedWith.Contains(g));
    goalState = agentState.GoalsState[processedGoal];

    matchedDecisionOptions = processedDecisionOptions.Except(history.Blocked).Where(h => h.IsMatch(agent)).ToArray();
    if (matchedDecisionOptions.Length == 0)
    {
        return;
    }

    if (matchedDecisionOptions.Length > 1)
    {
        if (priorPeriod != null)
        {
            priorPeriodActivatedDecisionOption = priorPeriod.DecisionOptionsHistories[site].Activated.FirstOrDefault(r => r.Layer == processedDecisionOptions.First().Layer);
        }
        //set anticipated influence before execute specific logic
        anticipatedInfluence = agent.AnticipationInfluence;
        SpecificLogic(processedGoal.Tendency);
    }
    else
    {
        decisionOptionForActivating = matchedDecisionOptions[0];
    }

    // BUG FIX: Apply() was previously invoked BEFORE the null check below,
    // throwing NullReferenceException whenever SpecificLogic selected nothing.
    if (decisionOptionForActivating != null)
    {
        if (processedDecisionOptions.First().Layer.Set.Layers.Count > 1)
        {
            decisionOptionForActivating.Apply(agent);
        }
        history.Activated.Add(decisionOptionForActivating);
    }
    history.Matched.AddRange(matchedDecisionOptions);

    if (decisionOptionForActivating != null && decisionOptionForActivating.IsCollectiveAction)
    {
        // Share the collective action; agents that accepted it re-run selection
        // for the affected layer with their previous choice cleared.
        var agents = SignalingInterest(agent, decisionOptionForActivating, lastIteration.Value);
        foreach (var a in agents)
        {
            var agentHistory = lastIteration.Value[a].DecisionOptionsHistories[site];
            var layer = decisionOptionForActivating.Layer;
            if (agentHistory.Activated.Any(h => h.Layer == layer))
            {
                //clean previous choice
                agentHistory.Activated.RemoveAll(h => h.Layer == layer);
                agentHistory.Matched.RemoveAll(h => h.Layer == layer);

                var decisionOpts = a.AssignedDecisionOptions.Where(h => h.Layer == layer).ToArray();
                ExecutePartI(a, lastIteration, rankedGoals, decisionOpts, site);
            }
        }
    }
}
/// <summary>
/// Loads a chapter XML file and instantiates GameObjects for each top-level
/// list: evidence, correlations, decisions (with their options, requirements,
/// impactors and impacts) and phases. Created objects are registered in the
/// 'objects' dictionary by id.
/// </summary>
/// <param name="file">Path of the XML file to read.</param>
void ReadFile(string file)
{
    filepath = file;
    using (XmlReader reader = XmlReader.Create(filepath))
    {
        GameObject temp;
        String tempid;
        // print("name1 <" + reader.Name + "> <" + reader.NodeType + ">");
        Read(reader);
        chapterTitle = reader.GetAttribute("title");
        chapterId = reader.GetAttribute("id");
        // Debug.Log("Begining read");
        int x = 0;
        // Iterate over the top-level <list> elements.
        while (reader.Name == "list" || Read(reader))
        {
            x++;
            //Debug.Log("XMLRead: Reading a list!! " + reader.GetAttribute("type"));
            // Safety valve: never process more than five lists.
            if (x > 5)
            {
                Debug.Log("broke");
                break;
            }
            //print(getReaderState(reader) + " \"" + reader.GetAttribute("type") + "\"");
            if (reader.GetAttribute("type") == "evidence")
            {
                while (Read(reader) && reader.Name == "evidence")
                {
                    //Creates evidence gameobjects
                    tempid = reader.GetAttribute("id");
                    temp = new GameObject("evidence");
                    temp.AddComponent <Evidence>();
                    temp.GetComponent <Evidence>().id = tempid;
                    temp.GetComponent <Evidence>().filepath = reader.GetAttribute("filepath");
                    temp.GetComponent <Evidence>().text = reader.GetAttribute("text");
                    objects.Add(tempid, temp);
                }
            }
            else if (reader.GetAttribute("type") == "correlation")
            {
                while (Read(reader) && reader.Name == "correlation")
                {
                    // Debug.Log("Reading a correlation");
                    //Creates correlation gameobjects
                    tempid = reader.GetAttribute("id");
                    temp = new GameObject("correlation");
                    temp.AddComponent <Correlation>();
                    temp.GetComponent <Correlation>().id = tempid;
                    temp.GetComponent <Correlation>().evidence0 = reader.GetAttribute("ev0");
                    temp.GetComponent <Correlation>().evidence1 = reader.GetAttribute("ev1");
                    objects.Add(tempid, temp);
                    validCorrelations.Add(temp.GetComponent <Correlation>());
                }
            }
            else if (reader.GetAttribute("type") == "decision")
            {
                Read(reader);
                while (reader.Name == "decision")
                {
                    // print("0 " + getReaderState(reader));
                    // Debug.Log("Reading a decision");
                    //Creates decision gameobjects
                    tempid = reader.GetAttribute("id");
                    temp = new GameObject("decision");
                    temp.AddComponent <Decision>();
                    temp.GetComponent <Decision>().id = tempid;
                    temp.GetComponent <Decision>().text = reader.GetAttribute("text");
                    temp.GetComponent <Decision>().decisionOptions = new List <GameObject>();
                    decisions.Add(temp.GetComponent <Decision>());
                    // print("1 " + getReaderState(reader));
                    // Child requirement/impact nodes attach to the most recently
                    // created option.
                    DecisionOption lastOption = null;
                    while (Read(reader) && (reader.Name == "decisionOption" || reader.Name == "requirements" || reader.Name == "impact" || reader.Name == "impactor"))
                    {
                        if (reader.Name == "decisionOption")
                        {
                            //Creates decisionOption gameobjects
                            GameObject decisionOption = new GameObject("decisionOption");
                            decisionOption.AddComponent <DecisionOption>();
                            decisionOption.GetComponent <DecisionOption>().parentId = temp.GetComponent <Decision>().id;
                            decisionOption.GetComponent <DecisionOption>().id = reader.GetAttribute("id");
                            decisionOption.GetComponent <DecisionOption>().text = reader.GetAttribute("text");
                            //decisionOption.AddComponent<Requirements>();
                            decisionOption.transform.parent = temp.transform;
                            lastOption = decisionOption.GetComponent <DecisionOption>();
                            temp.GetComponent <Decision>().decisionOptions.Add(decisionOption);
                        }
                        else if (reader.Name == "requirements")
                        {
                            lastOption.requirements = ReadRequirements(reader);
                        }
                        else if (reader.Name == "impactor")
                        {
                            Impactor imp = new Impactor();
                            imp.requiredId = reader.GetAttribute("requiredId");
                            imp.multiplier = int.Parse(reader.GetAttribute("multiplier"));
                            imp.applicants.AddRange(Regex.Split(reader.GetAttribute("applicants"), ", "));
                            lastOption.impactors.Add(imp);
                        }
                        else if (reader.Name == "impact")
                        {
                            Impact imp = new Impact();
                            imp.magnitude = int.Parse(reader.GetAttribute("magnitude"));
                            imp.applicant = reader.GetAttribute("applicant");
                            lastOption.impacts.Add(imp);
                        }
                    }
                    objects.Add(tempid, temp);
                    //print("2 " + getReaderState(reader));
                }
            }
            else if (reader.GetAttribute("type") == "phase")
            {
                while (Read(reader) && reader.Name == "phase")
                {
                    temp = new GameObject("phase");
                    temp.AddComponent <Phase>();
                    temp.GetComponent <Phase>().id = reader.GetAttribute("id");
                    temp.GetComponent <Phase>().type = reader.GetAttribute("type");
                    if (temp.GetComponent <Phase>().type == "evidence")
                    {
                        ReadEvidencePhase(reader, temp);
                    }
                    phases.Add(temp.GetComponent <Phase>());
                }
            }
            else
            {
                Read(reader);
            }
        }
    }
}
/// <summary>
/// Executes after Innovation.
/// Default implementation is a no-op; override to react to a freshly generated
/// decision option (e.g. logging or statistics collection).
/// </summary>
/// <param name="agent">Agent that ran the innovation process.</param>
/// <param name="dataSet">Data set the innovation was executed against.</param>
/// <param name="newDecisionOption">The newly generated decision option; may be null when innovation produced nothing — TODO confirm with innovation.Execute.</param>
protected virtual void AfterInnovation(IAgent agent, TDataSet dataSet, DecisionOption newDecisionOption) { }
/// <summary>
/// Executes SOSIEL Algorithm
/// Runs up to numberOfIterations iterations; each iteration performs, when the
/// corresponding process flags are enabled: anticipatory learning with
/// counterfactual thinking and innovation, social learning, two-part action
/// selection, action taking, then deactivation/reproduction bookkeeping.
/// </summary>
/// <param name="activeDataSets">Data sets the agents act upon in this run.</param>
protected void RunSosiel(ICollection <TDataSet> activeDataSets)
{
    for (int i = 1; i <= numberOfIterations; i++)
    {
        iterationCounter++;
        PreIterationCalculations(iterationCounter);
        PreIterationStatistic(iterationCounter);

        // First iteration gets a freshly initialized state; later iterations
        // start from an empty map filled from the prior iteration below.
        Dictionary <IAgent, AgentState <TDataSet> > currentIteration;
        if (iterationCounter > 1)
        {
            currentIteration = iterations.AddLast(new Dictionary <IAgent, AgentState <TDataSet> >()).Value;
        }
        else
        {
            currentIteration = iterations.AddLast(InitializeFirstIterationState()).Value;
        }
        Dictionary <IAgent, AgentState <TDataSet> > priorIteration = iterations.Last.Previous?.Value;

        IAgent[] orderedAgents = agentList.ActiveAgents.Randomize(processConfiguration.AgentRandomizationEnabled).ToArray();
        var agentGroups = orderedAgents.GroupBy(a => a.Archetype.NamePrefix).OrderBy(group => group.Key).ToArray();
        orderedAgents.ForEach(a =>
        {
            if (iterationCounter > 1)
            {
                currentIteration.Add(a, priorIteration[a].CreateForNextIteration());
            }
            currentIteration[a].RankedGoals = a.AssignedGoals.ToArray();
        });

        if (processConfiguration.UseDemographicProcesses && iterationCounter > 1)
        {
            demographic.ChangeDemographic(iterationCounter, currentIteration, agentList);
        }

        TDataSet[] orderedDataSets = activeDataSets.Randomize().ToArray();
        TDataSet[] notDataSetOriented = new TDataSet[] { DefaultDataSet };

        if (iterationCounter == 1)
        {
            foreach (var agentGroup in agentGroups)
            {
                foreach (IAgent agent in agentGroup)
                {
                    currentIteration[agent].RankedGoals = gs.SortByImportance(agent, currentIteration[agent].GoalsState).ToArray();
                }
            }
        }

        if (processConfiguration.AnticipatoryLearningEnabled && iterationCounter > 1)
        {
            //1st round: AL, CT, IR
            foreach (var agentGroup in agentGroups)
            {
                foreach (IAgent agent in agentGroup)
                {
                    //anticipatory learning process
                    al.Execute(agent, iterations.Last);

                    var agentGoalState = currentIteration[agent].GoalsState;
                    //goal prioritizing
                    gp.Prioritize(agent, agentGoalState);
                    //goal selecting
                    currentIteration[agent].RankedGoals = gs.SortByImportance(agent, agentGoalState).ToArray();

                    if (processConfiguration.CounterfactualThinkingEnabled)
                    {
                        if (currentIteration[agent].RankedGoals.Any(g => currentIteration[agent].GoalsState.Any(kvp => kvp.Value.Confidence == false)))
                        {
                            foreach (TDataSet dataSet in GetDataSets(agent, orderedDataSets, notDataSetOriented))
                            {
                                BeforeCounterfactualThinking(agent, dataSet);
                                foreach (var set in agent.AssignedDecisionOptions.GroupBy(h => h.Layer.Set).OrderBy(g => g.Key.PositionNumber))
                                {
                                    //optimization
                                    Goal selectedGoal = currentIteration[agent].RankedGoals.First(g => set.Key.AssociatedWith.Contains(g));
                                    GoalState selectedGoalState = currentIteration[agent].GoalsState[selectedGoal];
                                    if (selectedGoalState.Confidence == false)
                                    {
                                        foreach (var layer in set.GroupBy(h => h.Layer).OrderBy(g => g.Key.PositionNumber))
                                        {
                                            if (layer.Key.LayerConfiguration.Modifiable || (!layer.Key.LayerConfiguration.Modifiable && layer.Any(r => r.IsModifiable)))
                                            {
                                                //looking for matched decision option in prior period
                                                DecisionOption[] matchedDecisionOptions = priorIteration[agent].DecisionOptionsHistories[dataSet]
                                                    .Matched.Where(h => h.Layer == layer.Key).ToArray();

                                                bool? CTResult = null;
                                                //counterfactual thinking process
                                                if (matchedDecisionOptions.Length >= 2)
                                                {
                                                    CTResult = ct.Execute(agent, iterations.Last, selectedGoal, matchedDecisionOptions, layer.Key, dataSet);
                                                }

                                                if (processConfiguration.InnovationEnabled)
                                                {
                                                    //innovation process
                                                    if (CTResult == false || matchedDecisionOptions.Length < 2)
                                                    {
                                                        DecisionOption decisionOption = innovation.Execute(agent, iterations.Last, selectedGoal, layer.Key, dataSet, probabilities);
                                                        AfterInnovation(agent, dataSet, decisionOption);
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }

        if (processConfiguration.SocialLearningEnabled && iterationCounter > 1)
        {
            //2nd round: SL
            foreach (var agentGroup in agentGroups)
            {
                foreach (IAgent agent in agentGroup)
                {
                    foreach (var set in agent.AssignedDecisionOptions.GroupBy(h => h.Layer.Set).OrderBy(g => g.Key.PositionNumber))
                    {
                        foreach (var layer in set.GroupBy(h => h.Layer).OrderBy(g => g.Key.PositionNumber))
                        {
                            //social learning process
                            sl.ExecuteLearning(agent, iterations.Last, layer.Key);
                        }
                    }
                }
            }
        }

        if (processConfiguration.DecisionOptionSelectionEnabled)
        {
            //AS part I
            foreach (var agentGroup in agentGroups)
            {
                foreach (IAgent agent in agentGroup)
                {
                    foreach (TDataSet dataSet in GetDataSets(agent, orderedDataSets, notDataSetOriented))
                    {
                        foreach (var set in agent.AssignedDecisionOptions.GroupBy(h => h.Layer.Set).OrderBy(g => g.Key.PositionNumber))
                        {
                            foreach (var layer in set.GroupBy(h => h.Layer).OrderBy(g => g.Key.PositionNumber))
                            {
                                BeforeActionSelection(agent, dataSet);
                                //satisficing
                                satisficing.ExecutePartI(agent, iterations.Last, currentIteration[agent].RankedGoals, layer.ToArray(), dataSet);
                            }
                        }
                    }
                }
            }

            if (processConfiguration.DecisionOptionSelectionPart2Enabled && iterationCounter > 1)
            {
                //4th round: AS part II
                foreach (var agentGroup in agentGroups)
                {
                    foreach (IAgent agent in agentGroup)
                    {
                        foreach (TDataSet dataSet in GetDataSets(agent, orderedDataSets, notDataSetOriented))
                        {
                            foreach (var set in agent.AssignedDecisionOptions.GroupBy(r => r.Layer.Set).OrderBy(g => g.Key.PositionNumber))
                            {
                                foreach (var layer in set.GroupBy(h => h.Layer).OrderBy(g => g.Key.PositionNumber))
                                {
                                    BeforeActionSelection(agent, dataSet);
                                    //action selection process part II
                                    satisficing.ExecutePartII(agent, iterations.Last, currentIteration[agent].RankedGoals, layer.ToArray(), dataSet);
                                }
                            }
                        }
                    }
                }
            }
        }

        if (processConfiguration.ActionTakingEnabled)
        {
            //5th round: TA
            foreach (var agentGroup in agentGroups)
            {
                foreach (IAgent agent in agentGroup)
                {
                    foreach (TDataSet dataSet in GetDataSets(agent, orderedDataSets, notDataSetOriented))
                    {
                        at.Execute(agent, currentIteration[agent], dataSet);
                        AfterActionTaking(agent, dataSet);
                    }
                }
            }
        }

        if (processConfiguration.AlgorithmStopIfAllAgentsSelectDoNothing && iterationCounter > 1)
        {
            // Stop when no agent activated any decision option this iteration.
            if (!currentIteration.SelectMany(kvp => kvp.Value.DecisionOptionsHistories.Values.SelectMany(rh => rh.Activated)).Any())
            {
                algorithmStoppage = true;
            }
        }

        PostIterationCalculations(iterationCounter);
        PostIterationStatistic(iterationCounter);

        if (processConfiguration.AgentsDeactivationEnabled && iterationCounter > 1)
        {
            AgentsDeactivation();
        }
        AfterDeactivation(iterationCounter);

        if (processConfiguration.ReproductionEnabled && iterationCounter > 1)
        {
            Reproduction(0);
        }

        if (algorithmStoppage || agentList.ActiveAgents.Length == 0)
        {
            break;
        }
        Maintenance();
    }
}
/// <summary>
/// Persists a decision option via the data-access layer and surfaces the
/// database-generated id.
/// </summary>
/// <param name="option">Option to persist.</param>
/// <returns>Id of the newly inserted row.</returns>
private int SaveOptionsToDecision(DecisionOption option)
{
    return decisionMakingDataAccess.SaveOptionsToDecision(option);
}