/// <summary>
/// Executes one step of the contingency plan: returns the next primitive
/// action, expanding nested plans and conditionals recursively until an
/// action is found.
/// </summary>
/// <param name="percept">the current percept.</param>
/// <returns>an action from the contingency plan, or NO_OP when both
/// locations are already clean or the plan is exhausted.</returns>
public override IAction Execute(IPercept percept) {
    // check if goal state: both locations clean means nothing left to do
    VacuumEnvironmentState state = (VacuumEnvironmentState)this
        .getPerceptToStateFunction()(percept);
    if (state.getLocationState(VacuumEnvironment.LOCATION_A) == VacuumEnvironment.LocationState.Clean
        && state.getLocationState(VacuumEnvironment.LOCATION_B) == VacuumEnvironment.LocationState.Clean) {
        return(DynamicAction.NO_OP);
    }
    // check stack size: refill the execution stack from the contingency
    // plan when it is empty; an empty plan means there is nothing to do
    if (this.stack.Size() < 1) {
        if (this.contingencyPlan.Size() < 1) {
            return(DynamicAction.NO_OP);
        } else {
            this.stack.Add(this.getContingencyPlan().Pop());
        }
    }
    // Inspect (without removing) the current step of the plan.
    object currentStep = this.stack.Peek();
    // case: next step is a primitive action - pop and return it
    if (currentStep is IAction) {
        return((IAction)this.stack.Pop());
    }
    // case: next step is a plan - push its first step (or discard it when
    // empty) and recurse to resolve that step into an action
    else if (currentStep is Plan) {
        Plan newPlan = (Plan)currentStep;
        if (newPlan.Size() > 0) {
            this.stack.Add(newPlan.Pop());
        } else {
            this.stack.Pop();
        }
        return(this.Execute(percept));
    }
    // case: next step is an if-then - replace it with the branch selected
    // by the current percept, then recurse
    else if (currentStep is IfStateThenPlan) {
        IfStateThenPlan conditional = (IfStateThenPlan)this.stack.Pop();
        this.stack.Add(conditional.ifStateMatches(percept));
        return(this.Execute(percept));
    }
    // case: ignore next step if null (e.g. an if-then with no matching branch)
    else if (currentStep == null) {
        this.stack.Pop();
        return(this.Execute(percept));
    } else {
        throw new RuntimeException("Unrecognized contingency plan step.");
    }
}
/// <summary>
/// Informs every registered environment view that the given agent has
/// executed the given action in response to the given percept.
/// </summary>
/// <param name="agent">the agent that acted.</param>
/// <param name="percept">the percept the agent responded to.</param>
/// <param name="action">the action the agent chose.</param>
protected virtual void NotifyEnvironmentViews(IAgent agent, IPercept percept, IAction action) {
    foreach (IEnvironmentView observer in views) {
        observer.AgentActed(agent, percept, action, this);
    }
}
// function REFLEX-VACUUM-AGENT([location, status]) returns an action
public IAction Execute(IPercept percept) {
    LocalVacuumEnvironmentPercept vep = (LocalVacuumEnvironmentPercept)percept;

    // if status = Dirty then return Suck
    if (VacuumEnvironment.LocationState.Dirty == vep.getLocationState()) {
        return(VacuumEnvironment.ACTION_SUCK);
    }
    // else if location = A then return Right
    if (VacuumEnvironment.LOCATION_A.Equals(vep.getAgentLocation())) {
        return(VacuumEnvironment.ACTION_MOVE_RIGHT);
    }
    // else if location = B then return Left
    if (VacuumEnvironment.LOCATION_B.Equals(vep.getAgentLocation())) {
        return(VacuumEnvironment.ACTION_MOVE_LEFT);
    }
    // Note: This should not be returned if the environment is correct
    // (the agent is always at A or B).
    return(DynamicAction.NO_OP);
}
// function REFLEX-VACUUM-AGENT([location, status]) returns an action
public IAction Execute(IPercept percept) {
    VacuumEnvPercept vep = (VacuumEnvPercept)percept;

    // if status = Dirty then return Suck
    if (VacuumEnvironment.LocationState.Dirty == vep.GetLocationState()) {
        return(VacuumEnvironment.ActionSuck);
    }
    // else if location = A then return Right
    if (VacuumEnvironment.LocationA == vep.GetAgentLocation()) {
        return(VacuumEnvironment.ActionMoveRight);
    }
    // else if location = B then return Left
    if (VacuumEnvironment.LocationB == vep.GetAgentLocation()) {
        return(VacuumEnvironment.ActionMoveLeft);
    }
    // Note: This should not be returned if the environment is correct
    // (the agent is always at A or B).
    return(NoOpAction.NoOp);
}
/// <summary>
/// Delegates action selection to the configured agent program; when no
/// program is set, the agent simply does nothing.
/// </summary>
/// <param name="p">the current percept.</param>
/// <returns>the program's chosen action, or a no-op.</returns>
public virtual IAction Execute(IPercept p) {
    return this.program == null ? NoOpAction.NoOp : this.program.Execute(p);
}
/// <summary>
/// Replays the next pre-recorded action from the enumerator; once the
/// sequence is exhausted, the agent keeps returning a no-op.
/// </summary>
/// <param name="p">the current percept (ignored).</param>
/// <returns>the next scripted action, or a no-op.</returns>
public override IAction Execute(IPercept p) {
    return this.actionEnumerator.MoveNext()
        ? this.actionEnumerator.Current
        : NoOpAction.NoOp;
}
/// <summary>
/// Pops and returns the next action of the precomputed plan; an empty
/// plan means the search failed or the goal was reached.
/// </summary>
/// <param name="p">the current percept (ignored).</param>
/// <returns>the next planned action, or NO_OP.</returns>
public override IAction Execute(IPercept p) {
    // no success or at goal
    return actionList.IsEmpty() ? DynamicAction.NO_OP : actionList.Pop();
}
/// <summary>
/// Persists one decision as two JSON lines: first the percept's double
/// array, then the action's raw payload.
/// </summary>
/// <param name="percept">the percept that was observed.</param>
/// <param name="action">the action that was chosen.</param>
public void SaveDecision(IPercept percept, IAction action) {
    _writer.WriteLine(JsonConvert.SerializeObject(percept.ToDoubleArray()));
    _writer.WriteLine(JsonConvert.SerializeObject(action.Raw));
}
/// <summary>
/// Delegates action selection to the configured agent program; when no
/// program is set, the agent does nothing.
/// </summary>
/// <param name="p">the current percept.</param>
/// <returns>the program's chosen action, or NO_OP.</returns>
public virtual IAction Execute(IPercept p) {
    return program == null ? DynamicAction.NO_OP : program.Execute(p);
}
// function TABLE-DRIVEN-AGENT(percept) returns an action
public IAction Execute(IPercept percept) {
    // append percept to end of percepts
    this.percepts.Add(percept);
    // action <- LOOKUP(percepts, table); return action
    return this.lookupCurrentAction();
}
/// <summary>
/// Records the location reported by the incoming dynamic percept as the
/// agent's current location in the tracked state.
/// </summary>
/// <param name="p">the current percept; must be a DynamicPercept.</param>
/// <returns>the updated state.</returns>
protected override IState UpdateState(IPercept p) {
    var percept = (DynamicPercept)p;
    var location = percept.GetAttribute(DynAttributeNames.PerceptIn);
    state.SetAttribute(DynAttributeNames.AgentLocation, location);
    return state;
}
// function MODEL-BASED-REFLEX-AGENT(percept) returns an action
public IAction Execute(IPercept percept) {
    // state <- UPDATE-STATE(state, action, percept, model)
    state = UpdateState(state, action, percept, model);
    // rule <- RULE-MATCH(state, rules)
    // action <- rule.ACTION
    action = RuleAction(RuleMatch(state, rules));
    // return action
    return action;
}
/// <summary>
/// Writes a human-readable record of one agent step (who acted, the
/// percept seen, the action taken) to the console.
/// </summary>
public void AgentActed(IAgent agent, IPercept percept, IAction action, IEnvironment source) {
    // Agents are reported 1-based; IndexOf yields -1 (so id 0) for an
    // agent unknown to the environment.
    int agentId = source.GetAgents().IndexOf(agent) + 1;
    var message = new System.Text.StringBuilder()
        .Append("Agent ").Append(agentId).Append(" acted.")
        .Append("\n Percept: ").Append(percept.ToString())
        .Append("\n Action: ").Append(action.ToString());
    System.Console.WriteLine(message);
}
// function SIMPLE-RELEX-AGENT(percept) returns an action
public IAction Execute(IPercept percept) {
    // state <- INTERPRET-INPUT(percept)
    ObjectWithDynamicAttributes currentState = interpretInput(percept);
    // rule <- RULE-MATCH(state, rules)
    Rule matched = ruleMatch(currentState, rules);
    // action <- rule.ACTION; return action
    return ruleAction(matched);
}
// function SIMPLE-RELEX-AGENT(percept) returns an action
public IAction Execute(IPercept percept) {
    // state <- INTERPRET-INPUT(percept)
    // rule <- RULE-MATCH(state, rules)
    // action <- rule.ACTION; return action
    return ruleAction(RuleMatch(InterpretInput(percept), rules));
}
// function MODEL-BASED-REFLEX-AGENT(percept) returns an action
public IAction Execute(IPercept percept) {
    // state <- UPDATE-STATE(state, action, percept, model)
    state = updateState(state, action, percept, model);
    // rule <- RULE-MATCH(state, rules)
    // action <- rule.ACTION
    action = ruleAction(ruleMatch(state, rules));
    // return action
    return action;
}
/// <summary>
/// function SIMPLE-PROBLEM-SOLVING-AGENT(percept) returns an action.
/// Updates the tracked state, (re)formulates a goal and searches for a
/// plan when the current action sequence is empty, then consumes and
/// returns the first action of the sequence.
/// </summary>
/// <param name="p">the current percept.</param>
/// <returns>the next planned action, or a no-op when no plan exists.</returns>
public override IAction Execute(IPercept p) {
    IAction action = NoOpAction.NoOp;
    // state <- UPDATE-STATE(state, percept)
    this.UpdateState(p);
    // if seq is empty then do
    if (0 == this.seq.Count) {
        // Only formulate a new goal while the configured budget allows it.
        if (this.formulateGoalsIndefinitely || this.goalsFormulated < this.maxGoalsToFormulate) {
            // Report metrics for the previous goal before starting a new one.
            if (this.goalsFormulated > 0) {
                this.NotifyViewOfMetrics();
            }
            // goal <- FORMULATE-GOAL(state)
            var goal = this.FormulateGoal();
            this.goalsFormulated++;
            // problem <- FORMULATE-PROBLEM(state, goal)
            var problem = this.FormulateProblem(goal);
            // seq <- SEARCH(problem)
            seq = this.Search(problem);
            if (this.seq.Count == 0) {
                // Unable to identify a path: emit a single no-op so the
                // final block below still consumes an action.
                seq.Add(NoOpAction.NoOp);
            }
        } else {
            // Agent no longer wishes to achieve any more goals.
            Alive = false;
            this.NotifyViewOfMetrics();
        }
    }
    if (this.seq.Count > 0) {
        // action <- FIRST(seq)
        action = Util.Util.First(this.seq);
        // seq <- REST(seq)
        this.seq = Util.Util.Rest(this.seq);
    }
    return(action);
}
/// <summary>
/// Central template method for controlling agent simulation. The concrete
/// behavior is determined by the primitive operations
/// #getPerceptSeenBy(Agent), #executeAction(Agent, Action),
/// and #createExogenousChange().
/// </summary>
public virtual void Step() {
    foreach (IAgent anAgent in agents) {
        // Dead agents no longer perceive or act.
        if (!anAgent.IsAlive()) {
            continue;
        }
        IPercept seen = getPerceptSeenBy(anAgent);
        IAction chosen = anAgent.Execute(seen);
        executeAction(anAgent, chosen);
        NotifyEnvironmentViews(anAgent, seen, chosen);
    }
    CreateExogenousChange();
}
/// <summary>
/// Handles a state-reward percept; any other percept type is rejected.
/// When the underlying policy yields no action the agent stops.
/// </summary>
/// <param name="p">the current percept; must be an IPerceptStateReward.</param>
/// <returns>the chosen action, or NO_OP when none is available.</returns>
public override IAction Execute(IPercept p) {
    var rewardPercept = p as IPerceptStateReward<S>;
    if (rewardPercept == null) {
        throw new IllegalArgumentException("Percept passed in must be a PerceptStateReward");
    }
    IAction action = execute(rewardPercept);
    if (action == null) {
        // Nothing sensible left to do: report a no-op and stop the agent.
        action = DynamicAction.NO_OP;
        SetAlive(false);
    }
    return action;
}
// function KB-AGENT(percept) returns an action
public override IAction Execute(IPercept percept) {
    // TELL(KB, MAKE-PERCEPT-SENTENCE(percept, t))
    KB.tell(makePerceptSentence(percept, t));
    // action <- ASK(KB, MAKE-ACTION-QUERY(t))
    IAction chosen = ask(KB, makeActionQuery(t));
    // TELL(KB, MAKE-ACTION-SENTENCE(action, t))
    KB.tell(makeActionSentence(chosen, t));
    // t <- t + 1
    t++;
    // return action
    return chosen;
}
/// <summary>
/// Simple problem-solving step: update the state, (re)plan when the
/// current action sequence is empty, then emit and consume the first
/// action of the sequence.
/// </summary>
/// <param name="p">the current percept.</param>
/// <returns>the next planned action, or a fresh no-op when the plan is empty.</returns>
public IAction Exec(IPercept p) {
    var state = UpdateState(null, p);
    if (!_seq.Any()) {
        var goal = FormulateGoal(state);
        var problem = FromulateProblem(goal);
        _seq = Search(problem);
    }
    var action = _seq.FirstOrDefault() ?? new NoOpAction();
    // Materialize the remainder eagerly. The previous
    // `_seq = _seq.Skip(1).Take(seqLen)` stacked a new deferred LINQ
    // wrapper on every call and re-enumerated the entire chain each step
    // (and Take(seqLen) after Skip(1) was redundant anyway).
    _seq = _seq.Skip(1).ToList();
    return action;
}
/// <summary>
/// Simple problem-solving step: update the state, (re)plan when the
/// current action sequence is empty, then emit and consume the first
/// action of the sequence.
/// </summary>
/// <param name="p">the current percept.</param>
/// <returns>the next planned action, or a fresh no-op when the plan is empty.</returns>
public IAction Exec(IPercept p) {
    var state = UpdateState(null, p);
    if (!_seq.Any()) {
        var goal = FormulateGoal(state);
        var problem = FromulateProblem(goal);
        _seq = Search(problem);
    }
    var action = _seq.FirstOrDefault() ?? new NoOpAction();
    // Materialize the remainder eagerly. The previous
    // `_seq = _seq.Skip(1).Take(seqLen)` stacked a new deferred LINQ
    // wrapper on every call and re-enumerated the entire chain each step
    // (and Take(seqLen) after Skip(1) was redundant anyway).
    _seq = _seq.Skip(1).ToList();
    return(action);
}
// function SIMPLE-PROBLEM-SOLVING-AGENT(percept) returns an action
//
// Updates the tracked state, formulates a new goal/problem and searches
// for a plan when the current sequence is empty (subject to the
// goal-formulation budget), then pops and returns the next action.
public override IAction Execute(IPercept p) {
    IAction action = DynamicAction.NO_OP; // return value if at goal or goal not found
    // state <- UPDATE-STATE(state, percept)
    updateState(p);
    // if seq is empty then do
    if (seq.IsEmpty()) {
        // Only formulate another goal while the configured budget allows it.
        if (formulateGoalsIndefinitely || goalsFormulated < maxGoalsToFormulate) {
            // Report metrics for the previous goal before starting a new one.
            if (goalsFormulated > 0) {
                notifyViewOfMetrics();
            }
            // goal <- FORMULATE-GOAL(state)
            object goal = formulateGoal();
            goalsFormulated++;
            // problem <- FORMULATE-PROBLEM(state, goal)
            IProblem<S, A> problem = formulateProblem(goal);
            // seq <- SEARCH(problem); a null result means search failed,
            // leaving seq empty so NO_OP is returned below
            ICollection<A> actions = search(problem);
            if (null != actions) {
                seq.AddAll(actions);
            }
        } else {
            // Agent no longer wishes to achieve any more goals.
            SetAlive(false);
            notifyViewOfMetrics();
        }
    }
    if (seq.Size() > 0) {
        // action <- FIRST(seq)
        // seq <- REST(seq)
        action = seq.Pop();
    }
    return(action);
}
/// <summary>
/// One problem-solving step:
/// - update the belief state ([UpdateState(IState, IPercept) : IState])
/// - if the action plan is empty:
///   - formulate the goal ([FormulateGoal(IState) : IState])
///   - formulate the problem ([FormulateProblem(IState, IState) : IProblem])
///   - search ([AgentFunction.Search(IProblem) : List&lt;Action&gt;])
///
/// @param percept: the current percept of the environment
/// @return IAction: the next action to perform, null on failure
/// </summary>
private IAction SimpleProblemSolvingAgent(IPercept percept) {
    // belief <- UPDATE-STATE(belief, percept)
    MentalState.Belief = UpdateState(MentalState.Belief, percept);
    if (!MentalState.Intention.Any()) {
        // Plan exhausted: formulate a new goal and problem, then search.
        MentalState.Desire = FormulateGoal(MentalState.Belief);
        var problem = FormulateProblem(MentalState.Belief, MentalState.Desire);
        // A failed search leaves an empty plan rather than a null one.
        MentalState.Intention = _function.Search(problem) ?? new List<IAction>();
    }
    if (MentalState.Intention.Any()) {
        // action <- FIRST(plan); plan <- REST(plan)
        var action = MentalState.Intention.First();
        MentalState.Intention.RemoveAt(0);
        return(action);
    }
    // No plan could be found: signal failure to the caller.
    return(null);
}
/// <summary>
/// Feeds the percept through the neural network and wraps the network's
/// raw output in a NeuralNetAction.
/// </summary>
/// <param name="percept">the percept to evaluate; its double-array length
/// must equal the network's input dimension.</param>
/// <returns>the action computed by the network.</returns>
/// <exception cref="ArgumentOutOfRangeException">when the percept's size
/// does not match the configured input layer.</exception>
public NeuralNetAction Think(IPercept percept) {
    var data = percept.ToDoubleArray();
    if (data.Length != _model.GetInputDimension().c) {
        // Fix: paramName must name the offending parameter, not the
        // exception type (the original passed nameof(ArgumentOutOfRangeException)).
        throw new ArgumentOutOfRangeException(nameof(percept),
            $"Percept count should be {_model.GetInputDimension().c}, but is {data.Length}, check if the " +
            $"input layer in DefaultLayer is set to the right value (currently: {_layers[0]})");
    }
    var input = new Data2D(1, 1, _layers[0], 1);
    input.SetData(data);
    var result = (Data2D)_model.ExecuteNetwork(input);
    return(new NeuralNetAction(result.ToDoubleArray(_layers)));
}
/// <summary>
/// Template method, which corresponds to pseudo code function
/// <code>PROBLEM-SOLVING-AGENT(percept)</code>. Keeps formulating goals
/// and searching until a plan is found or no further goal exists, then
/// pops and returns the plan's next action.
/// </summary>
/// <param name="p">the current percept.</param>
/// <returns>the next planned action, or NO_OP when the agent has stopped.</returns>
public override IAction Execute(IPercept p) {
    IAction action = DynamicAction.NO_OP;
    // state <- UPDATE-STATE(state, percept)
    updateState(p);
    // if plan is empty then do (loop: retry with further goals until a
    // plan is found or the agent gives up)
    while (plan.IsEmpty()) {
        // state.goal <- FORMULATE-GOAL(state)
        object goal = formulateGoal();
        if (null != goal) {
            // problem <- FORMULATE-PROBLEM(state, goal)
            IProblem<S, A> problem = formulateProblem(goal);
            // state.plan <- SEARCH(problem)
            ICollection<A> actions = search(problem);
            if (null != actions) {
                // NOTE(review): an empty (non-null) result keeps looping;
                // assumes formulateGoal/tryWithAnotherGoal eventually
                // terminate the loop - confirm.
                plan.AddAll(actions);
            } else if (!tryWithAnotherGoal()) {
                // unable to identify a path and no alternative goal left
                SetAlive(false);
                break;
            }
        } else {
            // no further goal to achieve
            SetAlive(false);
            break;
        }
    }
    if (!plan.IsEmpty()) {
        // action <- FIRST(plan)
        // plan <- REST(plan)
        action = plan.Pop();
    }
    return(action);
}
/// <summary>
/// Records the agent's current location and that location's state in the
/// environment model, and remembers the last observed state of each of
/// the two locations.
/// </summary>
/// <returns>the updated model state.</returns>
protected override DynamicState UpdateState(DynamicState envState, IAction anAction, IPercept percept, IModel model) {
    var vep = (VacuumEnvPercept)percept;
    var location = vep.GetAgentLocation();
    var observed = vep.GetLocationState();
    envState.SetAttribute(AttributeCurrentLocation, location);
    envState.SetAttribute(AttributeCurrentState, observed);
    // Keep track of the state of the different locations.
    var trackedAttribute = VacuumEnvironment.LocationA == location
        ? AttributeStateLocationA
        : AttributeStateLocationB;
    envState.SetAttribute(trackedAttribute, observed);
    return envState;
}
/// <summary>
/// Records the agent's current location and that location's state in the
/// model, and remembers the last observed state of each of the two
/// locations.
/// </summary>
/// <returns>the updated model state.</returns>
protected override DynamicState updateState(DynamicState state, IAction anAction, IPercept percept, MODEL model) {
    var vep = (LocalVacuumEnvironmentPercept)percept;
    var observed = vep.getLocationState();
    state.SetAttribute(ATTRIBUTE_CURRENT_LOCATION, vep.getAgentLocation());
    state.SetAttribute(ATTRIBUTE_CURRENT_STATE, observed);
    // Keep track of the state of the different locations.
    var trackedAttribute = VacuumEnvironment.LOCATION_A.Equals(vep.getAgentLocation())
        ? ATTRIBUTE_STATE_LOCATION_A
        : ATTRIBUTE_STATE_LOCATION_B;
    state.SetAttribute(trackedAttribute, observed);
    return state;
}
/// <summary>
/// Appends the executed action, followed by a ":" separator, to the
/// environment-change log.
/// </summary>
public void AgentActed(IAgent agent, IPercept percept, IAction action, IEnvironment source) {
    envChanges.Append(action);
    envChanges.Append(":");
}
/// <summary>
/// Computes the agent's next internal state from its previous state and
/// the latest percept.
/// </summary>
/// <param name="s">the agent's previous internal state.</param>
/// <param name="p">the most recent percept.</param>
/// <returns>the updated internal state.</returns>
protected abstract IState UpdateState(IState s, IPercept p);
/// <summary>
/// @param state : current state of the agent (note: not needed for the vacuum robot)
/// @param percept : current percept of the environment
/// @return : the newly generated state
/// </summary>
protected abstract IState UpdateState(IState state, IPercept percept);
// function LRTA*-AGENT(s') returns an action
// inputs: s', a percept that identifies the current state
//
// Learning real-time A*: updates the cost estimate H for the previous
// state from observed transitions, then greedily picks the action with
// the lowest estimated LRTA* cost from the current state.
public override IAction Execute(IPercept psPrime) {
    object sPrime = this.PerceptToStateFunction.GetState(psPrime);
    // if GOAL-TEST(s') then return stop
    if (this.GoalTest(sPrime)) {
        a = NoOpAction.NoOp;
    } else {
        // if s' is a new state (not in H) then H[s'] <- h(s')
        if (!this.h.ContainsKey(sPrime)) {
            this.h[sPrime] = this.HeuristicFunction.H(sPrime);
        }
        // if s is not null (i.e. this is not the first step)
        if (null != this.s) {
            // result[s, a] <- s'  (record the observed transition)
            this.result[new StateAction(this.s, this.a)] = sPrime;
            // H[s] <- min over b in ACTIONS(s) of LRTA*-COST(s, b, result[s, b], H)
            // NOTE(review): this.result's indexer is assumed to handle
            // (s, b) pairs not yet recorded - confirm the map type's
            // behavior for missing keys.
            double minimum = double.MaxValue;
            foreach (IAction b in this.Actions(this.s)) {
                double cost = this.LRTACost(this.s, b, this.result[new StateAction(this.s, b)]);
                if (cost < minimum) {
                    minimum = cost;
                }
            }
            this.h[this.s] = minimum;
        }
        // a <- an action b in ACTIONS(s') that minimizes
        // LRTA*-COST(s', b, result[s', b], H)
        double min = double.MaxValue;
        // Just in case no actions are available, default to a no-op.
        this.a = NoOpAction.NoOp;
        foreach (IAction b in this.Actions(sPrime)) {
            double cost = this.LRTACost(sPrime, b, this.result[new StateAction(sPrime, b)]);
            if (cost < min) {
                min = cost;
                this.a = b;
            }
        }
    }
    // s <- s'
    this.s = sPrime;
    if (this.a.IsNoOp()) {
        // I'm either at the Goal or can't get to it,
        // which in either case I'm finished so just die.
        Alive = false;
    }
    // return a
    return(this.a);
}