/// <summary>
/// Builds the Percept describing everything the agent can currently
/// observe and needs in order to make a decision.
/// </summary>
/// <returns>A freshly populated Percept for this instant.</returns>
Percept getPerceptAtThisInstance()
{
    Percept snapshot = new Percept();

    // Distance-to-target formula kept for future team calculations:
    // float distanceFromTarget = Vector3.Distance (control.transform.position, target.transform.position);

    // Gather values in the same order the struct declares them.
    float ourHealth = control.getCurrentHealth();
    // Teammate data not collected yet:
    // public float[] teamatesHealths;
    // public float[] teamatesDistancesFromUs;
    // public float[] teamatesDistancesFromTarget;
    float enemyHealth = target.getCurrentHealth();
    // Target-ally data not collected yet:
    // public int numOfTargetAllies;
    // public float[] targetsAlliesHealths;
    // public float[] targetsAlliesDistancesFromTarget;
    // public float[] targetsAlliesDistancesFromUs;
    ChickenState enemyState = target.getCurrentChickenState();

    // Copy everything into the struct, again in declaration order.
    snapshot.curHealth = ourHealth;
    snapshot.targetHealth = enemyHealth;
    snapshot.targetState = enemyState;

    return snapshot;
}
/// <summary>
/// Called when a raid spawns: remembers what the actor heard so the AI
/// can react to it later. Sleeping actors hear nothing.
/// </summary>
public void OnRaid(RaidType raid, Location location, int turn)
{
    // Sleeping actors do not perceive raids.
    if (m_Actor.IsSleeping)
    {
        return;
    }

    // Map the raid type to the sound the actor perceives.
    string raidDesc;
    switch (raid)
    {
        case RaidType.ARMY_SUPLLIES:
            raidDesc = "a chopper hovering";
            break;
        case RaidType.BIKERS:
            raidDesc = "motorcycles coming";
            break;
        case RaidType.BLACKOPS:
            raidDesc = "a chopper hovering";
            break;
        case RaidType.GANGSTA:
            raidDesc = "cars coming";
            break;
        case RaidType.NATGUARD:
            raidDesc = "the army coming";
            break;
        case RaidType.SURVIVORS:
            raidDesc = "honking coming";
            break;
        default:
            // FIX: the message was previously passed as the constructor's
            // paramName argument, producing a misleading exception text;
            // pass the parameter name and the message separately.
            throw new ArgumentOutOfRangeException("raid", String.Format("unhandled raidtype {0}", raid.ToString()));
    }

    m_LastRaidHeard = new Percept(raidDesc, turn, location);
}
/// <summary>
/// Processes one percept: when a lidar reading shows the distance
/// shrinking fast enough that impact is predicted within 5 timestamps,
/// brakes; otherwise does nothing. Always records the latest reading.
/// </summary>
public override Actuator process(Percept p)
{
    currentTimestamp++;

    Actuator chosen = new Nothing();

    if (p is LidarSensorInput)
    {
        LidarSensorInput lidar = (LidarSensorInput)p;
        double distanceNow = lidar.DistanceTo;

        // Only predict a collision once a previous reading exists to compare with.
        if (hasState && distanceNow < prevDistance)
        {
            double closingSpeed = (prevDistance - distanceNow) / (currentTimestamp - prevDistanceTimestamp);
            double timestampsToImpact = distanceNow / closingSpeed;
            if (timestampsToImpact < 5)
            {
                chosen = new Brake();
            }
        }

        // Remember this reading for the next cycle.
        prevDistanceTimestamp = currentTimestamp;
        prevDistance = distanceNow;
        hasState = true;
    }

    return chosen;
}
/// <summary>
/// Simple combat AI: chase the nearest visible enemy, occasionally
/// idle, otherwise wander.
/// </summary>
protected override ActorAction SelectAction(RogueGame game, List <Percept> percepts)
{
    List <Percept> visibleHere = FilterSameMap(game, percepts);

    // 1. Charge in a straight line at the nearest perceived enemy.
    Percept closestFoe = FilterNearest(game, FilterEnemies(game, visibleHere));
    if (closestFoe != null)
    {
        ActorAction charge = BehaviorStupidBumpToward(game, closestFoe.Location.Position);
        if (charge != null)
        {
            m_Actor.Activity = Activity.CHASING;
            m_Actor.TargetActor = closestFoe.Percepted as Actor;
            return charge;
        }
    }

    // 2. Sometimes just stand around.
    if (game.Rules.RollChance(IDLE_CHANCE))
    {
        m_Actor.Activity = Activity.IDLE;
        return new ActionWait(m_Actor, game);
    }

    // 3. Otherwise wander aimlessly.
    m_Actor.Activity = Activity.IDLE;
    return BehaviorWander(game);
}
/// <summary>
/// Custom inspector: draws the default fields plus a collapsible list
/// of the target's percepts, green when the percept currently evaluates
/// true and red otherwise.
/// </summary>
public override void OnInspectorGUI()
{
    DrawDefaultInspector();

    Percept inspected = (Percept)target;

    GUILayout.Label("Percepts :", EditorStyles.boldLabel);
    if (GUILayout.Button(showPercept ? "Hide Percepts" : "Show Percepts"))
    {
        showPercept = !showPercept;
    }

    if (!showPercept)
    {
        return;
    }

    foreach (string key in inspected._percepts.Keys)
    {
        GUIStyle rowStyle = new GUIStyle();
        // Red by default, green when the percept delegate returns true.
        rowStyle.normal.textColor = inspected._percepts[key]()
            ? new Color(0.1f, 0.6f, 0.1f)
            : new Color(0.6f, 0.1f, 0.1f);
        EditorGUILayout.LabelField("> " + key, rowStyle);
    }
}
/// <summary>
/// Per-tick agent update: refreshes and normalizes the percept,
/// re-checks track status every 30 frames, and derives the current
/// movement speed (quarter speed off-track, with optional timed boost).
/// </summary>
protected virtual void Compute()
{
    UpdateEditorProps();

    // Perceive every tick (a 5-frame throttle was tried and disabled).
    Percept = Sensor.PerceiveEnvironment();
    Percept.Normalize();

    // Track membership is only re-evaluated every 30 frames.
    if (_frames % 30 == 0)
    {
        OnTrack = IsOnTrack();
    }
    RightDirection = DetermineDirection();
    _frames++;

    // Off-track driving is penalized with quarter speed.
    Speed = (OnTrack ? MaxSpeed : MaxSpeed / 4f) * Rigidbody.drag * 2;

    // Apply any temporary boost and count down its remaining time.
    if (SpeedIncreaseTime > 0)
    {
        Speed *= SpeedIncreaseFactor;
        SpeedIncreaseTime -= (int)(Time.fixedDeltaTime * 1000);
    }
    else
    {
        SpeedIncreaseTime = 0;
    }
}
/// <summary>
/// Serializes the percept via its ToString representation and flushes
/// it over the wire.
/// </summary>
private void sendPercept(Percept percept)
{
    string payload = percept.ToString();
    streamWriter.Write(payload);
    streamWriter.Flush();
}
//
// START-Agent

/// <summary>
/// Delegates the percept to the agent program; an agent without a
/// program simply performs no operation.
/// </summary>
public virtual Action execute(Percept p)
{
    if (program == null)
    {
        return NoOpAction.NO_OP;
    }
    return program.execute(p);
}
//
// PROTECTED METHODS
//

/// <summary>
/// Reads the PERCEPT_IN attribute off the (dynamic) percept, stores it
/// as the agent's location, and returns the shared state object.
/// </summary>
protected override State updateState(Percept p)
{
    DynamicPercept observed = (DynamicPercept)p;
    System.Object perceivedLocation = observed.getAttribute(DynAttributeNames.PERCEPT_IN);
    state.setAttribute(DynAttributeNames.AGENT_LOCATION, perceivedLocation);
    return state;
}
//START-Agent

/// <summary>
/// Runs the agent program on the percept, falling back to NoOp when no
/// program has been supplied.
/// </summary>
public virtual Action execute(Percept p)
{
    if (program == null)
    {
        return NoOpAction.NO_OP;
    }

    return program.execute(p);
}
//
// START-Agent
// Declared virtual so subclasses can override the default behavior.

/// <summary>
/// Executes the agent program for the given percept; without a program
/// the agent answers with a no-op.
/// </summary>
public virtual Action Execute(Percept p)
{
    if (program == null)
    {
        return NoOpAction.NO_OP;
    }
    return program.Execute(p);
}
// PROTECTED METHODS

/// <summary>
/// Copies the perceived location (PERCEPT_IN) into the agent state and
/// hands the state back.
/// </summary>
protected override State updateState(Percept p)
{
    DynamicPercept dynamicPercept = (DynamicPercept)p;
    state.setAttribute(
        DynAttributeNames.AGENT_LOCATION,
        dynamicPercept.getAttribute(DynAttributeNames.PERCEPT_IN));
    return state;
}
//
// START-AgentProgram
// function TABLE-DRIVEN-AGENT(percept) returns an action

/// <summary>
/// Table-driven agent program (AIMA): records the percept in the
/// percept sequence, then looks the full sequence up in the action
/// table.
/// </summary>
public Action execute(Percept percept)
{
    // append percept to end of percepts
    percepts.Add(percept);

    // action <- LOOKUP(percepts, table); return action
    return lookupCurrentAction();
}
/// <summary>
/// Appeal of running away from the target: only non-zero while the
/// target is dashing, and it grows as our remaining health shrinks.
/// </summary>
/// <returns>The appeal of evading the target.</returns>
/// <param name="percept">Percept, environment we're evaluating</param>
float appealOfEvading(Percept percept)
{
    if (target.getCurrentChickenState() != ChickenState.Dashing)
    {
        return 0;
    }

    // Lower health => stronger urge to flee.
    return 1 - (percept.curHealth / control.getMaxHealth());
}
/// <summary>
/// Fills the percept with everything the agent can perceive: one batch
/// of scene objects per known entity kind, plus the navigation nodes.
/// </summary>
/// <param name="p">Percept to populate; entities are appended to it.</param>
public void perceive(Percept p)
{
    // One perceptObjects call per perceivable kind; the string argument
    // is presumably the scene tag used for lookup — confirm against
    // perceptObjects' definition.
    p.addEntitiesRange(perceptObjects <Agent>("agent").Cast <IPerceivableEntity>().ToList());
    p.addEntitiesRange(perceptObjects <Gold> ("gold").Cast <IPerceivableEntity>().ToList());
    p.addEntitiesRange(perceptObjects <Inn> ("inn").Cast <IPerceivableEntity>().ToList());
    p.addEntitiesRange(perceptObjects <Grave> ("grave").Cast <IPerceivableEntity>().ToList());
    p.addEntitiesRange(perceptObjects <Potion> ("potion").Cast <IPerceivableEntity>().ToList());
    // Navigation graph nodes are perceivable as well.
    p.addEntitiesRange(perceptNodes().Cast <IPerceivableEntity>().ToList());
}
//
// START-AgentProgram
// function TABLE-DRIVEN-AGENT(percept) returns an action

/// <summary>
/// AIMA table-driven agent: appends the new percept to the percept
/// sequence and returns the table lookup for that sequence.
/// </summary>
public Action execute(Percept percept)
{
    percepts.Add(percept);        // append percept to end of percepts

    return lookupCurrentAction(); // action <- LOOKUP(percepts, table)
}
//
// START-AgentProgram
// function SIMPLE-RELEX-AGENT(percept) returns an action

/// <summary>
/// Simple reflex agent program (AIMA): interpret the percept, match a
/// rule against the resulting state, and return that rule's action.
/// </summary>
public Action execute(Percept percept)
{
    // state <- INTERPRET-INPUT(percept)
    ObjectWithDynamicAttributes currentState = interpretInput(percept);

    // rule <- RULE-MATCH(state, rules)
    Rule matched = ruleMatch(currentState, rules);

    // action <- rule.ACTION; return action
    return ruleAction(matched);
}
/// <summary>
/// Returns the next scripted action name, or "NoOp" once the action
/// iterator is exhausted. The percept is ignored.
/// </summary>
public string execute(Percept p)
{
    if (!actionIterator.MoveNext())
    {
        return "NoOp";
    }
    return actionIterator.Current.ToString();
}
//START-AgentProgram
// function MODEL-BASED-REFLEX-AGENT(percept) returns an action

/// <summary>
/// Model-based reflex agent program (AIMA): folds the percept into the
/// internal state via the model, matches a rule, and remembers the
/// chosen action for the next update.
/// </summary>
public Action execute(Percept percept)
{
    // state <- UPDATE-STATE(state, action, percept, model)
    state = updateState(state, action, percept, model);

    // rule <- RULE-MATCH(state, rules)
    Rule matched = ruleMatch(state, rules);

    // action <- rule.ACTION; return action
    action = ruleAction(matched);
    return action;
}
/// <summary>
/// Demo agent program: walks a fixed action script, emitting "NoOp"
/// forever after the script runs out. The percept is unused.
/// </summary>
public string execute(Percept p)
{
    return actionIterator.MoveNext()
        ? actionIterator.Current.ToString()
        : "NoOp";
}
// START-AgentProgram
// function SIMPLE-RELEX-AGENT(percept) returns an action

/// <summary>
/// Simple reflex agent (AIMA): condition-action rules applied directly
/// to the interpretation of the current percept.
/// </summary>
public Action execute(Percept percept)
{
    // state <- INTERPRET-INPUT(percept)
    ObjectWithDynamicAttributes interpreted = interpretInput(percept);

    // rule <- RULE-MATCH(state, rules); action <- rule.ACTION
    return ruleAction(ruleMatch(interpreted, rules));
}
/// <summary>
/// Serializes every unit's type, percepts, actions and non-terminal
/// actions into "&lt;game&gt;.gameset" under StreamingAssets/Warbot.
/// Each unit is written as a &lt; ... &gt; delimited record.
/// </summary>
public void SaveGameFile()
{
    string path = Application.streamingAssetsPath + "/Warbot/" + _gameName + ".gameset";

    // FIX: the writer was never disposed on an exception path; the
    // using block guarantees the file handle is released.
    using (StreamWriter writer = new StreamWriter(path, false))
    {
        foreach (GameObject unit in _listUnitGameObject)
        {
            writer.WriteLine("<");
            writer.WriteLine(unit.GetComponent <Stats>()._unitType);

            // Collect the percepts.
            Percept unitPercepts = unit.GetComponent <Percept>();
            unitPercepts.InitPercept();
            foreach (string s in unitPercepts._percepts.Keys)
            {
                if (s.Contains("PERCEPT"))
                {
                    writer.WriteLine("[PERCEPTS]" + s);
                }
                if (s.Contains("MESSAGE"))
                {
                    writer.WriteLine("[MESSAGE]ACTN_" + s.Replace("PERCEPT_", ""));
                }
            }

            // Collect the actions.
            Action unitAction = unit.GetComponent <Action>();
            unitAction.InitAction();
            foreach (string s in unitAction._actions.Keys)
            {
                writer.WriteLine("[ACTIONS]" + s);
            }

            // Collect the non-terminal actions.
            ActionNonTerminal unitActionNonTerminal = unit.GetComponent <ActionNonTerminal>();
            unitActionNonTerminal.InitActionNonTerminal();
            foreach (string s in unitActionNonTerminal._actionsNT.Keys)
            {
                if (!s.Contains("ACTN_MESSAGE_"))
                {
                    writer.WriteLine("[ANT]" + s);
                }
            }

            writer.WriteLine(">");
        }
    }

    print("Done !");
}
/// <summary>
/// Plays back a predetermined list of actions, answering NoOp once the
/// list is exhausted. The percept is ignored.
/// </summary>
public override Action execute(Percept p)
{
    if (!actionIterator.MoveNext())
    {
        return NoOpAction.NO_OP;
    }
    return actionIterator.Current;
}
/// <summary>
/// Emits the next action from the fixed script; once the iterator runs
/// dry, every subsequent call yields NoOp.
/// </summary>
public override Action execute(Percept p)
{
    bool hasMore = actionIterator.MoveNext();
    return hasMore ? actionIterator.Current : NoOpAction.NO_OP;
}
//START-AgentProgram
// function MODEL-BASED-REFLEX-AGENT(percept) returns an action

/// <summary>
/// Model-based reflex agent (AIMA): updates the internal state from the
/// percept and the previous action, then acts on the first matching
/// rule. The chosen action is stored for the next state update.
/// </summary>
public Action execute(Percept percept)
{
    // state <- UPDATE-STATE(state, action, percept, model)
    state = updateState(state, action, percept, model);

    // rule <- RULE-MATCH(state, rules)
    // action <- rule.ACTION; return action
    action = ruleAction(ruleMatch(state, rules));
    return action;
}
/// <summary>
/// Loads this unit's behaviour instructions from the team manager (when
/// the unit's team exists) and caches the sibling components used to
/// run them.
/// </summary>
public void LoadBehaviour()
{
    // Hoisted lookups: the original re-ran GameObject.Find +
    // GetComponent for every access in the same call.
    Stats stats = GetComponent <Stats>();
    TeamManager teamManager = GameObject.Find("GameManager").GetComponent <TeamManager>();

    if (stats._teamIndex < teamManager._teams.Count)
    {
        _instructions = teamManager.getUnitsBevahiours(stats._teamIndex, stats._unitType);
    }

    _componentPercepts = GetComponent <Percept>();
    _componentActions = GetComponent <Action>();
    _componentActionsNonTerminales = GetComponent <ActionNonTerminal>();
    //_messageManager = new MessageManager(this.gameObject);
    _messageManager = GetComponent <MessageManager>();
}
// function KB-AGENT(percept) returns an action

/// <summary>
/// Knowledge-based agent (AIMA): tells the KB what was perceived, asks
/// it for the next action, records that action, and advances time.
/// </summary>
public override Action Execute(Percept percept)
{
    // TELL(KB, MAKE-PERCEPT-SENTENCE(percept, t))
    KB.Tell(MakePerceptSentence(percept, t));

    // action <- ASK(KB, MAKE-ACTION-QUERY(t))
    Action chosen = Ask(KB, MakeActionQuery(t));

    // TELL(KB, MAKE-ACTION-SENTENCE(action, t))
    KB.Tell(MakeActionSentence(chosen, t));

    // t <- t + 1
    t++;

    return chosen;
}
// function KB-AGENT(percept) returns an action

/// <summary>
/// Knowledge-based agent (AIMA fig 7.1): record the percept, query the
/// KB for an action, record that action, advance the time step.
/// </summary>
public override Action execute(Percept percept)
{
    // TELL(KB, MAKE-PERCEPT-SENTENCE(percept, t))
    KB.tell(makePerceptSentence(percept, t));

    // action <- ASK(KB, MAKE-ACTION-QUERY(t))
    Action nextAction = ask(KB, makeActionQuery(t));

    // TELL(KB, MAKE-ACTION-SENTENCE(action, t))
    KB.tell(makeActionSentence(nextAction, t));

    // t <- t + 1
    t++;

    // return action
    return nextAction;
}
// function SIMPLE-PROBLEM-SOLVING-AGENT(percept) returns an action

/// <summary>
/// Simple problem-solving agent (AIMA): keeps a plan of actions in seq;
/// when the plan is empty it formulates a new goal and searches for a
/// fresh plan, and it dies once it no longer wants any goals.
/// </summary>
public override Action execute(Percept p)
{
    Action action = NoOpAction.NO_OP;

    // state <- UPDATE-STATE(state, percept)
    updateState(p);

    // if seq is empty then do
    if (seq.Count == 0)
    {
        bool wantsAnotherGoal = formulateGoalsIndefinitely || goalsFormulated < maxGoalsToFormulate;
        if (!wantsAnotherGoal)
        {
            // Agent no longer wishes to achieve any more goals.
            setAlive(false);
            notifyViewOfMetrics();
        }
        else
        {
            if (goalsFormulated > 0)
            {
                notifyViewOfMetrics();
            }

            // goal <- FORMULATE-GOAL(state)
            System.Object goal = formulateGoal();
            goalsFormulated++;

            // problem <- FORMULATE-PROBLEM(state, goal)
            Problem problem = formulateProblem(goal);

            // seq <- SEARCH(problem)
            seq.AddRange(search(problem));
            if (seq.Count == 0)
            {
                // Unable to identify a path.
                seq.Add(NoOpAction.NO_OP);
            }
        }
    }

    if (seq.Count > 0)
    {
        // action <- FIRST(seq); seq <- REST(seq)
        action = Util.first(seq);
        seq = Util.rest(seq);
    }

    return action;
}
// function SIMPLE-PROBLEM-SOLVING-AGENT(percept) returns an action

/// <summary>
/// Simple problem-solving agent (AIMA): maintains an action plan in
/// seq; when it runs out, formulates the next goal (up to
/// maxGoalsToFormulate unless unlimited), searches for a new plan, and
/// emits the plan's first action.
/// </summary>
public override Action execute(Percept p)
{
    Action nextAction = NoOpAction.NO_OP;

    // state <- UPDATE-STATE(state, percept)
    updateState(p);

    // if seq is empty then do
    if (seq.Count == 0)
    {
        if (formulateGoalsIndefinitely || goalsFormulated < maxGoalsToFormulate)
        {
            if (goalsFormulated > 0)
            {
                notifyViewOfMetrics();
            }

            // goal <- FORMULATE-GOAL(state)
            System.Object nextGoal = formulateGoal();
            goalsFormulated++;

            // problem <- FORMULATE-PROBLEM(state, goal)
            Problem nextProblem = formulateProblem(nextGoal);

            // seq <- SEARCH(problem)
            seq.AddRange(search(nextProblem));
            if (seq.Count == 0)
            {
                // Unable to identify a path.
                seq.Add(NoOpAction.NO_OP);
            }
        }
        else
        {
            // Agent no longer wishes to achieve any more goals.
            setAlive(false);
            notifyViewOfMetrics();
        }
    }

    if (seq.Count > 0)
    {
        // action <- FIRST(seq); seq <- REST(seq)
        nextAction = Util.first(seq);
        seq = Util.rest(seq);
    }

    return nextAction;
}
/// <summary>
/// Drains the percept-request mailbox, answering each request with a
/// fresh Percept built from the current simulation state.
/// </summary>
public void generatePercepts()
{
    MailBox <PerceptRequest> requests = simulationState.perceptRequests;

    // Pop every pending request and reply with its agent's perception.
    PerceptRequest request;
    while (requests.NotEmpty())
    {
        if (!requests.NBRecv(out request))
        {
            continue;
        }
        Percept reply = new Percept(simulationState, request.agentID);
        request.agentPerceptMailbox.Send(reply);
    }
}
/// <summary>
/// Graph-navigation variant of the simple problem-solving agent: keeps
/// a plan of edges, replanning (within the goal budget) whenever the
/// plan is empty, then emits and removes the plan's first edge.
/// </summary>
public Edge execute(Percept p)
{
    Edge nextEdge = null;

    // state <- UPDATE-STATE(state, percept)
    updateState(p);

    // Replan when the current plan is exhausted.
    if (seq.Count == 0)
    {
        if (formulateGoalsIndefinitely || goalsFormulated < maxGoalsToFormulate)
        {
            // goal <- FORMULATE-GOAL(state)
            Vertex goal = formulateGoal();
            goalsFormulated++;

            // problem <- FORMULATE-PROBLEM(state, goal)
            Problem problem = formulateProblem(state, goal);

            // seq <- SEARCH(problem)
            seq.AddRange(search(problem));
            if (seq.All(e => e == null))
            {
                // Unable to identify a path.
            }
        }
        else
        {
            // Agent no longer wishes to achieve any more goals.
        }
    }

    // action <- FIRST(seq); seq <- REST(seq)
    if (seq.Count > 0)
    {
        nextEdge = seq.First();
        seq = seq.Skip(1).ToList();
    }

    return nextEdge;
}
/// <summary>
/// Edge-planning agent: replans a route (within the goal budget) when
/// the current edge sequence is empty, then pops and returns its head.
/// </summary>
public Edge execute(Percept p)
{
    // state <- UPDATE-STATE(state, percept)
    updateState(p);

    // Replan if the plan has been consumed.
    if (seq.Count == 0)
    {
        bool mayFormulate = formulateGoalsIndefinitely || goalsFormulated < maxGoalsToFormulate;
        if (mayFormulate)
        {
            // goal <- FORMULATE-GOAL(state)
            Vertex nextGoal = formulateGoal();
            goalsFormulated++;

            // problem <- FORMULATE-PROBLEM(state, goal); seq <- SEARCH(problem)
            seq.AddRange(search(formulateProblem(state, nextGoal)));
            if (seq.All(e => e == null))
            {
                // Unable to identify a path.
            }
        }
        // else: agent no longer wishes to achieve any more goals.
    }

    // action <- FIRST(seq); seq <- REST(seq)
    Edge chosen = null;
    if (seq.Count > 0)
    {
        chosen = seq.First();
        seq = seq.Skip(1).ToList();
    }

    return chosen;
}
/// <summary>
/// Appeal of attacking the target: highest when the target is not
/// dashing, we are in melee range, and our health (or nearby backup) is
/// good; reduced but non-zero appeal while the target dashes.
/// </summary>
/// <returns>The appeal of attacking the target.</returns>
/// <param name="percept">Percept, environment we're evaluating</param>
float appealOfAttacking(Percept percept)
{
    float healthRatio = percept.curHealth / control.getMaxHealth();

    if (target.getCurrentChickenState() != ChickenState.Dashing)
    {
        // Only worth attacking when within melee reach (~3.4 units).
        float gap = Vector3.Distance(target.transform.position, control.transform.position);
        if (gap <= 3.5)
        {
            if (healthRatio >= .45F)
            {
                return 1; // Healthy enough: commit to the attack.
            }
            // Subpar HP: full commitment only with teammates nearby.
            if (percept.numOfTeamates > 1)
            {
                return 1;
            }
            return 0.5F; // Attack if we must.
        }
        return 0; // Out of range.
    }

    // Target is dashing: attack reluctantly.
    if (healthRatio >= .45F)
    {
        return 0.5F; // Moderate HP, might as well attack.
    }
    return 0.3F; // Only attack if we really need to.
}
/// <summary>
/// Appeal of attacking: full commitment when the target is not dashing,
/// we are in melee range and healthy (or have backup); reluctant,
/// reduced appeal while the target dashes.
/// </summary>
/// <returns>The appeal of attacking the target.</returns>
/// <param name="percept">Percept, environment we're evaluating</param>
float appealOfAttacking(Percept percept)
{
    float hpFraction = percept.curHealth / control.getMaxHealth();

    // Target is not dashing: attack if we can actually reach it.
    if (target.getCurrentChickenState() != ChickenState.Dashing)
    {
        float distanceToTarget = Vector3.Distance(target.transform.position, control.transform.position);
        if (distanceToTarget <= 3.5)
        {
            if (hpFraction >= .45F)
            {
                // Moderate-or-better HP: commit fully.
                return 1;
            }
            if (percept.numOfTeamates > 1)
            {
                // Subpar HP but teammates nearby: still commit.
                return 1;
            }
            return 0.5F; // Attack if we must.
        }
    }

    // Target is dashing: attack only reluctantly.
    if (target.getCurrentChickenState() == ChickenState.Dashing)
    {
        if (hpFraction >= .45F)
        {
            return 0.5F; // Moderate HP, might as well attack.
        }
        return 0.3F; // Only attack if we really need to.
    }

    return 0;
}
/// <summary>
/// One AI think step: scores attacking, evading and invading against
/// the current percept and runs the behavior with the highest appeal
/// (ties favor invading, then attacking).
/// </summary>
void AIUpdate()
{
    Percept now = getPerceptAtThisInstance();

    float attackAppeal = appealOfAttacking(now);
    float evadeAppeal = appealOfEvading(now);
    float invadeAppeal = appealOfInvading(now);

    if (invadeAppeal >= evadeAppeal && invadeAppeal >= attackAppeal)
    {
        invadeUpdate();
    }
    else if (attackAppeal >= evadeAppeal && attackAppeal >= invadeAppeal)
    {
        attackUpdate();
    }
    else if (evadeAppeal >= attackAppeal && evadeAppeal >= invadeAppeal)
    {
        evadeUpdate();
    }
}
// END-AgentProgram
//
//
// PROTECTED METHODS
//

/// <summary>
/// The percept IS the interpreted state for this agent: simply downcast
/// it to a DynamicPercept.
/// </summary>
protected ObjectWithDynamicAttributes interpretInput(Percept p)
{
    return (DynamicPercept)p;
}
/// <summary>
/// Update function for evading the target. If the AI has decided it
/// wants to run away, it keeps calling this: when close to the target
/// (within 4 units) it dashes in a random direction to break away;
/// otherwise it backs off steadily.
/// </summary>
void evadeUpdate(Percept percept)
{
    if (Vector3.Distance(target.transform.position, control.transform.position) > 4)
    {
        control.moveBackward();
        return;
    }

    // (int)Random.Range(0F, 3F) yields 0, 1 or 2 (3 only at the exact
    // float maximum). The old unreachable "case 3" duplicated case 0,
    // so dashBack is now simply the default.
    int choice = (int)Random.Range(0F, 3F);
    switch (choice)
    {
        case 1:
            control.dashLeft();
            break;

        case 2:
            control.dashRight();
            break;

        default:
            control.dashBack();
            break;
    }
}
/// <summary>
/// Gang member AI turn decision. Largely a copy of the civilian AI but
/// always courageous and eager to fight; gangs also steal items and may
/// break/push things while routing. The numbered rules below are tried
/// in order and the first applicable behavior is returned.
/// </summary>
protected override ActorAction SelectAction(RogueGame game, List <Percept> percepts)
{
    HashSet <Point> FOV = m_LOSSensor.FOV;
    List <Percept> mapPercepts = FilterSameMap(game, percepts);

    // alpha10
    // don't run by default.
    m_Actor.IsRunning = false;

    // 0. Equip best item
    ActorAction bestEquip = BehaviorEquipBestItems(game, true, true);
    if (bestEquip != null)
    {
        return(bestEquip);
    }
    // end alpha10

    // 1. Follow order
    if (this.Order != null)
    {
        ActorAction orderAction = ExecuteOrder(game, this.Order, mapPercepts, m_Exploration);
        if (orderAction == null)
        {
            SetOrder(null);
        }
        else
        {
            m_Actor.Activity = Activity.FOLLOWING_ORDER;
            return(orderAction);
        }
    }

    //////////////////////////////////////////////////////////////////////
    // partial copy of Civilian AI 8) but always courageous and gets into fights.
    // BEHAVIOR
    // - FLAGS
    //   "courageous" : always if not tired.
    // - RULES
    //   alpha10 OBSOLETE 1 equip weapon/armor
    //   2 fire at nearest.
    //   3 shout, fight or flee.
    //   4 use medecine
    //   5 rest if tired
    //   alpa10 obsolete and redundant with rule 3!! 6 charge enemy if courageous
    //   7 eat when hungry (also eat corpses)
    //   8 sleep.
    //   9 drop light/tracker with no batteries
    //   alpa10 OBSOLETE 10 equip light/tracker
    //   11 get nearby item (not if seeing enemy)
    //   12 steal item from someone.
    //   13 tear down barricade
    //   14 follow leader
    //   15 take lead (if leadership)
    //   16 (leader) don't leave follower behind.
    //   17 explore
    //   18 wander
    //////////////////////////////////////////////////////////////////////

    // get data.
    List <Percept> allEnemies = FilterEnemies(game, mapPercepts);
    List <Percept> currentEnemies = FilterCurrent(game, allEnemies);
    bool hasCurrentEnemies = currentEnemies != null;
    bool hasAnyEnemies = allEnemies != null;
    bool checkOurLeader = m_Actor.HasLeader && !DontFollowLeader;
    bool seeLeader = checkOurLeader && FOV.Contains(m_Actor.Leader.Location.Position);
    bool isLeaderFighting = checkOurLeader && IsAdjacentToEnemy(game, m_Actor.Leader);
    // NOTE(review): isCourageous is computed but not referenced below —
    // courage is hardcoded as ActorCourage.COURAGEOUS in rule 3.
    bool isCourageous = !game.Rules.IsActorTired(m_Actor);

    // exploration.
    m_Exploration.Update(m_Actor.Location);

    // alpha10 needed due to uggraded get item behavior
    // clear taboo tiles : periodically or when changing maps.
    if (m_Actor.Location.Map.LocalTime.TurnCounter % WorldTime.TURNS_PER_HOUR == 0 ||
        (PrevLocation != null && PrevLocation.Map != m_Actor.Location.Map))
    {
        ClearTabooTiles();
    }

    // 2 fire at nearest enemy (always if has leader, half of the time if not)
    if (hasCurrentEnemies && (checkOurLeader || game.Rules.RollChance(50)))
    {
        List <Percept> fireTargets = FilterFireTargets(game, currentEnemies);
        if (fireTargets != null)
        {
            Percept nearestTarget = FilterNearest(game, fireTargets);
            ActorAction fireAction = BehaviorRangedAttack(game, nearestTarget);
            if (fireAction != null)
            {
                m_Actor.Activity = Activity.FIGHTING;
                m_Actor.TargetActor = nearestTarget.Percepted as Actor;
                return(fireAction);
            }
        }
    }

    // 3 shout, fight or flee
    if (hasCurrentEnemies)
    {
        // shout? (50% chance, warn any visible friends about the nearest enemy)
        if (game.Rules.RollChance(50))
        {
            List <Percept> friends = FilterNonEnemies(game, mapPercepts);
            if (friends != null)
            {
                ActorAction shoutAction = BehaviorWarnFriends(game, friends, FilterNearest(game, currentEnemies).Percepted as Actor);
                if (shoutAction != null)
                {
                    m_Actor.Activity = Activity.IDLE;
                    return(shoutAction);
                }
            }
        }

        // fight or flee.
        // alpha10
        RouteFinder.SpecialActions allowedChargeActions = RouteFinder.SpecialActions.JUMP | RouteFinder.SpecialActions.DOORS;
        // gangs are allowed to make a mess :)
        allowedChargeActions |= RouteFinder.SpecialActions.BREAK | RouteFinder.SpecialActions.PUSH;
        ActorAction fightOrFlee = BehaviorFightOrFlee(game, currentEnemies, seeLeader, isLeaderFighting, ActorCourage.COURAGEOUS, FIGHT_EMOTES, allowedChargeActions);
        if (fightOrFlee != null)
        {
            return(fightOrFlee);
        }
    }

    // 4 use medecine
    ActorAction useMedAction = BehaviorUseMedecine(game, 2, 1, 2, 4, 2);
    if (useMedAction != null)
    {
        m_Actor.Activity = Activity.IDLE;
        return(useMedAction);
    }

    // 5 rest if tired
    // NOTE(review): computes restAction as the trigger but returns a
    // plain ActionWait instead — looks deliberate (wait to rest) but
    // verify against BehaviorRestIfTired's contract.
    ActorAction restAction = BehaviorRestIfTired(game);
    if (restAction != null)
    {
        m_Actor.Activity = Activity.IDLE;
        return(new ActionWait(m_Actor, game));
    }

    // 7 eat when hungry (also eat corpses when starving or insane)
    if (game.Rules.IsActorHungry(m_Actor))
    {
        ActorAction eatAction = BehaviorEat(game);
        if (eatAction != null)
        {
            m_Actor.Activity = Activity.IDLE;
            return(eatAction);
        }
        if (game.Rules.IsActorStarving(m_Actor) || game.Rules.IsActorInsane(m_Actor))
        {
            eatAction = BehaviorGoEatCorpse(game, FilterCorpses(game, mapPercepts));
            if (eatAction != null)
            {
                m_Actor.Activity = Activity.IDLE;
                return(eatAction);
            }
        }
    }

    // 8 sleep. (only with no known enemies, indoors, and when allowed to)
    if (!hasAnyEnemies && WouldLikeToSleep(game, m_Actor) && IsInside(m_Actor) && game.Rules.CanActorSleep(m_Actor))
    {
        // secure sleep?
        ActorAction secureSleepAction = BehaviorSecurePerimeter(game, m_LOSSensor.FOV);
        if (secureSleepAction != null)
        {
            m_Actor.Activity = Activity.IDLE;
            return(secureSleepAction);
        }

        // sleep.
        ActorAction sleepAction = BehaviorSleep(game, m_LOSSensor.FOV);
        if (sleepAction != null)
        {
            if (sleepAction is ActionSleep)
            {
                m_Actor.Activity = Activity.SLEEPING;
            }
            return(sleepAction);
        }
    }

    // 9 drop light/tracker with no batteries
    ActorAction dropOutOfBatteries = BehaviorDropUselessItem(game);
    if (dropOutOfBatteries != null)
    {
        m_Actor.Activity = Activity.IDLE;
        return(dropOutOfBatteries);
    }

    // 11 get nearby item (not if seeing enemy)
    // ignore not currently visible items & blocked items.
    // alpha10 upgraded rule to use the same new core behavior as CivilianAI with custom params
    if (!hasCurrentEnemies)
    {
        // alpha10 new common behaviour code, also used by CivilianAI, but Gangs can break and push
        ActorAction getItemAction = BehaviorGoGetInterestingItems(game, mapPercepts, true, true, CANT_GET_ITEM_EMOTE, false, ref m_DummyPerceptLastItemsSaw);
        if (getItemAction != null)
        {
            return(getItemAction);
        }
    }

    // 12 steal item from someone.
    if (!hasCurrentEnemies)
    {
        Map map = m_Actor.Location.Map;
        // Candidate victims: non-friends carrying something interesting
        // who notice us (a successful unsuspicious roll lets them pass).
        List <Percept> mayStealFrom = FilterActors(game, FilterCurrent(game, mapPercepts),
            (a) =>
            {
                if (a.Inventory == null || a.Inventory.CountItems == 0 || IsFriendOf(game, a))
                {
                    return(false);
                }
                if (game.Rules.RollChance(game.Rules.ActorUnsuspicousChance(m_Actor, a)))
                {
                    // emote.
                    game.DoEmote(a, String.Format("moves unnoticed by {0}.", m_Actor.Name));

                    // unnoticed.
                    return(false);
                }
                return(HasAnyInterestingItem(game, a.Inventory, ItemSource.ANOTHER_ACTOR));
            });
        if (mayStealFrom != null)
        {
            // alpha10 make sure to consider only reachable victims
            RouteFinder.SpecialActions allowedActions;
            allowedActions = RouteFinder.SpecialActions.ADJ_TO_DEST_IS_GOAL | RouteFinder.SpecialActions.JUMP | RouteFinder.SpecialActions.DOORS;
            // gangs can break & push stuff
            allowedActions |= RouteFinder.SpecialActions.BREAK | RouteFinder.SpecialActions.PUSH;
            FilterOutUnreachablePercepts(game, ref mayStealFrom, allowedActions);

            if (mayStealFrom.Count > 0)
            {
                // get data.
                Percept nearest = FilterNearest(game, mayStealFrom);
                Actor victim = nearest.Percepted as Actor;
                Item wantIt = FirstInterestingItem(game, victim.Inventory, ItemSource.ANOTHER_ACTOR);

                // make an enemy of him.
                game.DoMakeAggression(m_Actor, victim);

                // declare my evil intentions.
                m_Actor.Activity = Activity.CHASING;
                m_Actor.TargetActor = victim;
                return(new ActionSay(m_Actor, game, victim,
                    String.Format("Hey! That's some nice {0} you have here!", wantIt.Model.SingleName),
                    RogueGame.Sayflags.IS_IMPORTANT | RogueGame.Sayflags.IS_DANGER));
            }
        }
    }

    // 13 tear down barricade
    ActorAction attackBarricadeAction = BehaviorAttackBarricade(game);
    if (attackBarricadeAction != null)
    {
        m_Actor.Activity = Activity.IDLE;
        return(attackBarricadeAction);
    }

    // 14 follow leader
    if (checkOurLeader)
    {
        Point lastKnownLeaderPosition = m_Actor.Leader.Location.Position;
        bool isLeaderVisible = FOV.Contains(m_Actor.Leader.Location.Position);
        int maxDist = m_Actor.Leader.IsPlayer ? FOLLOW_PLAYERLEADER_MAXDIST : FOLLOW_NPCLEADER_MAXDIST;
        ActorAction followAction = BehaviorFollowActor(game, m_Actor.Leader, lastKnownLeaderPosition, isLeaderVisible, maxDist);
        if (followAction != null)
        {
            m_Actor.Activity = Activity.FOLLOWING;
            m_Actor.TargetActor = m_Actor.Leader;
            return(followAction);
        }
    }

    // 15 take lead (if leadership skill and follower capacity remain)
    bool isLeader = m_Actor.Sheet.SkillTable.GetSkillLevel((int)Skills.IDs.LEADERSHIP) >= 1;
    bool canLead = !checkOurLeader && isLeader && m_Actor.CountFollowers < game.Rules.ActorMaxFollowers(m_Actor);
    if (canLead)
    {
        Percept nearestFriend = FilterNearest(game, FilterNonEnemies(game, mapPercepts));
        if (nearestFriend != null)
        {
            ActorAction leadAction = BehaviorLeadActor(game, nearestFriend);
            if (leadAction != null)
            {
                m_Actor.Activity = Activity.IDLE;
                m_Actor.TargetActor = nearestFriend.Percepted as Actor;
                return(leadAction);
            }
        }
    }

    // 16 (leader) don't leave followers behind.
    if (m_Actor.CountFollowers > 0)
    {
        Actor target;
        ActorAction stickTogether = BehaviorDontLeaveFollowersBehind(game, 3, out target);
        if (stickTogether != null)
        {
            // emote?
            if (game.Rules.RollChance(DONT_LEAVE_BEHIND_EMOTE_CHANCE))
            {
                if (target.IsSleeping)
                {
                    game.DoEmote(m_Actor, String.Format("patiently waits for {0} to wake up.", target.Name));
                }
                else
                {
                    if (m_LOSSensor.FOV.Contains(target.Location.Position))
                    {
                        game.DoEmote(m_Actor, String.Format("Hey {0}! F*****g move!", target.Name));
                    }
                    else
                    {
                        game.DoEmote(m_Actor, String.Format("Where is that {0} retard?", target.Name));
                    }
                }
            }

            // go!
            m_Actor.Activity = Activity.IDLE;
            return(stickTogether);
        }
    }

    // 17 explore
    ActorAction exploreAction = BehaviorExplore(game, m_Exploration);
    if (exploreAction != null)
    {
        m_Actor.Activity = Activity.IDLE;
        return(exploreAction);
    }

    // 18 wander
    m_Actor.Activity = Activity.IDLE;
    return(BehaviorWander(game, m_Exploration));
}
/// <summary>
/// Extracts the agent's state from the percept: the PERCEPT_IN
/// attribute of the underlying DynamicPercept.
/// </summary>
public System.Object getState(Percept p)
{
    DynamicPercept dynamicPercept = (DynamicPercept)p;
    return dynamicPercept.getAttribute(DynAttributeNames.PERCEPT_IN);
}
/// <summary>
/// Mirrors the percept's graph and current vertex into the agent state.
/// </summary>
public void updateState(Percept c)
{
    state.Graph = c.Graph;
    state.Current = c.Current;
}
/// <summary>
/// Maps a percept to the name of the action the agent performs.
/// Implemented by concrete agent programs.
/// </summary>
/// <param name="percept">The current percept.</param>
/// <returns>The chosen action's name.</returns>
public abstract string execute(Percept percept);
/// <summary>
/// Maps a percept to the actuator command the agent issues this cycle.
/// Implemented by concrete agents.
/// </summary>
/// <param name="p">The current percept.</param>
/// <returns>The actuator command to execute.</returns>
public abstract Actuator process(Percept p);
/// <summary>
/// Computes the agent's next internal state from its previous state,
/// the last action taken, the new percept, and the world model
/// (the UPDATE-STATE step of a model-based reflex agent).
/// </summary>
protected abstract DynamicState updateState(DynamicState state, Action action, Percept percept, Model model);
/// <summary>
/// Removes the given percept from this component's percept collection.
/// </summary>
/// <param name="percept">Percept to unregister.</param>
public void RemovePercept(Percept percept)
{
    _percepts.Remove(percept);
}
// function ONLINE-DFS-AGENT(s') returns an action
// inputs: s', a percept that identifies the current state
//
// Online depth-first search agent (AIMA 3e fig 4.21). Persistent fields
// used across calls: result (the [state, action] -> state transition
// table), untried / unbacktracked (per-state pending actions and
// backtrack lists), and s / a (previous state and action).
// NOTE(review): result/untried/unbacktracked expose a Java-style map
// API (containsKey/put/get) — presumably project-provided wrappers;
// likewise unbacktracked.get(sPrime).Add(0, s) reads like a Java
// insert(index, item) — verify against the wrapper list type.
public override Action execute(Percept psPrime)
{
    Object sPrime = ptsFunction.getState(psPrime);

    // if GOAL-TEST(s') then return stop
    if (goalTest(sPrime))
    {
        a = NoOpAction.NO_OP;
    }
    else
    {
        // if s' is a new state (not in untried) then untried[s'] <- ACTIONS(s')
        if (!untried.containsKey(sPrime))
        {
            untried.put(sPrime, actions(sPrime));
        }

        // if s is not null then do
        if (null != s)
        {
            // Note: If I've already seen the result of this
            // [s, a] then don't put it back on the unbacktracked
            // list otherwise you can keep oscillating
            // between the same states endlessly.
            if (!(sPrime.Equals(result.get(s, a))))
            {
                // result[s, a] <- s'
                result.put(s, a, sPrime);

                // Ensure the unbacktracked always has a list for s'
                if (!unbacktracked.containsKey(sPrime))
                {
                    unbacktracked.put(sPrime, new List<Object>());
                }

                // add s to the front of the unbacktracked[s']
                unbacktracked.get(sPrime).Add(0, s);
            }
        }

        // if untried[s'] is empty then
        if (untried.get(sPrime).isEmpty())
        {
            // if unbacktracked[s'] is empty then return stop
            if (unbacktracked.get(sPrime).isEmpty())
            {
                a = NoOpAction.NO_OP;
            }
            else
            {
                // else a <- an action b such that result[s', b] =
                // POP(unbacktracked[s'])
                Object popped = unbacktracked.get(sPrime).remove(0);
                // Linear scan of the transition table for the action
                // that led from s' to the popped state.
                foreach (Pair<Object, Action> sa in result.keySet())
                {
                    if (sa.getFirst().Equals(sPrime) && result.get(sa).Equals(popped))
                    {
                        a = sa.getSecond();
                        break;
                    }
                }
            }
        }
        else
        {
            // else a <- POP(untried[s'])
            a = untried.get(sPrime).remove(0);
        }
    }

    if (a.isNoOp())
    {
        // I'm either at the Goal or can't get to it,
        // which in either case I'm finished so just die.
        setAlive(false);
    }

    // s <- s'
    s = sPrime;

    // return a
    return a;
}
/// <summary>
/// Evading appeal: zero unless the target is currently dashing, in
/// which case it scales up as our remaining health drops.
/// </summary>
/// <returns>The appeal of evading the target.</returns>
/// <param name="percept">Percept, environment we're evaluating</param>
float appealOfEvading(Percept percept)
{
    bool targetDashing = target.getCurrentChickenState() == ChickenState.Dashing;
    if (!targetDashing)
    {
        return 0;
    }

    float healthFraction = percept.curHealth / control.getMaxHealth();
    return 1 - healthFraction;
}
/// <summary>
/// What the appeal is to the agent for getting closer to its target.
/// The higher the rating the higher the appeal value, and the more likely
/// the agent is to invade.
/// </summary>
/// <returns>The appeal of invading the target</returns>
/// <param name="percept">Percept, environment we're evaluating</param>
float appealOfInvading(Percept percept)
{
    // Invading currently never appeals to the agent -- the evaluation
    // has not been implemented yet, so report zero appeal.
    const float noAppeal = 0f;
    return noAppeal;
}
/// <summary>
/// Feeds the given percept to the underlying agent program and returns
/// its response.
/// </summary>
/// <param name="p">The percept to act upon.</param>
/// <returns>The agent program's output for this percept.</returns>
public string execute(Percept p)
{
    // Pure delegation: all decision logic lives in the agent program.
    string response = program.execute(p);
    return response;
}
// PROTECTED METHODS

/// <summary>
/// Folds the latest percept into the agent's internal state.
/// Implemented by concrete subclasses; this base declares only the contract.
/// </summary>
/// <param name="p">The most recent percept received from the environment.</param>
/// <returns>The agent's updated internal state.</returns>
protected abstract State updateState(Percept p);
/// <summary>
/// Wraps one or more percepts into a percept-sequence list.
/// </summary>
/// <param name="percepts">The percepts making up the sequence, in order.</param>
/// <returns>A new list containing the given percepts.</returns>
private static List<Percept> createPerceptSequence(params Percept[] percepts)
{
    // 'params' mirrors the Java varargs original (Percept... percepts).
    // The previous signature took a single Percept, which cannot be
    // enumerated by foreach; existing single-percept callers still work.
    return new List<Percept>(percepts);
}
// function LRTA*-AGENT(s') returns an action
// inputs: s', a percept that identifies the current state
//
// One step of a learning real-time A* agent (AIMA 3rd ed., fig. 4.24):
// updates the learned cost table H for the previous state, then greedily
// picks the action minimising LRTA*-COST from the current state.
// NOTE(review): H/result use Java-style put/get/containsKey and
// Double.MAX_VALUE -- presumably helpers from the AIMA port; confirm.
public override Action execute(Percept psPrime)
{
    // Map the raw percept onto a state identity for table lookups.
    Object sPrime = ptsFunction.getState(psPrime);
    // if GOAL-TEST(s') then return stop
    if (goalTest(sPrime))
    {
        a = NoOpAction.NO_OP;
    }
    else
    {
        // if s' is a new state (not in H) then H[s'] <- h(s')
        if (!H.containsKey(sPrime))
        {
            H.put(sPrime, getHeuristicFunction().h(sPrime));
        }
        // if s is not null
        if (null != s)
        {
            // result[s, a] <- s'
            result.put(s, a, sPrime);

            // H[s] <- min LRTA*-COST(s, b, result[s, b], H)
            //         b (element of) ACTIONS(s)
            // Renamed from 'min'/'cost': reusing those names here collides
            // with the declarations below in the enclosing scope (CS0136).
            double hMin = Double.MAX_VALUE;
            foreach (Action b in actions(s))
            {
                double bCost = lrtaCost(s, b, result.get(s, b));
                if (bCost < hMin)
                {
                    hMin = bCost;
                }
            }
            H.put(s, hMin);
        }
        // a <- an action b in ACTIONS(s') that minimizes LRTA*-COST(s', b,
        // result[s', b], H)
        double min = Double.MAX_VALUE;
        // Just in case no actions
        a = NoOpAction.NO_OP;
        foreach (Action b in actions(sPrime))
        {
            double cost = lrtaCost(sPrime, b, result.get(sPrime, b));
            if (cost < min)
            {
                min = cost;
                a = b;
            }
        }
    }
    // s <- s'
    s = sPrime;
    if (a.isNoOp())
    {
        // I'm either at the Goal or can't get to it,
        // which in either case I'm finished so just die.
        setAlive(false);
    }
    // return a
    return a;
}
/// <summary>
/// Generates a percept for the given instance of time that is meant to
/// represent all data relevant to the agent for making a decision.
/// </summary>
/// <returns>The percept at this instance.</returns>
Percept getPerceptAtThisInstance()
{
    // Populate the percept fields in the order of the struct definition.
    Percept snapshot = new Percept();
    snapshot.curHealth = control.getCurrentHealth();
    // TODO: teammate data (teamatesHealths, teamatesDistancesFromUs,
    // teamatesDistancesFromTarget) is not gathered yet.
    snapshot.targetHealth = target.getCurrentHealth();
    // TODO: target-ally data (numOfTargetAllies, targetsAlliesHealths,
    // targetsAlliesDistancesFromTarget, targetsAlliesDistancesFromUs)
    // is not gathered yet; nor is our distance from the target
    // (Vector3.Distance of the two transforms).
    snapshot.targetState = target.getCurrentChickenState();
    return snapshot;
}
protected override ActorAction SelectAction(RogueGame game, List <Percept> percepts) { HashSet <Point> fov = (m_MemLOSSensor.Sensor as LOSSensor).FOV; List <Percept> mapPercepts = FilterSameMap(game, percepts); ////////////////////////////////////////////////////////////// // 1 move closer to an enemy, nearest & visible enemies first // 2 eat corpses. // 3 use exits (if ability) // 4 move close to nearest undead master (if not master) // 5 move to highest adjacent undead master scent (if not master) // 6 move to highest living scent // 7 **DISABLED** assault breakables (if ability) // 8 randomly push objects around (if ability OR skill STRONG) // 9 explore (if ability) // 10 wander ////////////////////////////////////////////////////////////// // get data. if (m_Actor.Model.Abilities.ZombieAI_Explore) { // exploration. m_Exploration.Update(m_Actor.Location); } // 1 move closer to an enemy, nearest & visible enemies first #region List <Percept> enemies = FilterEnemies(game, mapPercepts); if (enemies != null) { // try visible enemies first, the closer the best. List <Percept> visibleEnemies = Filter(game, enemies, (p) => p.Turn == m_Actor.Location.Map.LocalTime.TurnCounter); if (visibleEnemies != null) { Percept bestEnemyPercept = null; ActorAction bestBumpAction = null; float closest = int.MaxValue; foreach (Percept enemyP in visibleEnemies) { float distance = game.Rules.GridDistance(m_Actor.Location.Position, enemyP.Location.Position); if (distance < closest) { ActorAction bumpAction = BehaviorStupidBumpToward(game, enemyP.Location.Position, true, true); if (bumpAction != null) { closest = distance; bestEnemyPercept = enemyP; bestBumpAction = bumpAction; } } } if (bestBumpAction != null) { m_Actor.Activity = Activity.CHASING; m_Actor.TargetActor = bestEnemyPercept.Percepted as Actor; return(bestBumpAction); } } // then try rest, the closer the best. 
List <Percept> oldEnemies = Filter(game, enemies, (p) => p.Turn != m_Actor.Location.Map.LocalTime.TurnCounter); if (oldEnemies != null) { Percept bestEnemyPercept = null; ActorAction bestBumpAction = null; float closest = int.MaxValue; foreach (Percept enemyP in oldEnemies) { float distance = game.Rules.GridDistance(m_Actor.Location.Position, enemyP.Location.Position); if (distance < closest) { ActorAction bumpAction = BehaviorStupidBumpToward(game, enemyP.Location.Position, true, true); if (bumpAction != null) { closest = distance; bestEnemyPercept = enemyP; bestBumpAction = bumpAction; } } } if (bestBumpAction != null) { m_Actor.Activity = Activity.CHASING; m_Actor.TargetActor = bestEnemyPercept.Percepted as Actor; return(bestBumpAction); } } } #endregion // 2 eat corpses. List <Percept> corpses = FilterCorpses(game, mapPercepts); if (corpses != null) { ActorAction eatCorpses = BehaviorGoEatCorpse(game, corpses); if (eatCorpses != null) { m_Actor.Activity = Activity.IDLE; return(eatCorpses); } } // 3 use exit (if ability) #region // move before following scents so the AI is more likely to move into basements etc... if (m_Actor.Model.Abilities.AI_CanUseAIExits && game.Rules.RollChance(USE_EXIT_CHANCE)) { ActorAction useExit = BehaviorUseExit(game, UseExitFlags.ATTACK_BLOCKING_ENEMIES | UseExitFlags.BREAK_BLOCKING_OBJECTS | UseExitFlags.DONT_BACKTRACK); if (useExit != null) { // memory is obsolete, clear it. m_MemLOSSensor.Clear(); m_Actor.Activity = Activity.IDLE; return(useExit); } } #endregion // 4 move close to nearest undead master (if not master) #region if (!m_Actor.Model.Abilities.IsUndeadMaster) { Percept nearestMaster = FilterNearest(game, FilterActors(game, mapPercepts, (a) => a.Model.Abilities.IsUndeadMaster)); if (nearestMaster != null) { ActorAction bumpAction = BehaviorStupidBumpToward(game, RandomPositionNear(game.Rules, m_Actor.Location.Map, nearestMaster.Location.Position, 3), true, true); if (bumpAction != null) { // MAASTEERRR! 
m_Actor.Activity = Activity.FOLLOWING; m_Actor.TargetActor = nearestMaster.Percepted as Actor; return(bumpAction); } } } #endregion // 5 move to highest undead master scent (if not master) #region if (!m_Actor.Model.Abilities.IsUndeadMaster) { ActorAction trackMasterAction = BehaviorTrackScent(game, m_MasterSmellSensor.Scents); if (trackMasterAction != null) { m_Actor.Activity = Activity.TRACKING; return(trackMasterAction); } } #endregion // 6 move to highest living scent #region ActorAction trackLivingAction = BehaviorTrackScent(game, m_LivingSmellSensor.Scents); if (trackLivingAction != null) { m_Actor.Activity = Activity.TRACKING; return(trackLivingAction); } #endregion // 7 **DISABLED** assault breakables (if ability) #if false #region if (m_Actor.Model.Abilities.ZombieAI_AssaultBreakables) { ActorAction assaultAction = BehaviorAssaultBreakables(game, fov); if (assaultAction != null) { m_Actor.Activity = Activity.IDLE; return(assaultAction); } } #endregion #endif // 8 randomly push objects around (if ability OR skill STRONG) #region if (game.Rules.HasActorPushAbility(m_Actor) && game.Rules.RollChance(PUSH_OBJECT_CHANCE)) { #if false special ZM case disabled