Code example #1
File: BGoapNode.cs Project: M0rt1mer/Unity-goap
    public BGoapNode(IGoapPlanner planner, BGoapState parentGoal, BGoapNode parent, ReGoapActionState actionState)
    {
        this.planner = planner;
        this.parent = parent;
        if(actionState != null) {
            this.action = actionState.Action;
            this.actionSettings = actionState.Settings;
        }

        if (this.parent != null){
            g = parent.GetPathCost();
        }

        var nextAction = parent == null ? null : parent.action;
        if(action != null) {

            //first step - subtract effects of action
            var effects = action.GetEffects( parentGoal, actionSettings, nextAction );
            try {
                goal = parentGoal.Difference( effects, false ); //don't use defaults here; only subtract what is actually in the effects
            } catch(ArgumentException e) {
                Debug.Log( e );
            }
            //then add preconditions to the current goal state
            var preconditions = action.GetPreconditions( parentGoal, actionSettings, nextAction );
            goal = goal.Union( preconditions );
            
            g += action.GetCost( parentGoal, actionSettings, nextAction );

        } else goal = parentGoal;
        h = goal.Distance( planner.GetCurrentAgent().GetMemory().GetWorldState() );
        // f(node) = g(node) + h(node)
        cost = g + h * heuristicMultiplier;
    }
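The constructor above regresses the goal through the action: it subtracts the action's effects from the parent goal, then adds the action's preconditions as new sub-goals, and finally scores the node with cost = g + h * heuristicMultiplier. A minimal standalone sketch of the regression step, using plain bool dictionaries instead of the BGoapState API (illustrative only; all names here are assumptions):

    // Sketch of the goal-regression step with plain dictionaries (not the BGoapState API).
    using System.Collections.Generic;
    using System.Linq;

    static class RegressionSketch
    {
        // newGoal = (parentGoal minus effects) union preconditions
        public static Dictionary<string, bool> Regress(
            Dictionary<string, bool> parentGoal,
            Dictionary<string, bool> effects,
            Dictionary<string, bool> preconditions)
        {
            // drop every goal entry that the action's effects already satisfy
            var goal = parentGoal
                .Where(kv => !(effects.TryGetValue(kv.Key, out var e) && e == kv.Value))
                .ToDictionary(kv => kv.Key, kv => kv.Value);
            // the action's preconditions become the new sub-goals
            foreach (var kv in preconditions)
                goal[kv.Key] = kv.Value;
            return goal;
        }
    }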
Code example #2
    public void ReplaceGoapPlanner(IGoapPlanner <string, object> newValue)
    {
        var index     = AIComponentsLookup.GoapPlanner;
        var component = CreateComponent <GoapPlannerComponent>(index);

        component.value = newValue;
        ReplaceComponent(index, component);
    }
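This is Entitas-generated component code: ReplaceComponent swaps the GoapPlanner component on the entity (adding it if it is not present yet). A hedged usage sketch; the 'aiContext' variable and the concrete planner instances are assumptions, not part of the snippet above:

    // Hypothetical usage sketch: give an entity a planner, then swap it for another one later.
    var entity = aiContext.CreateEntity();
    entity.AddGoapPlanner(initialPlanner);        // initialPlanner : IGoapPlanner<string, object>
    // ... later, e.g. when switching planning strategy:
    entity.ReplaceGoapPlanner(alternativePlanner);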
Code example #3
File: ReGoapTests.cs Project: Link-SD/Kernmodule
    public void TestSimpleChainedPlan(IGoapPlanner planner)
    {
        var gameObject = new GameObject();

        ReGoapTestsHelper.GetCustomAction(gameObject, "CreateAxe",
                                          new Dictionary <string, bool> {
            { "hasWood", true }, { "hasSteel", true }
        },
                                          new Dictionary <string, bool> {
            { "hasAxe", true }, { "hasWood", false }, { "hasSteel", false }
        }, 10);
        ReGoapTestsHelper.GetCustomAction(gameObject, "ChopTree",
                                          new Dictionary <string, bool> {
        },
                                          new Dictionary <string, bool> {
            { "hasRawWood", true }
        }, 2);
        ReGoapTestsHelper.GetCustomAction(gameObject, "WorksWood",
                                          new Dictionary <string, bool> {
            { "hasRawWood", true }
        },
                                          new Dictionary <string, bool> {
            { "hasWood", true }, { "hasRawWood", false }
        }, 5);
        ReGoapTestsHelper.GetCustomAction(gameObject, "MineOre",
                                          new Dictionary <string, bool> {
            { "hasOre", false }
        },
                                          new Dictionary <string, bool> {
            { "hasOre", true }
        }, 10);
        ReGoapTestsHelper.GetCustomAction(gameObject, "SmeltOre",
                                          new Dictionary <string, bool> {
            { "hasOre", true }, { "hasSteel", false }
        },
                                          new Dictionary <string, bool> {
            { "hasSteel", true }, { "hasOre", false }
        }, 10);

        var hasAxeGoal = ReGoapTestsHelper.GetCustomGoal(gameObject, "HasAxeGoal",
                                                         new Dictionary <string, bool> {
            { "hasAxe", true }
        });

        var memory = gameObject.AddComponent <ReGoapTestMemory>();

        memory.Init();

        var agent = gameObject.AddComponent <ReGoapTestAgent>();

        agent.Init();

        var plan = planner.Plan(agent, null, null, null);

        Assert.That(plan, Is.EqualTo(hasAxeGoal));
        // validate plan actions
        ReGoapTestsHelper.ApplyAndValidatePlan(plan, memory);
    }
Code example #4
File: ReGoapNode.cs Project: Link-SD/Kernmodule
    public ReGoapNode(IGoapPlanner planner, ReGoapState newGoal, ReGoapNode parent, IReGoapAction action)
    {
        this.planner = planner;
        this.parent  = parent;
        this.action  = action;
        if (action != null)
        {
            actionSettings = action.GetSettings(planner.GetCurrentAgent(), newGoal);
        }

        if (this.parent != null)
        {
            state = this.parent.GetState();
            // g(node)
            g = parent.GetPathCost();
        }
        else
        {
            state = planner.GetCurrentAgent().GetMemory().GetWorldState();
        }

        var nextAction = parent == null ? null : parent.action;

        if (action != null)
        {
            // backward search does NOT support negative preconditions:
            // since backward search relaxes the problem, every precondition is treated as valid and is added to the current goal
            var preconditions = action.GetPreconditions(newGoal, nextAction);
            goal = newGoal + preconditions;

            var effects = action.GetEffects(newGoal, nextAction);
            state += effects;
            g     += action.GetCost(newGoal, nextAction);

            // remove the current action's effects from the goal; no need to do this for the whole state
            //  since the state is the sum of all the previous actions' effects.
            var missingState = new ReGoapState();
            goal.MissingDifference(effects, ref missingState);
            goal = missingState;

            // this is needed at every step to make sure no precondition is already satisfied
            //  by the world state
            var worldMissingState = new ReGoapState();
            goal.MissingDifference(planner.GetCurrentAgent().GetMemory().GetWorldState(), ref worldMissingState);
            goal = worldMissingState;
        }
        else
        {
            var diff = new ReGoapState();
            newGoal.MissingDifference(state, ref diff);
            goal = diff;
        }
        h = goal.Count;
        // f(node) = g(node) + h(node)
        cost = g + h * heuristicMultiplier;
    }
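MissingDifference keeps only the goal entries that the given state does not already satisfy; the constructor uses it twice, first against the action's effects and then against the agent's world state. A standalone dictionary-based sketch of that semantics (illustrative; not the actual ReGoapState implementation):

    // Sketch of MissingDifference semantics with plain dictionaries (not the ReGoapState API).
    using System.Collections.Generic;

    static class MissingDifferenceSketch
    {
        // Returns the goal entries that 'other' does not already satisfy.
        public static Dictionary<string, object> MissingDifference(
            Dictionary<string, object> goal, Dictionary<string, object> other)
        {
            var missing = new Dictionary<string, object>();
            foreach (var pair in goal)
            {
                object value;
                if (!other.TryGetValue(pair.Key, out value) || !Equals(value, pair.Value))
                    missing[pair.Key] = pair.Value;   // still unsatisfied, keep it in the goal
            }
            return missing;
        }
    }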
Code example #5
        private void Init(IGoapPlanner <T, W> planner, ReGoapState <T, W> newGoal, ReGoapNode <T, W> parent, IReGoapAction <T, W> action)
        {
            expandList.Clear();

            this.planner = planner;
            this.parent  = parent;
            this.action  = action;
            if (action != null)
            {
                actionSettings = action.GetSettings(planner.GetCurrentAgent(), newGoal);
            }

            if (parent != null)
            {
                state = parent.GetState().Clone();
                // g(node)
                g = parent.GetPathCost();
            }
            else
            {
                state = planner.GetCurrentAgent().GetMemory().GetWorldState().Clone();
            }

            var nextAction = parent == null ? null : parent.action;

            if (action != null)
            {
                // since backward search relaxes the problem, every precondition is treated as valid and is added to the current goal
                var preconditions = action.GetPreconditions(newGoal, nextAction);
                goal = newGoal + preconditions;

                var effects = action.GetEffects(newGoal, nextAction);
                state.AddFromState(effects);
                g += action.GetCost(newGoal, nextAction);

                // remove the current action's effects from the goal; no need to do this for the whole state
                //  since the state is the sum of all the previous actions' effects.
                goal.ReplaceWithMissingDifference(effects);

                // this is needed at every step to make sure no precondition is already satisfied
                //  by the world state
                goal.ReplaceWithMissingDifference(planner.GetCurrentAgent().GetMemory().GetWorldState());
            }
            else
            {
                var diff = ReGoapState <T, W> .Instantiate();

                newGoal.MissingDifference(state, ref diff);
                goal = diff;
            }
            h = goal.Count;
            // f(node) = g(node) + h(node)
            cost = g + h * heuristicMultiplier;
        }
Code example #6
File: ReGoapNode.cs Project: jballaban/game1
    public static ReGoapNode <T, W> Instantiate(IGoapPlanner <T, W> planner, ReGoapState <T, W> newGoal, ReGoapNode <T, W> parent, IReGoapAction <T, W> action)
    {
        if (cachedNodes == null)
        {
            cachedNodes = new Stack <ReGoapNode <T, W> >();
        }
        ReGoapNode <T, W> node = cachedNodes.Count > 0 ? cachedNodes.Pop() : new ReGoapNode <T, W>();

        node.Init(planner, newGoal, parent, action);
        return(node);
    }
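Instantiate pulls nodes from a static Stack instead of allocating a new one each time, so planning stays allocation-light once the pool is warm; nodes are presumably pushed back onto the stack by a matching recycle call after planning. A generic sketch of the same pooling pattern (not ReGoap code):

    // Generic sketch of the stack-based pooling pattern used above (illustrative only).
    using System.Collections.Generic;

    public static class NodePool<TItem> where TItem : new()
    {
        private static readonly Stack<TItem> cached = new Stack<TItem>();

        public static TItem Get()
        {
            // reuse a recycled instance when available, otherwise allocate
            return cached.Count > 0 ? cached.Pop() : new TItem();
        }

        public static void Recycle(TItem item)
        {
            // the caller is responsible for re-initialising the item before the next Get
            cached.Push(item);
        }
    }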
Code example #7
File: ReGoapNode.cs Project: sosan/ReGoap
        private void Init(IGoapPlanner <T, W> planner, ReGoapState <T, W> newGoal, ReGoapNode <T, W> parent, IReGoapAction <T, W> action)
        {
            expandList.Clear();

            this.planner = planner;
            this.parent  = parent;
            this.action  = action;
            if (action != null)
            {
                actionSettings = action.GetSettings(planner.GetCurrentAgent(), newGoal);
            }

            if (parent != null)
            {
                state = parent.GetState().Clone();
                // g(node)
                g = parent.GetPathCost();
            }
            else
            {
                state = planner.GetCurrentAgent().GetMemory().GetWorldState().Clone();
            }

            var nextAction = parent == null ? null : parent.action;

            if (action != null)
            {
                // create a new instance of the goal based on the parent's goal
                goal = ReGoapState <T, W> .Instantiate(newGoal);

                var preconditions = action.GetPreconditions(goal, nextAction);
                var effects       = action.GetEffects(goal, nextAction);
                // adding the action's effects to the current node's state
                state.AddFromState(effects);
                // adding the action's cost to the node's total cost
                g += action.GetCost(goal, nextAction);

                // add all preconditions of the current action to the goal
                goal.AddFromState(preconditions);
                // removes from the goal all the conditions that are now fulfilled in the node's state
                goal.ReplaceWithMissingDifference(state);
            }
            else
            {
                var diff = ReGoapState <T, W> .Instantiate();

                newGoal.MissingDifference(state, ref diff);
                goal = diff;
            }
            h = goal.Count;
            // f(node) = g(node) + h(node)
            cost = g + h * heuristicMultiplier;
        }
Code example #8
        public static GoapNode <T> Instantiate(IGoapPlanner planner, GoapState goalState, GoapNode <T> parent, IGoapAction action)
        {
            GoapNode <T> node;

            if (m_cachedNodes == null)
            {
                m_cachedNodes = new Stack <GoapNode <T> >();
            }

            node = (m_cachedNodes.Count > 0) ? m_cachedNodes.Pop() : new GoapNode <T>();
            node.Init(planner, goalState, parent, action);
            return(node);
        }
Code example #9
        public void Init(IGoapPlanner planner, GoapState goalState, GoapNode <T> parent, IGoapAction action)
        {
            m_expandList.Clear();

            m_planner = planner;
            m_parent  = parent;
            m_action  = action;


            if (m_parent != null)
            {
                m_currentState = parent.GetState().Clone();
                m_gCost        = parent.GetCost();
            }
            else
            {
                m_currentState = m_planner.GetAgent().GetMemory().GetWorldState().Clone();
            }


            if (action != null)
            {
                m_gCost += action.GetCost();

                GoapState preconditions = action.GetPreConditions(goalState);
                m_targetState = goalState + preconditions;

                GoapState effects = action.GetPostEffects(goalState);
                m_currentState.AddFromState(effects);


                //Did this action's effect fulfill any of the goals?
                m_targetState.RemoveCompletedConditions(effects);

                //Did the world fulfill any of the goals?
                m_targetState.RemoveCompletedConditions(m_planner.GetAgent().GetMemory().GetWorldState());
            }
            else
            {
                var diff = GoapState.Instantiate();
                goalState.CreateStateWithMissingDifferences(m_currentState, ref diff);
                m_targetState = diff;
            }


            //Heuristic cost equals the number of unsatisfied conditions, i.e. roughly the number of extra actions still needed
            m_hCost = m_targetState.Count;
        }
Code example #10
File: ReGoapArithOpTests.cs Project: TMPxyz/ReGoap
        public void TestPlan2(IGoapPlanner <string, object> planner)
        {
            var gameObject = new GameObject();

            ReGoapTestsHelper.GetCustomAction(gameObject, "BuyFood",
                                              new Dictionary <string, object> {
                { "IntGold", 5 }
            },
                                              new Dictionary <string, object> {
                { "IntGold", -5 }, { "IntFood", 2 }
            },
                                              3);
            ReGoapTestsHelper.GetCustomAction(gameObject, "GoMine",
                                              new Dictionary <string, object> {
                { "IntFood", 2 }
            },
                                              new Dictionary <string, object> {
                { "IntFood", -2 }, { "IntGold", 20 }
            },
                                              5);

            var miningGoal = ReGoapTestsHelper.GetCustomGoal(gameObject, "Mine",
                                                             new Dictionary <string, object> {
                { "IntGold", 40 }
            });

            var memory = gameObject.AddComponent <ReGoapTestMemory>();

            memory.Init();
            memory.SetStructValue("IntGold", StructValue.CreateIntArithmetic(20));

            var agent = gameObject.AddComponent <ReGoapTestAgent>();

            agent.Init();
            agent.debugPlan = true;

            var plan = planner.Plan(agent, null, null, null);

            Assert.That(plan, Is.EqualTo(miningGoal));
            // validate plan actions
            ReGoapTestsHelper.ApplyAndValidatePlan(plan, memory);
        }
Code example #11
File: ReGoapTests.cs Project: Link-SD/Kernmodule
    public void TestTwoPhaseChainedPlan(IGoapPlanner planner)
    {
        var gameObject = new GameObject();

        ReGoapTestsHelper.GetCustomAction(gameObject, "CCAction",
                                          new Dictionary <string, bool> {
            { "hasWeaponEquipped", true }, { "isNearEnemy", true }
        },
                                          new Dictionary <string, bool> {
            { "killedEnemy", true }
        }, 4);
        ReGoapTestsHelper.GetCustomAction(gameObject, "EquipAxe",
                                          new Dictionary <string, bool> {
            { "hasAxe", true }
        },
                                          new Dictionary <string, bool> {
            { "hasWeaponEquipped", true }
        }, 1);
        ReGoapTestsHelper.GetCustomAction(gameObject, "GoToEnemy",
                                          new Dictionary <string, bool> {
            { "hasTarget", true }
        },
                                          new Dictionary <string, bool> {
            { "isNearEnemy", true }
        }, 3);
        ReGoapTestsHelper.GetCustomAction(gameObject, "CreateAxe",
                                          new Dictionary <string, bool> {
            { "hasWood", true }, { "hasSteel", true }
        },
                                          new Dictionary <string, bool> {
            { "hasAxe", true }, { "hasWood", false }, { "hasSteel", false }
        }, 10);
        ReGoapTestsHelper.GetCustomAction(gameObject, "ChopTree",
                                          new Dictionary <string, bool> {
        },
                                          new Dictionary <string, bool> {
            { "hasRawWood", true }
        }, 2);
        ReGoapTestsHelper.GetCustomAction(gameObject, "WorksWood",
                                          new Dictionary <string, bool> {
            { "hasRawWood", true }
        },
                                          new Dictionary <string, bool> {
            { "hasWood", true }, { "hasRawWood", false }
        }, 5);
        ReGoapTestsHelper.GetCustomAction(gameObject, "MineOre", new Dictionary <string, bool> {
            { "hasOre", false }
        },
                                          new Dictionary <string, bool> {
            { "hasOre", true }
        }, 10);
        ReGoapTestsHelper.GetCustomAction(gameObject, "SmeltOre",
                                          new Dictionary <string, bool> {
            { "hasOre", true }
        },
                                          new Dictionary <string, bool> {
            { "hasSteel", true }, { "hasOre", false }
        }, 10);

        var readyToFightGoal = ReGoapTestsHelper.GetCustomGoal(gameObject, "ReadyToFightGoal",
                                                               new Dictionary <string, bool> {
            { "hasWeaponEquipped", true }
        }, 2);

        ReGoapTestsHelper.GetCustomGoal(gameObject, "HasAxeGoal",
                                        new Dictionary <string, bool> {
            { "hasAxe", true }
        });
        var killEnemyGoal = ReGoapTestsHelper.GetCustomGoal(gameObject, "KillEnemyGoal",
                                                            new Dictionary <string, bool> {
            { "killedEnemy", true }
        }, 3);

        var memory = gameObject.AddComponent <ReGoapTestMemory>();

        memory.Init();

        var agent = gameObject.AddComponent <ReGoapTestAgent>();

        agent.Init();

        // the first plan should create an axe and equip it, via 'ReadyToFightGoal', since 'hasTarget' is false (the memory should handle this)
        var plan = planner.Plan(agent, null, null, null);

        Assert.That(plan, Is.EqualTo(readyToFightGoal));
        // we apply the effects manually, but in reality the actions should do this themselves
        //  and the memory should understand what happened
        //  (e.g. for an equip-weapon action, the memory should set 'hasWeaponEquipped' to true if the action equipped something)
        // validate plan actions
        ReGoapTestsHelper.ApplyAndValidatePlan(plan, memory);

        // now we tell the memory that we see the enemy
        memory.SetValue("hasTarget", true);
        // now the planning should choose KillEnemyGoal
        plan = planner.Plan(agent, null, null, null);

        Assert.That(plan, Is.EqualTo(killEnemyGoal));
        ReGoapTestsHelper.ApplyAndValidatePlan(plan, memory);
    }
Code example #12
        private void Init(IGoapPlanner <T, W> planner, ReGoapState <T, W> newGoal, ReGoapNode <T, W> parent, IReGoapAction <T, W> action)
        {
            expandList.Clear();
            tmpKeys.Clear();

            this.planner = planner;
            this.parent  = parent;
            this.action  = action;
            if (action != null)
            {
                actionSettings = action.GetSettings(planner.GetCurrentAgent(), newGoal);
            }

            if (parent != null)
            {
                state = parent.GetState().Clone();
                // g(node)
                g = parent.GetPathCost();
            }
            else
            {
                state = planner.GetCurrentAgent().GetMemory().GetWorldState().Clone();
            }

            var nextAction = parent == null ? null : parent.action;

            if (action != null)
            {
                // create a new instance of the goal based on the parent's goal
                goal = ReGoapState <T, W> .Instantiate();

                var tmpGoal = ReGoapState <T, W> .Instantiate(newGoal);

                var preconditions = action.GetPreconditions(tmpGoal, nextAction);
                var effects       = action.GetEffects(tmpGoal, nextAction);
                // adding the action's effects to the current node's state
                state.AddFromState(effects);
                // adding the action's cost to the node's total cost
                g += action.GetCost(tmpGoal, nextAction);

                //// add all preconditions of the current action to the goal
                //tmpGoal.AddFromState(preconditions);
                //// removes from goal all the conditions that are now fulfilled in the node's state
                //tmpGoal.ReplaceWithMissingDifference(state);
                ////goal.ReplaceWithMissingDifference(effects);

                // collect all keys from the goal & preconditions, de-duplicated
                foreach (var pr in tmpGoal.GetValues())
                {
                    var k = pr.Key;
                    if (!tmpKeys.Contains(k))
                    {
                        tmpKeys.Add(k);
                    }
                }
                foreach (var pr in preconditions.GetValues())
                {
                    var k = pr.Key;
                    if (!tmpKeys.Contains(k))
                    {
                        tmpKeys.Add(k);
                    }
                }

                // process each key
                foreach (var k in tmpKeys)
                {
                    StructValue goalValue, effectValue, precondValue, stateValue, protoValue;
                    tmpGoal.GetValues().TryGetValue(k, out goalValue);
                    effects.GetValues().TryGetValue(k, out effectValue);
                    preconditions.GetValues().TryGetValue(k, out precondValue);
                    state.GetValues().TryGetValue(k, out stateValue);

                    StructValue.EValueType valueType;
                    _GetValueType(ref goalValue, ref effectValue, ref precondValue, ref stateValue, out valueType, out protoValue);
                    if (valueType == StructValue.EValueType.Arithmetic)
                    {
                        //_EnsureArithStructValueInited(ref goalValue, ref protoValue);
                        _EnsureArithStructValueInited(ref effectValue, ref protoValue);
                        _EnsureArithStructValueInited(ref precondValue, ref protoValue);
                        _EnsureArithStructValueInited(ref stateValue, ref protoValue);
                        if (!goalValue.Inited)
                        {
                            goalValue = StructValue.CopyCreate(ref stateValue, -(Convert.ToSingle(stateValue.v) - Convert.ToSingle(effectValue.v)));
                        }

                        float fGoal    = Convert.ToSingle(goalValue.v);
                        float fEffect  = Convert.ToSingle(effectValue.v);
                        float fPrecond = Convert.ToSingle(precondValue.v);
                        float fState   = Convert.ToSingle(stateValue.v);

                        float finalV = Math.Max(
                            fGoal - fEffect,
                            Math.Min(fPrecond, fPrecond - fState)
                            );

                        var sv = StructValue.CopyCreate(ref protoValue, finalV);

                        goal.SetStructValue(k, sv);
                    }
                    else if (valueType == StructValue.EValueType.Other)
                    {
                        //ReplaceWithMissingDifference
                        if (stateValue.Inited && goalValue.Inited && goalValue.IsFulfilledBy(stateValue))
                        {
                            goalValue.Invalidate();
                        }

                        // AddFromPrecond
                        // 1. if the precondition is already satisfied by the memory's start state, discard it
                        // 2. otherwise this goal entry newly added from the precondition must not be removed just because curStateValue fulfils it
                        if (precondValue.Inited)
                        {
                            bool        preCondfulfilledByMem = false;
                            var         startMemoryState      = planner.GetCurrentAgent().GetMemory().GetWorldState();
                            StructValue startMemoryValue;
                            if (startMemoryState.GetValues().TryGetValue(k, out startMemoryValue))
                            {
                                if (startMemoryValue.Inited && precondValue.IsFulfilledBy(startMemoryValue))
                                {
                                    preCondfulfilledByMem = true;
                                }
                            }

                            if (!preCondfulfilledByMem)
                            {
                                if (goalValue.Inited)
                                {
                                    goalValue = goalValue.MergeWith(precondValue);
                                }
                                else
                                {
                                    goalValue = precondValue;
                                }
                            }
                        }

                        if (goalValue.Inited)
                        {
                            goal.SetStructValue(k, goalValue);
                        }
                    }
                    else
                    {
                        UnityEngine.Debug.LogError("Unexpected StructValue type: " + valueType);
                    }
                }// foreach (var k in tmpKeys)

                tmpGoal.Recycle();
            }
            else
            {
                var diff = ReGoapState <T, W> .Instantiate();

                newGoal.MissingDifference(state, ref diff);
                goal = diff;
            }

            h = _CalculateH();

            // f(node) = g(node) + h(node)
            cost = g + h * planner.GetSettings().HeuristicMultiplier;
        }
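For arithmetic keys, the regressed goal value computed above is max(goal - effect, min(precond, precond - state)). A small worked sketch of that formula with hypothetical numbers (not taken from the surrounding tests):

    // Worked sketch of the arithmetic regression formula (hypothetical numbers).
    using System;

    static class ArithRegressionSketch
    {
        static float Regress(float goal, float effect, float precond, float state)
        {
            // how much of this key is still required before the action runs
            return Math.Max(goal - effect, Math.Min(precond, precond - state));
        }

        static void Main()
        {
            // e.g. 40 units required in total, the action contributes 20 and has no
            // precondition on this key: 20 units remain as the regressed goal.
            Console.WriteLine(Regress(goal: 40f, effect: 20f, precond: 0f, state: 0f));   // prints 20
        }
    }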
Code example #13
File: ReGoapTests.cs Project: StephenGreenland/Goap
        public void TestSimpleChainedPlan(IGoapPlanner <string, object> planner)
        {
            var gameObject = new GameObject();

            ReGoapTestsHelper.GetCustomAction(gameObject, "CreateAxe",
                                              new Dictionary <string, object> {
                { "hasWood", true }, { "hasSteel", true }
            },
                                              new Dictionary <string, object> {
                { "hasAxe", true }, { "hasWood", false }, { "hasSteel", false }
            }, 10);
            ReGoapTestsHelper.GetCustomAction(gameObject, "ChopTree",
                                              new Dictionary <string, object> {
            },
                                              new Dictionary <string, object> {
                { "hasRawWood", true }
            }, 2);
            ReGoapTestsHelper.GetCustomAction(gameObject, "WorksWood",
                                              new Dictionary <string, object> {
                { "hasRawWood", true }
            },
                                              new Dictionary <string, object> {
                { "hasWood", true }, { "hasRawWood", false }
            }, 5);
            ReGoapTestsHelper.GetCustomAction(gameObject, "MineOre",
                                              new Dictionary <string, object> {
            },
                                              new Dictionary <string, object> {
                { "hasOre", true }
            }, 10);
            ReGoapTestsHelper.GetCustomAction(gameObject, "SmeltOre",
                                              new Dictionary <string, object> {
                { "hasOre", true }
            },
                                              new Dictionary <string, object> {
                { "hasSteel", true }, { "hasOre", false }
            }, 10);

            var hasAxeGoal = ReGoapTestsHelper.GetCustomGoal(gameObject, "HasAxeGoal",
                                                             new Dictionary <string, object> {
                { "hasAxe", true }
            });
            var greedyHasAxeAndOreGoal = ReGoapTestsHelper.GetCustomGoal(gameObject, "GreedyHasAxeAndOreGoal",
                                                                         new Dictionary <string, object> {
                { "hasAxe", true }, { "hasOre", true }, { "isGreedy", true }
            },
                                                                         2);

            var memory = gameObject.AddComponent <ReGoapTestMemory>();

            memory.Init();

            var agent = gameObject.AddComponent <ReGoapTestAgent>();

            agent.Init();

            var plan = planner.Plan(agent, null, null, null);

            Assert.That(plan, Is.EqualTo(hasAxeGoal));
            // validate plan actions
            ReGoapTestsHelper.ApplyAndValidatePlan(plan, agent, memory);

            // now we set the agent to be greedy, so the second goal can be activated
            memory.SetValue("isGreedy", true);
            // now the planner should choose GreedyHasAxeAndOreGoal
            plan = planner.Plan(agent, null, null, null);

            Assert.That(plan, Is.EqualTo(greedyHasAxeAndOreGoal));
            ReGoapTestsHelper.ApplyAndValidatePlan(plan, agent, memory);
        }
Code example #14
 public virtual void Precalculations(IGoapPlanner <T, W> goapPlanner)
 {
     planner = goapPlanner;
 }
Code example #15
File: ReGoapNode.cs Project: StephenGreenland/Goap
        private void Init(IGoapPlanner <T, W> planner, ReGoapState <T, W> newGoal, ReGoapNode <T, W> parent, IReGoapAction <T, W> action, ReGoapState <T, W> settings)
        {
            expandList.Clear();

            this.planner = planner;
            this.parent  = parent;
            this.action  = action;
            if (settings != null)
            {
                this.actionSettings = settings.Clone();
            }

            if (parent != null)
            {
                state = parent.GetState().Clone();
                // g(node)
                g = parent.GetPathCost();
            }
            else
            {
                state = planner.GetCurrentAgent().GetMemory().GetWorldState().Clone();
            }

            var nextAction = parent == null ? null : parent.action;

            if (action != null)
            {
                // create a new instance of the goal based on the parent's goal
                goal = ReGoapState <T, W> .Instantiate(newGoal);

                GoapActionStackData <T, W> stackData;
                stackData.currentState = state;
                stackData.goalState    = goal;
                stackData.next         = action;
                stackData.agent        = planner.GetCurrentAgent();
                stackData.settings     = actionSettings;

                var preconditions = action.GetPreconditions(stackData);
                var effects       = action.GetEffects(stackData);
                // adding the action's cost to the node's total cost
                g += action.GetCost(stackData);

                // adding the action's effects to the current node's state
                state.AddFromState(effects);

                // removes from the goal all the conditions that are now fulfilled by the action's effects
                goal.ReplaceWithMissingDifference(effects);
                // add all preconditions of the current action to the goal
                goal.AddFromState(preconditions);
            }
            else
            {
                goal = newGoal;
            }
            h = goal.Count;
            // f(node) = g(node) + h(node)
            cost = g + h * heuristicMultiplier;

            // additionally calculate the goal with the current world state subtracted, to understand whether we are done
            var diff = ReGoapState <T, W> .Instantiate();

            goal.MissingDifference(planner.GetCurrentAgent().GetMemory().GetWorldState(), ref diff);
            goalMergedWithWorld = diff;
        }
Code example #16
    public static AIEntity AddAgent(this AIContext context, IGoapState <string, object> worldState, IGoapState <string, object> goalState, IGoapPlanner <string, object> planner)
    {
        var entity = context.CreateEntity();

        entity.AddGoapWorldState(worldState);
        entity.AddGoapGoalState(goalState);
        entity.AddGoapPlanner(planner);
        entity.isGoapAgent = true;
        return(entity);
    }
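A hedged usage sketch of this extension method; 'aiContext', 'worldState', 'goalState' and 'planner' stand for whatever AIContext, IGoapState<string, object> and IGoapPlanner<string, object> instances the project provides:

    // Hypothetical usage sketch, assuming project-specific instances are already available.
    AIEntity agent = aiContext.AddAgent(worldState, goalState, planner);
    // the entity now carries the GoapWorldState, GoapGoalState and GoapPlanner components
    // and is flagged as a GOAP agent via isGoapAgent.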