Example #1
        private void Init(IGoapPlanner <T, W> planner, ReGoapState <T, W> newGoal, ReGoapNode <T, W> parent, IReGoapAction <T, W> action)
        {
            expandList.Clear();

            this.planner = planner;
            this.parent  = parent;
            this.action  = action;
            if (action != null)
            {
                actionSettings = action.GetSettings(planner.GetCurrentAgent(), newGoal);
            }

            if (parent != null)
            {
                state = parent.GetState().Clone();
                // g(node)
                g = parent.GetPathCost();
            }
            else
            {
                state = planner.GetCurrentAgent().GetMemory().GetWorldState().Clone();
            }

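            // in backward search the parent's action will run after this node's action in the final plan, hence "nextAction"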
            var nextAction = parent == null ? null : parent.action;

            if (action != null)
            {
                // since in backward search we relax the problem, all preconditions are considered valid but are added to the current goal
                var preconditions = action.GetPreconditions(newGoal, nextAction);
                goal = newGoal + preconditions;

                var effects = action.GetEffects(newGoal, nextAction);
                state.AddFromState(effects);
                g += action.GetCost(newGoal, nextAction);

                // removing the current action's effects from the goal; no need to do this with the whole state
                //  since the state is the sum of all the previous actions' effects.
                goal.ReplaceWithMissingDifference(effects);

                // this is needed at every step to drop any precondition that is already
                //  satisfied by the world state
                goal.ReplaceWithMissingDifference(planner.GetCurrentAgent().GetMemory().GetWorldState());
            }
            else
            {
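                // no action (root node): the goal is whatever part of newGoal the current state does not already satisfy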
                var diff = ReGoapState <T, W> .Instantiate();

                newGoal.MissingDifference(state, ref diff);
                goal = diff;
            }
            h = goal.Count;
            // f(node) = g(node) + h(node)
            cost = g + h * heuristicMultiplier;
        }
Example #2
        private void Init(IGoapPlanner <T, W> planner, ReGoapState <T, W> newGoal, ReGoapNode <T, W> parent, IReGoapAction <T, W> action)
        {
            expandList.Clear();

            this.planner = planner;
            this.parent  = parent;
            this.action  = action;
            if (action != null)
            {
                actionSettings = action.GetSettings(planner.GetCurrentAgent(), newGoal);
            }

            if (parent != null)
            {
                state = parent.GetState().Clone();
                // g(node)
                g = parent.GetPathCost();
            }
            else
            {
                state = planner.GetCurrentAgent().GetMemory().GetWorldState().Clone();
            }

            var nextAction = parent == null ? null : parent.action;

            if (action != null)
            {
                // create a new instance of the goal based on the parent's goal
                goal = ReGoapState <T, W> .Instantiate(newGoal);

                var preconditions = action.GetPreconditions(goal, nextAction);
                var effects       = action.GetEffects(goal, nextAction);
                // adding the action's effects to the current node's state
                state.AddFromState(effects);
                // adding the action's cost to the node's total cost
                g += action.GetCost(goal, nextAction);

                // add all preconditions of the current action to the goal
                goal.AddFromState(preconditions);
                // removes from the goal all the conditions that are now fulfilled by the node's state
                goal.ReplaceWithMissingDifference(state);
            }
            else
            {
                var diff = ReGoapState <T, W> .Instantiate();

                newGoal.MissingDifference(state, ref diff);
                goal = diff;
            }
            h = goal.Count;
            // f(node) = g(node) + h(node)
            cost = g + h * heuristicMultiplier;
        }
Example #3
        private void Init(IGoapPlanner <T, W> planner, ReGoapState <T, W> newGoal, ReGoapNode <T, W> parent, IReGoapAction <T, W> action)
        {
            expandList.Clear();
            tmpKeys.Clear();

            this.planner = planner;
            this.parent  = parent;
            this.action  = action;
            if (action != null)
            {
                actionSettings = action.GetSettings(planner.GetCurrentAgent(), newGoal);
            }

            if (parent != null)
            {
                state = parent.GetState().Clone();
                // g(node)
                g = parent.GetPathCost();
            }
            else
            {
                state = planner.GetCurrentAgent().GetMemory().GetWorldState().Clone();
            }

            var nextAction = parent == null ? null : parent.action;

            if (action != null)
            {
                // the node's goal starts empty and is rebuilt key by key below from a working copy of the parent's goal
                goal = ReGoapState <T, W> .Instantiate();

                var tmpGoal = ReGoapState <T, W> .Instantiate(newGoal);

                var preconditions = action.GetPreconditions(tmpGoal, nextAction);
                var effects       = action.GetEffects(tmpGoal, nextAction);
                // adding the action's effects to the current node's state
                state.AddFromState(effects);
                // adding the action's cost to the node's total cost
                g += action.GetCost(tmpGoal, nextAction);

                //// add all preconditions of the current action to the goal
                //tmpGoal.AddFromState(preconditions);
                //// removes from goal all the conditions that are now fulfilled in the node's state
                //tmpGoal.ReplaceWithMissingDifference(state);
                ////goal.ReplaceWithMissingDifference(effects);

                // collect all keys from the goal & preconditions, de-duplicated
                foreach (var pr in tmpGoal.GetValues())
                {
                    var k = pr.Key;
                    if (!tmpKeys.Contains(k))
                    {
                        tmpKeys.Add(k);
                    }
                }
                foreach (var pr in preconditions.GetValues())
                {
                    var k = pr.Key;
                    if (!tmpKeys.Contains(k))
                    {
                        tmpKeys.Add(k);
                    }
                }

                // process each key
                foreach (var k in tmpKeys)
                {
                    StructValue goalValue, effectValue, precondValue, stateValue, protoValue;
                    tmpGoal.GetValues().TryGetValue(k, out goalValue);
                    effects.GetValues().TryGetValue(k, out effectValue);
                    preconditions.GetValues().TryGetValue(k, out precondValue);
                    state.GetValues().TryGetValue(k, out stateValue);

                    StructValue.EValueType valueType;
                    _GetValueType(ref goalValue, ref effectValue, ref precondValue, ref stateValue, out valueType, out protoValue);
                    if (valueType == StructValue.EValueType.Arithmetic)
                    {
                        //_EnsureArithStructValueInited(ref goalValue, ref protoValue);
                        _EnsureArithStructValueInited(ref effectValue, ref protoValue);
                        _EnsureArithStructValueInited(ref precondValue, ref protoValue);
                        _EnsureArithStructValueInited(ref stateValue, ref protoValue);
                        if (!goalValue.Inited)
                        {
                            goalValue = StructValue.CopyCreate(ref stateValue, -(Convert.ToSingle(stateValue.v) - Convert.ToSingle(effectValue.v)));
                        }

                        float fGoal    = Convert.ToSingle(goalValue.v);
                        float fEffect  = Convert.ToSingle(effectValue.v);
                        float fPrecond = Convert.ToSingle(precondValue.v);
                        float fState   = Convert.ToSingle(stateValue.v);

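                        // regress the numeric goal: keep the larger of (goal minus this action's effect)
                        //  and the precondition's still-unmet amount; e.g. with illustrative numbers
                        //  goal 10, effect 4, precondition 2, state 0 -> Max(10 - 4, Min(2, 2 - 0)) = 6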
                        float finalV = Math.Max(
                            fGoal - fEffect,
                            Math.Min(fPrecond, fPrecond - fState)
                            );

                        var sv = StructValue.CopyCreate(ref protoValue, finalV);

                        goal.SetStructValue(k, sv);
                    }
                    else if (valueType == StructValue.EValueType.Other)
                    {
                        // ReplaceWithMissingDifference: drop the goal value if the node's state already fulfills it
                        if (stateValue.Inited && goalValue.Inited && goalValue.IsFulfilledBy(stateValue))
                        {
                            goalValue.Invalidate();
                        }

                        // AddFromPrecond
                        // 1. if the precondition is already satisfied by the memory's start state, discard it
                        // 2. otherwise the goal value newly added from the precondition must not be removed just because the current state value fulfills it
                        if (precondValue.Inited)
                        {
                            bool        preCondfulfilledByMem = false;
                            var         startMemoryState      = planner.GetCurrentAgent().GetMemory().GetWorldState();
                            StructValue startMemoryValue;
                            if (startMemoryState.GetValues().TryGetValue(k, out startMemoryValue))
                            {
                                if (startMemoryValue.Inited && precondValue.IsFulfilledBy(startMemoryValue))
                                {
                                    preCondfulfilledByMem = true;
                                }
                            }

                            if (!preCondfulfilledByMem)
                            {
                                if (goalValue.Inited)
                                {
                                    goalValue = goalValue.MergeWith(precondValue);
                                }
                                else
                                {
                                    goalValue = precondValue;
                                }
                            }
                        }

                        if (goalValue.Inited)
                        {
                            goal.SetStructValue(k, goalValue);
                        }
                    }
                    else
                    {
                        UnityEngine.Debug.LogError("Unexpected StructValue type: " + valueType);
                    }
                }// foreach (var k in tmpKeys)

                tmpGoal.Recycle();
            }
            else
            {
                var diff = ReGoapState <T, W> .Instantiate();

                newGoal.MissingDifference(state, ref diff);
                goal = diff;
            }

            h = _CalculateH();

            // f(node) = g(node) + h(node)
            cost = g + h * planner.GetSettings().HeuristicMultiplier;
        }
Example #4
        private void Init(ReGoapAgent agent, ReGoapState newGoalState, ReGoapNode parent, ReGoapAction action)
        {
            expandList.Clear();

            ReGoapState goal = null;

            this.reGoapAgent = agent;
            this.parentNode  = parent;
            this.action      = action;
            if (action != null)
            {
                actionSettings = action.GetSettings(newGoalState);
            }

            if (parentNode != null)
            {
                agentReGoapState = parentNode.GetState().Clone();
                g = parentNode.GetPathCost();
            }
            else
            {
                ReGoapState reGoapState = agent.GetWorldState();
                agentReGoapState = reGoapState.Clone();
            }

            if (action != null)
            {
                // create a new instance of the goal based on the parent's goal
                goal = ReGoapState.Instantiate(newGoalState);

                var preconditions = action.GetPreconditions(goal);
                var effects       = action.GetEffects(goal);
                // adding the action's effects to the current node's state
                agentReGoapState.AddFromState(effects);
                // adding the action's cost to the node's total cost
                g += action.GetCost();
                // add all preconditions of the current action to the goal
                goal.AddFromState(preconditions);
                // removes from the goal all the conditions that are now fulfilled by the node's state
                goal.ReplaceWithMissingDifference(agentReGoapState);
            }
            else
            {
                goal = newGoalState.MissingDifference(agentReGoapState);
            }
            h    = goal.Count;
            cost = g + h;

            //Expand(goal);

            expandList.Clear();

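            // back-chaining expansion (the inlined Expand): keep only actions whose effects contribute to the goal and whose preconditions do not conflict with it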
            List <ReGoapAction> actionsList = reGoapAgent.GetActionsSet();

            for (var index = actionsList.Count - 1; index >= 0; index--)
            {
                ReGoapAction possibleAction = actionsList[index];

                if (!possibleAction.CheckProceduralCondition())  // skip actions whose procedural condition is not satisfied
                {
                    continue;
                }

                ReGoapState precond = possibleAction.GetPreconditions(goal);
                ReGoapState effects = possibleAction.GetEffects(goal);

                if (!ReGoapState.HasAny(effects, goal)) // skip unless at least one effect contributes to the current goal
                {
                    continue;
                }

                if (!ReGoapState.HasAnyConflict(precond, goal))
                {
                    ReGoapNode reGoapNode = new ReGoapNode(reGoapAgent, goal, this, possibleAction);
                    expandList.Add(reGoapNode);
                }
            }
        }
Example #5
        private void Init(IGoapPlanner <T, W> planner, ReGoapState <T, W> newGoal, ReGoapNode <T, W> parent, IReGoapAction <T, W> action, ReGoapState <T, W> settings)
        {
            expandList.Clear();

            this.planner = planner;
            this.parent  = parent;
            this.action  = action;
            if (settings != null)
            {
                this.actionSettings = settings.Clone();
            }

            if (parent != null)
            {
                state = parent.GetState().Clone();
                // g(node)
                g = parent.GetPathCost();
            }
            else
            {
                state = planner.GetCurrentAgent().GetMemory().GetWorldState().Clone();
            }

            var nextAction = parent == null ? null : parent.action;

            if (action != null)
            {
                // create a new instance of the goal based on the parent's goal
                goal = ReGoapState <T, W> .Instantiate(newGoal);

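            // this ReGoap version bundles the per-node planning context into a stack data struct passed to the action callbacks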
                GoapActionStackData <T, W> stackData;
                stackData.currentState = state;
                stackData.goalState    = goal;
                stackData.next         = action;
                stackData.agent        = planner.GetCurrentAgent();
                stackData.settings     = actionSettings;

                var preconditions = action.GetPreconditions(stackData);
                var effects       = action.GetEffects(stackData);
                // adding the action's cost to the node's total cost
                g += action.GetCost(stackData);

                // adding the action's effects to the current node's state
                state.AddFromState(effects);

                // removes from the goal all the conditions that are now fulfilled by the action's effects
                goal.ReplaceWithMissingDifference(effects);
                // add all preconditions of the current action to the goal
                goal.AddFromState(preconditions);
            }
            else
            {
                goal = newGoal;
            }
            h = goal.Count;
            // f(node) = g(node) + h(node)
            cost = g + h * heuristicMultiplier;

            // additionally calculate the goal without any world effect to understand if we are done
            var diff = ReGoapState <T, W> .Instantiate();

            goal.MissingDifference(planner.GetCurrentAgent().GetMemory().GetWorldState(), ref diff);
            goalMergedWithWorld = diff;
        }