Example #1
    public void UpdateReferences(float inflationFactor, PlanningDomainBase domain)
    {
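        // Revisit every node on the closed list that was expanded with a larger inflation factor: lower its
        // expansion weight and re-point its previousState/action at the lowest-g neighbor found in the closed list.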
        List <DefaultState> neighborsList = new List <DefaultState>();

        foreach (KeyValuePair <DefaultState, ARAstarNode> node in close)
        {
            if (node.Value.weightExpanded > inflationFactor)
            {
                float best_g = Mathf.Infinity;
                node.Value.weightExpanded = inflationFactor;
                domain.generateNeighbors(node.Key, ref neighborsList);
                foreach (DefaultState neighbor in neighborsList)
                {
                    if (close.ContainsKey(neighbor))
                    {
                        if (close[neighbor].g < best_g)
                        {
                            best_g = close[neighbor].g;
                            close[neighbor].weightExpanded = inflationFactor;
                            close[node.Key].previousState  = neighbor;
                            //close[node.Key].action = new ARAstarAction(neighbor, node.Key);
                            close[node.Key].action = domain.generateAction(neighbor, node.Key);
                        }
                    }
                }
            }
        }
    }
Example #2
 public void UpdateReferences(float inflationFactor, PlanningDomainBase domain)
 {
     List<DefaultState> neighborsList = new List<DefaultState>();
     foreach(KeyValuePair<DefaultState, ARAstarNode> node in close)
     {
         if(node.Value.weightExpanded > inflationFactor)
         {
             float best_g = Mathf.Infinity;
             node.Value.weightExpanded = inflationFactor;
             domain.generateNeighbors(node.Key, ref neighborsList);
             foreach(DefaultState neighbor in neighborsList)
             {
                 if(close.ContainsKey(neighbor))
                 {
                     if(close[neighbor].g < best_g){
                         best_g = close[neighbor].g;
                         close[neighbor].weightExpanded = inflationFactor;
                         close[node.Key].previousState = neighbor;
                         //close[node.Key].action = new ARAstarAction(neighbor, node.Key);
                         close[node.Key].action = domain.generateAction(neighbor, node.Key);
                     }
                 }
             }
         }
     }
 }
Example #3
    void generateNodePredecessors(ref PlanningDomainBase domain, ref ADAstarNode currentNode)
    {
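        // Generate all predecessor actions of currentNode, attach them as predecessor nodes, and keep the
        // edge list (and the changeInEdgeCost flag) in sync with the current action costs.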
        List <DefaultAction> possibleTransitions = new List <DefaultAction>();

        domain.generatePredecesors(ref currentNode.action.state, ref currentNode.previousState, ref goalNode.action.state, ref possibleTransitions);

        foreach (DefaultAction predecessorAction in possibleTransitions)
        {
            DefaultAction previousAction = predecessorAction;
            float         newg           = domain.ComputeEstimate(ref previousAction.state, ref goalNode.action.state, "g");
            float         newh           = domain.ComputeEstimate(ref startNode.action.state, ref previousAction.state, "h");
            ADAstarNode   previousNode   = new ADAstarNode(newg, newh, ref predecessorAction.state, ref previousAction);

            currentNode.predecessors.Add(previousNode);

            //Build the edge key once: an edge is identified by its (predecessor, current) state pair
            DefaultState[] edgeKey = new DefaultState[] { previousNode.action.state, currentNode.action.state };

            //If the edge does not exist in the list, add it
            if (!edgeList.ContainsKey(edgeKey))
            {
                edgeList.Add(edgeKey,
                             new Edge(previousNode.action.state, currentNode.action.state, previousNode.action.cost));
            }
            else              //If it already exists, update the cost value if it changed
            {
                if (edgeList[edgeKey].cost != currentNode.action.cost)
                {
                    //If there was a change in the cost, set the flag and update the stored cost
                    changeInEdgeCost = true;
                    edgeList[edgeKey].cost = currentNode.action.cost;
                }
            }
        }
    }
Example #4
    /// <summary>
    /// Determines the most appropriate planning domain for the given start state (testing).
    /// </summary>
    /// <param name='domain'>
    /// Domain.
    /// </param>
    /// <param name='startState'>
    /// Start state.
    /// </param>
    void DetermineDomain(ref PlanningDomainBase domain, ref DefaultState startState)
    {
        float score = 0.0f;

        foreach (PlanningDomainBase d in _planningDomain)
        {
            if (d.evaluateDomain(ref startState) > score)
            {
                score  = d.evaluateDomain(ref startState);
                domain = d;
            }
        }
    }
Example #5
    public void InitializeValues(ref DefaultState startState, ref DefaultState goalState, float _inflationFactor, ref Stack <DefaultAction> plan, PlannerMode.PLANNING_MODE mode, float threshold, float maxTime)
    {
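        // Pick the best-scoring planning domain, reset the Open/Closed/Incons containers, build the start
        // and goal nodes, and run an initial ComputeorImprovePath pass.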
//		PLANNER_MODE.MODE = mode;
        PlanningDomainBase domain = default(PlanningDomainBase);

        edgeChangeThreshold = threshold;

        float score = 0;

        foreach (PlanningDomainBase d in _planningDomain)
        {
            if (d.evaluateDomain(ref startState) > score)
            {
                score  = d.evaluateDomain(ref startState);
                domain = d;
            }
        }


        Open   = new List <KeyValuePair <DefaultState, ADAstarNode> >();
        Closed = new Dictionary <DefaultState, ADAstarNode>();
        Incons = new Dictionary <DefaultState, ADAstarNode>();
        Open.Clear(); Closed.Clear(); Incons.Clear();

        // h estimate from start to start is 0
        float starth = domain.ComputeEstimate(ref startState, ref startState, "h");

        startNode      = new ADAstarNode(Mathf.Infinity, starth, Mathf.Infinity, ref startState, ref startState);
        startNode.key1 = Mathf.Infinity; startNode.key2 = Mathf.Infinity;

        float goalh = domain.ComputeEstimate(ref startState, ref goalState, "h");

        goalNode        = new ADAstarNode(Mathf.Infinity, goalh, 0.0f, ref goalState, ref goalState);
        goalNode.key1   = GetKey(goalNode)[0]; goalNode.key2 = GetKey(goalNode)[1];
        inflationFactor = _inflationFactor;
        generateNodePredecessors(ref domain, ref goalNode);

        Open.Add(new KeyValuePair <DefaultState, ADAstarNode>(goalNode.action.state, goalNode));

        ComputeorImprovePath(ref domain, maxTime);
    }
Example #6
    void updatePlanReference(PlanningDomainBase domain)
    {
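        // For each state already in the plan, re-point its previousState and action at the neighbor with the smallest g value.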
        List <DefaultState> neighborsList = new List <DefaultState>();

        foreach (DefaultState state in plan.Keys)
        {
            float ming = Mathf.Infinity;
            domain.generateNeighbors(state, ref neighborsList);
            foreach (DefaultState neighbor in neighborsList)
            {
                if (plan.ContainsKey(neighbor))
                {
                    if (plan[neighbor].g < ming)
                    {
                        ming = plan[neighbor].g;
                        plan[state].previousState = neighbor;

                        //plan[state].action = new ARAstarAction(neighbor, state);
                        plan[state].action = domain.generateAction(neighbor, state);
                    }
                }
            }
        }
    }
Example #7
    void updatePlanReference(PlanningDomainBase domain)
    {
        List<DefaultState> neighborsList = new List<DefaultState>();
        foreach(DefaultState state in plan.Keys)
        {
            float ming = Mathf.Infinity;
            domain.generateNeighbors(state, ref neighborsList);
            foreach(DefaultState neighbor in neighborsList)
            {
                if(plan.ContainsKey(neighbor))
                {
                    if(plan[neighbor].g < ming)
                    {
                        ming = plan[neighbor].g;
                        plan[state].previousState = neighbor;

                        //plan[state].action = new ARAstarAction(neighbor, state);
                        plan[state].action = domain.generateAction(neighbor, state);
                    }
                }
            }
        }
    }
Example #8
    public void Fill(ref CloseContainer Close, Dictionary<DefaultState, ARAstarNode> Visited, ref DefaultState stateReached, PlanningDomainBase domain, ref DefaultState current, ref KeyValuePair<DefaultState, ARAstarNode> goalPair, float inflationFactor)
    {
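        // Rebuild the plan by walking previousState links back from the (reached) goal state to the current state.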
        //DefaultState s = goalPair.Key;
        //Close.Insert(goalPair.Value);
        plan.Clear();
        DefaultState s;
        if(Visited.ContainsKey(goalPair.Key))
            s = stateReached = goalPair.Key;
        else
            s = stateReached;
        DefaultAction a;
        bool done = false;
        /*foreach(ARAstarNode planNode in plan.Values)
        {
            Close.Insert(planNode);
        }
        plan.Clear();

        // TODO : check if we still need this function
        Close.UpdateReferences(inflationFactor, domain);*/
        do {
            if (domain.equals (s, current, false))
                    done = true;
            if(Visited.ContainsKey(s)){
                plan[s] = Visited[s];
                s = Visited[s].previousState;
            }
            else{
                break;
            }

        } while (!done);
        //updatePlanReference(domain);
    }
Example #9
 public VisitedContainer(PlanningDomainBase d)
 {
     dictionary = new Dictionary<DefaultState, ARAstarNode>();
     domain = d;
 }
Example #10
 public void publishPlan(ref Stack <DefaultAction> plan, ref PlanningDomainBase domain)
 {
 }
Example #11
    void ComputeorImprovePath(ref PlanningDomainBase domain, float maxTime)
    {
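        // AD*-style expansion loop: pop the lowest-key node from Open and update it (and its predecessors)
        // until the start node is consistent, the node budget is exhausted, or the time budget runs out.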
        int i             = 0;
        int nodesExpanded = 0;

        float prevTime = Time.realtimeSinceStartup;

        Debug.Log("OPEN: " + Open.Count);
        //Open is sorted with lowest key first, so first element always has the smallest key
        //while((ADAstarPlanner.startFound==false) && (nodesExpanded < maxNodes) && (maxTime > 0))
        while ((CompareKey(GetKey(Open.First().Value), GetKey(startNode)) == -1 || startNode.rhs != startNode.g) &&
               (nodesExpanded < maxNodes) && (maxTime > 0))
        {
            i++;
            Debug.Log("Planning: " + i);


            if (domain.equals(Open.First().Key, startNode.action.state, true) || startNode.rhs == startNode.g)
            {
                ADAstarPlanner.startFound = true; Debug.Log("FOUND"); break;
            }


            KeyValuePair <DefaultState, ADAstarNode> currentPair = Open.First();
            ADAstarNode currentNode = currentPair.Value;


            Open.Remove(currentPair);
            currentNode.alreadyExpanded = true;
            nodesExpanded++;

            if (currentNode.g > currentNode.rhs)
            {
                currentNode.g = currentNode.rhs;
                Closed.Add(currentNode.action.state, currentNode);


                //For all s' in Pred(s), call UpdateState(s')
                foreach (ADAstarNode predecessor in currentNode.predecessors)
                {
                    ADAstarNode pred = predecessor;
                    UpdateState(ref pred);
                }
            }
            else
            {
                currentNode.g = Mathf.Infinity;

                //For all s' in Pred(s) ∪ {s}, call UpdateState(s')
                UpdateState(ref currentNode);
                foreach (ADAstarNode predecessor in currentNode.predecessors)
                {
                    ADAstarNode pred = predecessor;
                    UpdateState(ref pred);
                }
            }
            float actualTime = Time.realtimeSinceStartup;
            maxTime -= (actualTime - prevTime);
            prevTime = actualTime;
        }
    }
Example #12
    void UpdateState(ref ADAstarNode currentNode)
    {
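        // AD* UpdateState: recompute rhs from the node's successors, refresh its priority keys, and re-insert
        // it into Open (or Incons, if it was already closed) whenever it is inconsistent (g != rhs).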
        PlanningDomainBase   domain = default(PlanningDomainBase);
        List <DefaultAction> possibleTransitions = new List <DefaultAction>();

        float score = 0;

        foreach (PlanningDomainBase d in _planningDomain)
        {
            if (d.evaluateDomain(ref startNode.action.state) > score)
            {
                score  = d.evaluateDomain(ref startNode.action.state);
                domain = d;
            }
        }

        if (!currentNode.alreadyExpanded)
        {
            currentNode.g = Mathf.Infinity;
        }
        if (!domain.isAGoalState(ref currentNode.action.state, ref goalNode.action.state))
        {
            possibleTransitions.Clear();
            domain.generateTransitions(ref currentNode.action.state, ref currentNode.previousState, ref goalNode.action.state, ref possibleTransitions);

            // Determine min(c(s,s')+g(s')) for rhs for every successor
            float min_rhs = Mathf.Infinity;
            foreach (DefaultAction action in possibleTransitions)
            {
                DefaultAction nextAction = action;
                float         newh       = domain.ComputeEstimate(ref startNode.action.state, ref nextAction.state, "h");
                float         newg       = domain.ComputeEstimate(ref nextAction.state, ref goalNode.action.state, "g");
                //g is estimated as the remaining distance from the successor state to the goal (computed above)
                ADAstarNode nextNode = new ADAstarNode(newg, newh, ref currentNode.action.state, ref nextAction);

                if ((nextAction.cost + nextNode.g) < min_rhs)
                {
                    min_rhs = nextAction.cost + nextNode.g;
                }
            }
            currentNode.rhs = min_rhs;
            float[] keys = GetKey(currentNode);
            currentNode.key1 = keys[0]; currentNode.key2 = keys[1];
        }
        //If Open contains the node, remove it.
        //foreach(KeyValuePair<DefaultState, ADAstarNode> keyval in Open)
        for (int i = 0; i < Open.Count; ++i)
        {
            if (Open[i].Key != null)
            {
                if (domain.equals(Open[i].Key, currentNode.action.state, false))
                {
                    Open.RemoveAt(i); currentNode.alreadyExpanded = true;
                    break;     //a state appears at most once in Open; stop so the shifted indices are not skipped
                }
            }
        }
        //Open = BackUp;
        //KeyValuePair<DefaultState, ADAstarNode> keyval = new KeyValuePair<DefaultState, ADAstarNode>(currentNode.action.state, currentNode);
        //if(Open.Contains(keyval)) {Open.Remove(keyval); currentNode.alreadyExpanded = true;}

        if (currentNode.g != currentNode.rhs)
        {
            bool containsNode = false;
            //foreach(DefaultState key in Closed.Keys)
            //{
            //if(domain.equals(key, currentNode.action.state))
            //if(domain.equals(key, currentNode.action.state, false))
            //{ containsNode = true; break; }
            //}
            if (Closed.ContainsKey(currentNode.action.state))
            {
                containsNode = true;
            }
            if (!containsNode)
            {
                //Generate all predecessors to keep expanding the open list
                generateNodePredecessors(ref domain, ref currentNode);

                Open.Add(new KeyValuePair <DefaultState, ADAstarNode>(currentNode.action.state, currentNode));
                //Sort by priority keys
                Open.Sort(ADAstartCopareCost.CompareCost);
            }
            else
            {
                Incons.Add(currentNode.action.state, currentNode);
            }
        }
    }
Example #13
    public bool _computePlan(ref DefaultState startState, ref DefaultState idealGoalState, Dictionary <DefaultState, FringeSearchNode> Cache, ref DefaultState actualStateReached, float maxTime)
    {
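        // Fringe Search: sweep the fringe list repeatedly, expanding nodes whose cached f-value is within the
        // current flimit and raising flimit to the smallest rejected f-value after each sweep.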
        //Make sure Cache is empty when starting to compute plan

        PlanningDomainBase domain = default(PlanningDomainBase);

        float score = 0;

        foreach (PlanningDomainBase d in _planningDomain)
        {
            if (d.evaluateDomain(ref startState) > score)
            {
                score  = d.evaluateDomain(ref startState);
                domain = d;
            }
        }

        float            newf     = domain.estimateTotalCost(ref startState, ref idealGoalState, 0.0f);
        FringeSearchNode rootNode = new FringeSearchNode(0.0f, newf, ref startState, ref startState);

        rootNode.parent = null;

        LinkedList <FringeSearchNode> fringe = new LinkedList <FringeSearchNode>();

        fringe.AddFirst(rootNode);



        foreach (KeyValuePair <DefaultState, FringeSearchNode> keyval in Cache)
        {
            if ((keyval.Key as FringePlanningState).state.Equals((startState as FringePlanningState).state))
            {
                Cache[keyval.Key] = rootNode;
                break;
            }
        }



        //flimit = h(start)
        float flimit = rootNode.f - rootNode.g;

        while (fringe.Count > 0)
        {
            float fmin = Mathf.Infinity;

            LinkedListNode <FringeSearchNode> fringeNode = fringe.First;

            while (fringeNode != null)
            {
                FringeSearchNode node = fringeNode.Value;

                FringeSearchNode tempNode = default(FringeSearchNode);
                foreach (DefaultState key in Cache.Keys)
                {
                    if ((key as FringePlanningState).state.Equals((node.action.state as FringePlanningState).state))
                    {
                        tempNode = Cache[key]; break;
                    }
                }
                //FringeSearchNode tempNode = Cache[node.action.state];



                if (tempNode.f > flimit)
                {
                    fmin       = Mathf.Min(tempNode.f, fmin);
                    fringeNode = fringeNode.Next;
                    continue;
                }

                if (domain.isAGoalState(ref tempNode.action.state, ref idealGoalState))
                {
                    actualStateReached = tempNode.action.state;
                    return(true);
                }

                List <DefaultAction> possibleActions = new List <DefaultAction>();
                possibleActions.Clear();

                domain.generateTransitions(ref tempNode.action.state, ref tempNode.previousState, ref idealGoalState, ref possibleActions);

                foreach (DefaultAction action in possibleActions)
                {
                    float newg = tempNode.g + action.cost;
                    newf = domain.estimateTotalCost(ref action.state, ref idealGoalState, newg);

                    DefaultAction    nextAction = action;
                    FringeSearchNode successor  = new FringeSearchNode(newg, newf, ref tempNode.action.state, ref nextAction);
                    successor.parent = tempNode;

                    FringeSearchNode fn     = default(FringeSearchNode);
                    bool             exists = false;
                    foreach (KeyValuePair <DefaultState, FringeSearchNode> sn in Cache)
                    {
                        if ((sn.Key as FringePlanningState).state.Equals((successor.action.state as FringePlanningState).state))
                        {
                            if (sn.Value != null)
                            {
                                { fn = sn.Value; exists = true; break; }
                            }
                        }
                    }

                    //if(Cache[successor.action.state] != null)
                    if (exists)
                    {
                        if (successor.g >= fn.g)
                        {
                            continue;
                        }
                    }

                    //If fringe contains successor
                    //if(fringe.Contains(successor))
                    //	fringe.Remove(successor);
                    foreach (FringeSearchNode s in fringe)
                    {
                        if (s.action.state.Equals(successor.action.state))
                        {
                            fringe.Remove(s);
                            break;     //the list was just modified, so stop enumerating; at most one entry matches
                        }
                    }


                    fringe.AddAfter(fringe.Find(node), successor);
                    //C[s]<-(gs, n) already added when node created

                    foreach (KeyValuePair <DefaultState, FringeSearchNode> sn in Cache)
                    {
                        if ((sn.Key as FringePlanningState).state.Equals((successor.action.state as FringePlanningState).state))
                        {
                            Cache[sn.Key] = successor; break;
                        }
                    }
                    //Cache[successor.action.state].g = newg;
                    //Cache[successor.action.state].parent = node;
                }

                fringeNode = fringeNode.Next;
                fringe.Remove(tempNode);
            }
            flimit = fmin;
        }
        return(false);
    }
Example #14
 public VisitedContainer(PlanningDomainBase d)
 {
     dictionary = new Dictionary <DefaultState, ARAstarNode>();
     domain     = d;
 }
Example #15

    public bool _computeOneStep(ref DefaultState startState, ref DefaultState idealGoalState, Dictionary <DefaultState, BestFirstSearchNode> stateMap, ref DefaultState actualStateReached, float maxTime)
    {
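        // Expand a single node of the best-first search: pop the cheapest entry from openSet, test it against
        // the goal, then generate its successors and relax their costs.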
        //SortedList<BestFirstSearchNode<PlanningState, PlanningAction>> openSet = new SortedList<BestFirstSearchNode<PlanningState, PlanningAction>>(new CompareCosts<PlanningState, PlanningAction>());

        PlanningDomainBase domain = default(PlanningDomainBase);

//		int index = 0;
        float score = 0;

        foreach (PlanningDomainBase d in _planningDomain)
        {
            if (d.evaluateDomain(ref startState) > score)
            {
                score  = d.evaluateDomain(ref startState);
                domain = d;
            }
        }

        /***** TEST of MULTIPLE DOMAINS *****/
        //domain = _planningDomain.ElementAt(1);
        /***********************************/


        if (OneStepNeedsUpdate)
        {
            Debug.Log("Plan finished, clearing Lists. Plan again");
            stateReached = startState;
            stateMap.Clear();
            openSet.Clear();
            OneStepNeedsUpdate = false;
            initPlanner        = true;
            return(false);
        }
        //Debug.Log("maxNumNodesToExpand = " + _maxNumNodesToExpand);

        if ((numNodesExpanded == _maxNumNodesToExpand) || (openSet.Count == 0))
        {
            OneStepNeedsUpdate = true;
        }
        else
        {
            numNodesExpanded++;

            openSet.Sort(CompareCosts.CompareCost);

            // get a copy of the first element of the open set (i.e. about to pop it, but only if we get past the next if-statement).
            BestFirstSearchNode x = (openSet.ElementAt(0));
            openSet.Remove(openSet.ElementAt(0));

            score = 0;
            foreach (PlanningDomainBase d in _planningDomain)
            {
                if (d.evaluateDomain(ref x.action.state) > score)
                {
                    score  = d.evaluateDomain(ref x.action.state);
                    domain = d;
                }
            }

            /***** TEST of MULTIPLE DOMAINS *****/

            /*
             * //if (stateMap.Count > 100)
             * if (numNodesExpanded > 5)
             * {
             *      //Debug.Log("We change of domain at time " + Time.time);
             *      domain = _planningDomain.ElementAt(0);
             * }
             * /***********************************/

            // ask the user if this node is a goal state.  If so, then finish up.
            //if ((_planningDomain as PlanningDomainBase<PlanningState>).isAGoalState(ref x.action.state, ref idealGoalState))
            if (domain.isAGoalState(ref x.action.state, ref idealGoalState))
            {
                actualStateReached = x.action.state;

                //Debug.Log("goal reached");
                OneStepNeedsUpdate = true;

                return(true);
            }


            if (stateMap.ContainsKey(x.action.state))
            {
                stateMap[x.action.state].alreadyExpanded = true;
            }

            // ask the user to generate all the possible actions from this state.
            List <DefaultAction> possibleActions = new List <DefaultAction>();
            possibleActions.Clear();

            //Debug.Log("generating transitions");

            domain.generateTransitions(ref x.action.state, ref x.previousState, ref idealGoalState, ref possibleActions);

            // iterate over each potential action, and add it to the open list.
            // if the node was already seen before, then it is updated if the new cost is better than the old cost.
            foreach (DefaultAction action in possibleActions)
            {
                float newg = x.g + action.cost;

                if (stateMap.ContainsKey(action.state))
                {
                    BestFirstSearchNode existingNode = stateMap[action.state];
                    // then, that means this node was seen before.
                    if (newg < existingNode.g)
                    {
                        // then, this means we need to update the node.
                        if (existingNode.alreadyExpanded == false)
                        {
                            openSet.Remove(existingNode);
                        }
                        stateMap.Remove(existingNode.action.state);
                    }
                    else
                    {
                        // otherwise, we don't bother adding this node... it already exists with a better cost.
                        continue;
                    }
                }

                DefaultAction       nextAction = action;
                float               newf       = domain.estimateTotalCost(ref action.state, ref idealGoalState, newg);
                BestFirstSearchNode nextNode   = new BestFirstSearchNode(newg, newf, ref x.action.state, ref nextAction);

                stateMap[nextNode.action.state] = nextNode;
                openSet.Add(nextNode);
            }
        }

        if (openSet.Count == 0)
        {
            // if we get here, there was no solution.
            actualStateReached = startState;
        }
        else
        {
            // if we get here, then we did not find a complete path.
            // instead, just return whatever path we could construct.
            // The idea is that if the user gave a reasonable heuristic,
            // state space, and transitions, then the next node that
            // would be expanded will be the most promising path anyway.
            //
            actualStateReached = (openSet.ElementAt(0)).action.state;
        }

        return(false);     // returns false because plan is incomplete.
    } //end _ComputePlan
Example #16
    /// <summary>
    /// Computes the plan.
    /// </summary>
    /// <returns>
    /// Plan status.
    /// </returns>
    /// <param name='currentState'>
    /// Agent state.
    /// </param>
    /// <param name='_goalState'>
    /// Goal state.
    /// </param>
    /// <param name='plan'>
    /// Dictionary where plan will be stored.
    /// </param>
    /// <param name='inflation'>
    /// Inflation factor.
    /// </param>
    /// <param name='maxTime'>
    /// Maximum allotted time.
    /// </param>
    public PathStatus computePlan(ref DefaultState currentState, ref DefaultState _goalState,
                                  ref Dictionary <DefaultState, ARAstarNode> plan, ref float inflation, float maxTime)
    {
        DefaultState s = default(DefaultState);

        selectedPlanningDomain = default(PlanningDomainBase);
        goalState = _goalState;
        DetermineDomain(ref selectedPlanningDomain, ref currentState);
        currentStart = currentState;

        // introducing function that clears some temp data (e.g. tracked non det obstacles) from domain at the beginning
        // of each plan iteration
        //domain.clearAtBeginningOfEveryPlanIteration ();

        if (firstTime)
        {
            //inflationFactor = inflation;
            InitializeValues(ref currentState, ref goalState, inflation);
            Plan = new PlanContainer(plan);
            if (OneStep)
            {
                PerformOneStep();
            }
            else
            {
                ImprovePath(maxTime);
            }
        }
        else
        {
            if (goalMoved)
            {
                UpdateAfterGoalMoved(goalState);
            }
            if (moved)
            {
                UpdateAfterStartMoved(currentState);
            }

            if (inflationFactor < 1.0f)
            {
                inflationFactor = 1.0f;
            }

            //Check start node if it moved
            if (OneStep)
            {
                PerformOneStep();
            }
            else
            {
                ImprovePath(maxTime);
            }

            if (inflationFactor == 1.0f)
            {
                plannerFinished = true;
            }
        }
        return(Status);
    }
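A minimal, hypothetical call-site sketch for the computePlan method above. The ARAstarPlanner class name, the planner/agentState/goalState names, and the 0.05 s time budget are illustrative assumptions, not taken from these examples.

    // Hypothetical driver; assumes 'planner' exposes computePlan exactly as declared above.
    PathStatus RunPlannerIteration(ARAstarPlanner planner, ref DefaultState agentState, ref DefaultState goalState)
    {
        Dictionary<DefaultState, ARAstarNode> plan = new Dictionary<DefaultState, ARAstarNode>();
        float inflation = 2.5f;   // anytime planners typically start inflated and lower the factor toward 1.0
        // The last argument is the per-call planning time budget in seconds.
        return planner.computePlan(ref agentState, ref goalState, ref plan, ref inflation, 0.05f);
    }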
Example #17
    //An edge is just a node using previous state, action cost as the edge cost, and action state
    public void InifiniteUpdate(ref Dictionary <DefaultState[], Edge> edges, ref Dictionary <DefaultState, ADAstarNode> nodes, ref Stack <DefaultAction> plan, float maxTime, ref DefaultState startState)
    {
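        // Anytime replanning step: pick the best-scoring domain, lower the inflation factor, move the Incons
        // states back onto Open, recompute their keys, re-sort Open, and run ComputeorImprovePath again.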
        float score = 0.0f;
        PlanningDomainBase domain = default(PlanningDomainBase);

        foreach (PlanningDomainBase d in _planningDomain)
        {
            if (d.evaluateDomain(ref startState) > score)
            {
                score  = d.evaluateDomain(ref startState);
                domain = d;
            }
        }


        /*float maxChange = 0.0f;
         * if(changeInEdgeCost || inflationFactor != 1.0f)
         * {
         *
         *      foreach(Edge edge in edges.Values)
         *      {
         *              if(edge.previousCost != edge.cost)
         *              {
         *                      maxChange = Mathf.Abs(edge.cost - edge.previousCost);
         *                      edge.previousCost = edge.cost;
         *                      ADAstarNode n = nodes[edge.u];
         *                      UpdateState(ref n);
         *              }
         *      }
         * }
         *
         * if(maxChange > edgeChangeThreshold) //Decide either increase inflation factor or replan from scratch
         * {
         *      if(PLANNER_MODE.MODE.Equals(PlannerMode.PLANNING_MODE.IncreaseFactor)){
         *              inflationFactor += .1f;
         *      }
         *      else if(PLANNER_MODE.MODE.Equals(PlannerMode.PLANNING_MODE.FromScratch)){
         *              ComputeorImprovePath(ref domain, maxTime );
         *      }
         *                      //	inflationFactor += .5; // Search for a good value to increase
         *                      //OR
         *                      //ComputeorImprovePath();
         * }			*/
        //else
        if (inflationFactor > 1.0f)
        {
            inflationFactor -= .2f;             //Decide decrease amount
        }
        //Decrease inflationFactor by some amount

        //Move states from incons to open
        foreach (KeyValuePair <DefaultState, ADAstarNode> keyVal in Incons)
        {
            Open.Add(keyVal);
        }
        Incons.Clear();
        foreach (KeyValuePair <DefaultState, ADAstarNode> keyval in Open)
        {
            keyval.Value.key1 = GetKey(keyval.Value)[0];
            keyval.Value.key2 = GetKey(keyval.Value)[1];
        }
        Open.Sort(ADAstartCopareCost.CompareCost);
        //Closed.Clear();

        //compute or improve the path
        ComputeorImprovePath(ref domain, maxTime);
        //publish solution
    }
Example #18
 public void setSelectedDomain(PlanningDomainBase domain)
 {
     selectedPlanningDomain = domain;
 }
Example #19
    public void Fill(ref CloseContainer Close, Dictionary <DefaultState, ARAstarNode> Visited, ref DefaultState stateReached, PlanningDomainBase domain, ref DefaultState current, ref KeyValuePair <DefaultState, ARAstarNode> goalPair, float inflationFactor)
    {
        //DefaultState s = goalPair.Key;
        //Close.Insert(goalPair.Value);
        plan.Clear();
        DefaultState s;

        if (Visited.ContainsKey(goalPair.Key))
        {
            s = stateReached = goalPair.Key;
        }
        else
        {
            s = stateReached;
        }
        DefaultAction a;
        bool          done = false;

        /*foreach(ARAstarNode planNode in plan.Values)
         * {
         *      Close.Insert(planNode);
         * }
         * plan.Clear();
         *
         * // TODO : check if we still need this function
         * Close.UpdateReferences(inflationFactor, domain);*/
        do
        {
            if (domain.equals(s, current, false))
            {
                done = true;
            }
            if (Visited.ContainsKey(s))
            {
                plan[s] = Visited[s];
                s       = Visited[s].previousState;
            }
            else
            {
                break;
            }
        } while (!done);
        //updatePlanReference(domain);
    }