//public static float timeSum = 0.0f; //public static int numCalls = 0;

/// <summary>
/// Position-only equality test between two grid-time states: every axis must
/// differ by less than the tolerance. Start states use a coarse 0.5 tolerance,
/// all other comparisons use 0.1. Time is deliberately not compared.
/// </summary>
public override bool equals(DefaultState s1, DefaultState s2, bool isStart)
{
    GridTimeState a = s1 as GridTimeState;
    GridTimeState b = s2 as GridTimeState;

    // MUBBASIR TODO : WHY DONT U CONSIDER TIME HERE ??
    // Tolerances kept as double literals (.5/.1) to match the original comparisons.
    double tol = isStart ? .5 : .1;

    return (Mathf.Abs(a.currentPosition.x - b.currentPosition.x) < tol)
        && (Mathf.Abs(a.currentPosition.y - b.currentPosition.y) < tol)
        && (Mathf.Abs(a.currentPosition.z - b.currentPosition.z) < tol);
}
// used by ara star + used by evaluateTotalCost which is used by best first search
/// <summary>
/// Heuristic (h) estimate: straight-line distance between the two states,
/// expressed in units of the analyzer's mean step size.
/// </summary>
public override float ComputeHEstimate(DefaultState _from, DefaultState _to)
{
    GridTimeState src = _from as GridTimeState;
    GridTimeState dst = _to as GridTimeState;

    // Number of "average steps" needed to cover the straight-line gap.
    Vector3 gap = dst.currentPosition - src.currentPosition;
    float spaceCost = gap.magnitude / meanStepSize;
    return spaceCost;
}
/// <summary>States within 1 world unit of the ideal goal count as goal states.</summary>
public override bool isAGoalState(ref DefaultState state, ref DefaultState idealGoalState)
{
    ARAstarState current = state as ARAstarState;
    ARAstarState goal = idealGoalState as ARAstarState;

    float gap = Vector3.Distance(current.state, goal.state);
    return gap < 1;
}
/// <summary>
/// Generates the four axis-aligned unit-move successors of the current state,
/// discarding moves that would leave the [x_min,x_max) x [z_min,z_max) grid.
/// </summary>
public override void generateTransitions(ref DefaultState currentState, ref DefaultState previousState, ref DefaultState idealGoalState, ref List<DefaultAction> transitions)
{
    List<Vector3> moves = new List<Vector3>();
    moves.Add(new Vector3(1.0f, 0.0f, 0.0f));
    moves.Add(new Vector3(-1.0f, 0.0f, 0.0f));
    moves.Add(new Vector3(0.0f, 0.0f, 1.0f));
    moves.Add(new Vector3(0.0f, 0.0f, -1.0f));

    FringePlanningState curState = currentState as FringePlanningState;
    foreach (Vector3 move in moves)
    {
        // Hoist the repeated vector addition (original recomputed it 4x in the test).
        Vector3 next = move + curState.state;
        if (next.x < x_max && next.x >= x_min && next.z < z_max && next.z >= z_min)
        {
            FringePlanningAction action = new FringePlanningAction();
            action.cost = 1;
            action.direction = move;
            action.state = new FringePlanningState(next);
            transitions.Add(action);
        }
    }
}
/// <summary>
/// Builds analyzer metadata from the analyzer class name plus localized resources.
/// Throws when the Title resource does not match the code derived from the class name.
/// </summary>
public AnalyzerDetails(string className, AnalyzerCategoryDetails category, DefaultState defaultState, DiagnosticSeverity severity, string titleResourceName, string descriptionResourceName, string messageFormatResourceName, IList<Type> suppressionAttributes)
{
    var details = DecomposeDetailsFromClassName(className);
    string code = details.Item1;

    Name = details.Item2;
    NameWithCode = $"{details.Item3} - {Name}";

    Category = category;
    _defaultState = defaultState;
    Severity = severity;
    SuppressionAttributes = suppressionAttributes;

    Title = LocalizableStringFactory.LocalizableResourceString(titleResourceName);
    Description = LocalizableStringFactory.LocalizableResourceString(descriptionResourceName);
    MessageFormat = LocalizableStringFactory.LocalizableResourceString(messageFormatResourceName);
    DiagnosticId = Title.ToString().Replace("-", "");

    // The Title resource doubles as the diagnostic code; enforce the convention.
    if (Title.ToString() != code)
    {
        throw new ArgumentException($@"Title resource value isn't of the correct format: should be {code}", nameof(titleResourceName));
    }
}
/// <summary>A* cost estimate: f = g + straight-line distance to the goal.</summary>
public override float estimateTotalCost(ref DefaultState currentState, ref DefaultState idealGoalState, float currentg)
{
    float heuristic = Vector3.Distance(currentState.statePosition(), idealGoalState.statePosition());
    return currentg + heuristic;
}
// Use this for initialization
// Unity lifecycle hook: builds the ADA* planning pipeline once at scene start.
// Order matters: the domain list must be populated before planner.init, and both
// endpoint states must be set before InitializeValues runs the initial plan.
void Start()
{
    outputPlan = new Stack<DefaultAction>();
    domainList = new List<PlanningDomainBase>();
    domainList.Add(new ADAstarDomain());
    planner = new ADAstarPlanner();
    planner.init(ref domainList, 100); // 100 = planner budget — TODO confirm units (nodes vs iterations)
    DStartState = ADAexecution.startState as DefaultState;
    DGoalState = ADAexecution.goalState as DefaultState;
    // Inflation 2.5 in IncreaseFactor mode, step .2, start time 0.
    planner.InitializeValues(ref DStartState, ref DGoalState, 2.5f, ref outputPlan, PlannerMode.PLANNING_MODE.IncreaseFactor, .2f, 0.0f);
    Debug.Log(ADAstarPlanner.Closed.Count);
    Debug.Log("Finished");
    // foreach(Edge edge in planner.edgeList.Values)
    // {
    //  Debug.Log("Edge: u: " + (edge.u as ADAstarState).state);
    //  Debug.Log("Edge: v: " + (edge.v as ADAstarState).state);
    // }
}
/// <summary>Recomputes every plan node's h-value as its distance to the (new) goal.</summary>
public void UpdateHeuristic(DefaultState goal)
{
    Vector3 goalPosition = (goal as ARAstarState).state;
    foreach (ARAstarNode node in plan.Values)
    {
        node.h = Vector3.Distance((node.action.state as ARAstarState).state, goalPosition);
    }
}
// Update is called once per frame
/// <summary>
/// Input handling: A = full plan, S = single-step plan, Z/X toggle the
/// open-set / visited-set debug overlays.
/// </summary>
void Update()
{
    if (Input.GetKeyDown(KeyCode.A))
    {
        RunPlanner(false);
    }
    if (Input.GetKeyDown(KeyCode.S))
    {
        RunPlanner(true);
    }
    if (Input.GetKeyDown(KeyCode.Z))
    {
        showOpen = !showOpen;
    }
    if (Input.GetKeyDown(KeyCode.X))
    {
        showVisited = !showVisited;
    }
}

// Shared body of the A/S handlers (the originals were identical except for the
// oneStep flag): rebuild endpoint states from the marker objects and plan.
private void RunPlanner(bool oneStep)
{
    Debug.Log("Planning");
    DStartState = new BestFirstState(startObject.transform.position) as DefaultState;
    DGoalState = new BestFirstState(goalObject.transform.position) as DefaultState;
    planner.oneStep = oneStep;
    planner.computePlan(ref DStartState, ref DGoalState, ref outputPlan, 10.0f);
}
/// <summary>
/// Ping-pongs the default material colour between white and yellow, flipping
/// the lerp target each time the one-second timer elapses.
/// </summary>
private void ChangeDefaultColor()
{
    if (defaultColorTimer.UpdateTimer())
    {
        // Flip the target colour and rearm the timer.
        DState = (DState == DefaultState.ToWhite) ? DefaultState.ToYellow : DefaultState.ToWhite;
        defaultColorTimer.ResetTimer(1.0f);
    }

    switch (DState)
    {
    case DefaultState.ToWhite:
        defaultMat.color = Color.Lerp(defaultMat.color, Color.white, Time.deltaTime * 2);
        break;

    case DefaultState.ToYellow:
        defaultMat.color = Color.Lerp(defaultMat.color, Color.yellow, Time.deltaTime * 2);
        break;

    default:
        break;
    }
}
/// <summary>
/// Assembles analyzer metadata (names, localized strings, diagnostic id) from
/// the analyzer class name, then validates the Title against the derived code.
/// </summary>
public AnalyzerDetails(string className, AnalyzerCategoryDetails category, DefaultState defaultState, DiagnosticSeverity severity, string titleResourceName, string descriptionResourceName, string messageFormatResourceName, IList<Type> suppressionAttributes)
{
    var parts = DecomposeDetailsFromClassName(className);
    var expectedCode = parts.Item1;

    Name = parts.Item2;
    NameWithCode = $"{parts.Item3} - {Name}";
    Category = category;
    _defaultState = defaultState;
    Severity = severity;
    SuppressionAttributes = suppressionAttributes;

    Title = LocalizableStringFactory.LocalizableResourceString(titleResourceName);
    Description = LocalizableStringFactory.LocalizableResourceString(descriptionResourceName);
    MessageFormat = LocalizableStringFactory.LocalizableResourceString(messageFormatResourceName);
    DiagnosticId = Title.ToString().Replace("-", "");

    // The Title resource must equal the code embedded in the class name.
    if (Title.ToString() != expectedCode)
    {
        throw new ArgumentException($@"Title resource value isn't of the correct format: should be {expectedCode}", nameof(titleResourceName));
    }
}
/// <summary>
/// Propagates updated costs breadth-first from currentState through its
/// successors, then clears the per-pass bookkeeping flags on every node.
/// </summary>
public void UpdateList(ARAstarNode currentState)
{
    startState = currentState.action.state;
    Queue<ARAstarNode> queue = new Queue<ARAstarNode>();
    queue.Enqueue(currentState);
    int n = 0; // processed-node counter; only incremented, never read elsewhere in this method
    while (queue.Count > 0)
    {
        ARAstarNode state = queue.Dequeue();
        UpdateState(ref state, ref queue); // may enqueue further nodes
        n++;
    }
    // Reset the per-pass flags for the next propagation round.
    foreach (ARAstarNode node in dictionary.Values)
    {
        node.isDirty = false;
        node.touched = false;
        node.updated = false;
        // NOTE(review): if dictionary[node.action.state] is the same object as
        // `node`, the three lines below are redundant — confirm before removing.
        dictionary[node.action.state].isDirty = false;
        dictionary[node.action.state].touched = false;
        dictionary[node.action.state].updated = false;
    }
}
/// <summary>
/// Picks the overlay icon for this audio device. Default-role overlays report
/// bottomCorner = false; device-state overlays report bottomCorner = true.
/// Returns null when no overlay applies.
/// </summary>
private Image GetOverlayImage(out bool bottomCorner)
{
    bottomCorner = false;

    // Sound control panel shows the same icon between all and multimedia
    if (DefaultState.IsSet(AudioDeviceDefaultState.Multimedia))
    {
        return Resources.DefaultMultimediaDevice;
    }

    if (DefaultState.IsSet(AudioDeviceDefaultState.Communications))
    {
        return Resources.DefaultCommunicationsDevice;
    }

    bottomCorner = true;
    switch (State)
    {
    case AudioDeviceState.Disabled:
        return Resources.Disabled;

    case AudioDeviceState.NotPresent:
        return Resources.NotPresent;

    case AudioDeviceState.Unplugged:
        return Resources.Unplugged;

    default:
        return null;
    }
}
// Update is called once per frame
// Re-plans whenever the start or goal marker object has been moved in the scene.
void Update()
{
    // Start marker moved: invalidate the plan and resync the stored state.
    if (ADAexecution.startState.state != startObject.transform.position)
    {
        ADAstarPlanner.startFound = false;
        ADAexecution.startState.state = startObject.transform.position;
        DStartState = ADAexecution.startState as DefaultState;
    }
    // Goal marker moved: same invalidation path.
    if (ADAexecution.goalState.state != goalObject.transform.position)
    {
        ADAstarPlanner.startFound = false;
        ADAexecution.goalState.state = goalObject.transform.position;
        DGoalState = ADAexecution.goalState as DefaultState;
    }
    // Re-run the planner until it reports the start as found again.
    if (ADAstarPlanner.startFound == false)
    {
        planner.InitializeValues(ref DStartState, ref DGoalState, 2.5f, ref outputPlan, PlannerMode.PLANNING_MODE.IncreaseFactor, .2f, 0.0f);
    }
    //if(!ADAexecution.startState.state.Equals(ADAexecution.goalState.state)){
    // PlanningDomainBase domain = planner._planningDomain[0];
    // planner.inifiniteUpdate(ref planner.edgeList, ref domain,
    // ref planner.Closed,ref outputPlan);
    //}
}
/// <summary>
/// Generates predecessor actions for the eight grid moves (axis-aligned plus
/// diagonal), skipping any predecessor that lands on the hard-coded obstacle.
/// </summary>
public override void generatePredecesors(ref DefaultState currentState, ref DefaultState previousState, ref DefaultState idealGoalState, ref List<DefaultAction> transitions)
{
    List<Vector3> moves = new List<Vector3>();
    moves.Add(new Vector3(1.0f, 0.0f, 0.0f));
    moves.Add(new Vector3(-1.0f, 0.0f, 0.0f));
    moves.Add(new Vector3(0.0f, 0.0f, 1.0f));
    moves.Add(new Vector3(0.0f, 0.0f, -1.0f));
    moves.Add(new Vector3(1.0f, 0.0f, 1.0f));
    moves.Add(new Vector3(-1.0f, 0.0f, 1.0f));
    moves.Add(new Vector3(1.0f, 0.0f, -1.0f));
    moves.Add(new Vector3(-1.0f, 0.0f, -1.0f));

    ADAstarState curState = currentState as ADAstarState;
    // TODO: obstacle position is hard-coded; should come from the scene.
    Vector3 obs = new Vector3(3.0f, 0.5f, 0.0f);
    foreach (Vector3 move in moves)
    {
        // Hoist the repeated subtraction used for both the test and the new state.
        Vector3 pred = curState.state - move;
        if (!pred.Equals(obs))
        {
            ADAstarAction action = new ADAstarAction();
            action.cost = 1;
            action.direction = -move;
            action.state = new ADAstarState(pred);
            transitions.Add(action);
        }
    }
}
// Construction function for a state that comes from a previous state with a movement of mov
/// <summary>
/// Creates a successor state: records the parent and the movement taken, then
/// derives this state's actual position via ComputeActualState.
/// (Dead commented-out timing instrumentation removed.)
/// </summary>
public GridPlanningState(GridPlanningState prevState, Vector3 mov)
{
    previousState = prevState;
    actionMov = mov;
    ComputeActualState(mov);
}
/// <summary>Switches the editor into the default state and selects the given entities.</summary>
public void SelectEntities(params Entity[] e)
{
    var next = new DefaultState(this);
    ChangeState(next);
    next.Editor.Select(e);
}
/// <summary>
/// Initializes a new <see cref="ARAstarNode"/> with its cost values, the state
/// of its parent, and the action that produced it.
/// </summary>
/// <param name='_g'>Node g-value (cost from start).</param>
/// <param name='_h'>Node h-value (heuristic toward goal).</param>
/// <param name='_previousStateRef'>State of this node's parent.</param>
/// <param name='_actionRef'>Action taken to generate this node.</param>
public ARAstarNode(float _g, float _h, DefaultState _previousStateRef, DefaultAction _actionRef)
{
    this.g = _g;
    this.h = _h;
    this.previousState = _previousStateRef;
    this.action = _actionRef;
}
/// <summary>
/// Function to add a renderable object to the vertex buffer.
/// </summary>
/// <param name="renderable">Renderable object to add.</param>
internal void AddRenderable(IRenderable renderable)
{
    // Apply any render-state change, flushing pending cached geometry first.
    StateChange stateChange = DefaultState.Compare(renderable);

    if (stateChange != StateChange.None)
    {
        if (_cache.NeedsFlush)
        {
            Flush();
        }

        DefaultState.UpdateState(renderable, stateChange);

        // If we switch vertex buffers, then reset the cache.
        bool vertexBufferSwitched = (stateChange & StateChange.VertexBuffer) == StateChange.VertexBuffer;
        if (vertexBufferSwitched)
        {
            _cache.Enabled = Graphics.Input.VertexBuffers[0].Equals(ref _defaultVertexBuffer);
        }
    }

    // We skip the cache for objects that have their own vertex buffers.
    if (!_cache.Enabled)
    {
        return;
    }

    _cache.AddVertices(renderable.Vertices, renderable.BaseVertexCount, renderable.IndexCount, 0, renderable.VertexCount);
}
/// <summary>
/// Verifies that OnTransition actions fire with the source state's data:
/// state1 transitions to S2 after the first update (a captures Updated), then
/// to S3 after the second (b captures Updated).
/// </summary>
public void TestTransitionAction()
{
    var machine = new StateMachine();
    int a = 0, b = 0;
    var state1 = new CounterState();
    var state2 = new DefaultState("S2");
    var state3 = new DefaultState("S3");
    machine.AddTransition(DefaultState.Enter, state1);
    machine.AddTransition(state1, state2)
        .When(s => s.Entered == 1)
        .OnTransition((s, t) => a = s.Updated);
    machine.AddTransition(state1, state3)
        .When(s => s.Entered == 2)
        .OnTransition((s, t) => b = s.Updated);
    machine.Update(0);
    // NOTE(review): the double.Epsilon delta is meaningless for these int
    // comparisons — the two-argument AreEqual overload would state the intent.
    Assert.AreEqual(1, a, double.Epsilon);
    Assert.AreEqual(0, b, double.Epsilon);
    machine.Update(0);
    Assert.AreEqual(1, a, double.Epsilon);
    Assert.AreEqual(2, b, double.Epsilon);
}
/// <summary>A* estimate for the fringe domain: f = g + Euclidean distance to goal.</summary>
public override float estimateTotalCost(ref DefaultState currentState, ref DefaultState idealGoalState, float currentg)
{
    Vector3 here = (currentState as FringePlanningState).state;
    Vector3 there = (idealGoalState as FringePlanningState).state;
    return currentg + Vector3.Distance(here, there);
}
/// <summary>
/// A footstep state is in collision when its joints, another agent, or any
/// dynamic obstacle intersects it.
/// </summary>
override public bool CheckStateCollisions(DefaultState Dstate)
{
    FootstepPlanningState state = Dstate as FootstepPlanningState;

    // Short-circuits left to right: joints first, then agents, then dynamic obstacles.
    return CheckJointsCollisions(state)
        || CheckAgentsCollisions(state)
        || CheckDynamicObstaclesCollisions(state);
}
/// <summary>
/// Repairs the back-pointer (previousState/action) of <paramref name="successor"/>
/// after costs changed: links directly to the start state when adjacent to it,
/// otherwise re-links through a cheaper dirty predecessor when one exists.
/// </summary>
void UpdateReference(DefaultState successor)
{
    List<DefaultAction> neighborsList = new List<DefaultAction>();
    domain.generatePredecessors(successor, ref neighborsList);
    foreach (DefaultAction action in neighborsList)
    {
        if (isNeighborToStartAndPreviousStateIsNotStart(successor, action.state))
        {
            // Adjacent to the start: link straight to it and stop searching.
            dictionary[successor].previousState = startState;
            //openDictionary[successor].action = new ARAstarAction(neighbor, successor);
            dictionary[successor].action = domain.generateAction(action.state, successor);
            dictionary[successor].isDirty = true;
            break;
        }
        else if (dictionary.ContainsKey(action.state) && dictionary[action.state].isDirty)
        {
            // Re-link through a dirty predecessor only when it is cheaper than
            // the current parent (see predIsDirtyWithLeastCost).
            if (dictionary[successor].previousState != null && predIsDirtyWithLeastCost(successor, action.state))
            {
                dictionary[successor].previousState = action.state;
                //openDictionary[successor].action = new ARAstarAction(neighbor, successor);
                dictionary[successor].action = domain.generateAction(action.state, successor);
                dictionary[successor].isDirty = true;
            }
        }
    }
}
// Use this for initialization
// Unity lifecycle hook: builds the ADA* planning pipeline once at scene start.
// Order matters: the domain list must be populated before planner.init, and both
// endpoint states must be set before InitializeValues runs the initial plan.
void Start()
{
    outputPlan = new Stack<DefaultAction>();
    domainList = new List<PlanningDomainBase>();
    domainList.Add(new ADAstarDomain());
    planner = new ADAstarPlanner();
    planner.init(ref domainList, 100); // 100 = planner budget — TODO confirm units (nodes vs iterations)
    DStartState = ADAexecution.startState as DefaultState;
    DGoalState = ADAexecution.goalState as DefaultState;
    // Inflation 2.5 in IncreaseFactor mode, step .2, start time 0.
    planner.InitializeValues(ref DStartState, ref DGoalState, 2.5f, ref outputPlan, PlannerMode.PLANNING_MODE.IncreaseFactor, .2f, 0.0f);
    Debug.Log(ADAstarPlanner.Closed.Count);
    Debug.Log("Finished");
    // foreach(Edge edge in planner.edgeList.Values)
    // {
    //  Debug.Log("Edge: u: " + (edge.u as ADAstarState).state);
    //  Debug.Log("Edge: v: " + (edge.v as ADAstarState).state);
    // }
}
/// <summary>Creates the node source backing the "Unextracted" tag-archive view.</summary>
private NodeSource SetupUnextractedSource()
{
    var defaultState = new DefaultState(Resources.cd16);
    var source = new TagArchiveNodeSource("Unextracted", currentGame, Core.Prometheus.Instance.DocumentManager, DisplayItems.AllUnextractedItems, defaultState);

    AddTagViewUnextractedMenus(source);
    return source;
}
// Construction function for an initial state
/// <summary>Root state: no parent, zero action movement, positioned at the game object.</summary>
public GridPlanningState(Vector3 gameObjectPosition)
{
    previousState = null;
    currentPosition = gameObjectPosition;
    actionMov = new Vector3(0.0f, 0.0f, 0.0f);
}
//This constructor creates a previous state given a current state and an action.
//The dummyType is to differentiate it from the constructor used to create successors states.
// (dummyType's value is never read in this body — it exists solely to give this
// overload a signature distinct from the successor constructor.)
public GridPlanningState(GridPlanningState currentState, Vector3 mov, float dummyType)
{
    previousState = currentState;
    actionMov = mov;
    ComputePreviousState(mov);
}
/// <summary>
/// Pushes this node's schema downstream: the default state first, then the
/// child-name target value, then the per-child state.
/// </summary>
protected override void OnProcessOutputSchema(MutableObject newSchema)
{
    DefaultState.TransmitSchema(newSchema);
    ChildBoundNameTarget.SetValue("Child Name", newSchema);
    PerChildState.TransmitSchema(newSchema);
}
// used by ara star only --
/// <summary>
/// g-estimate between two grid-time states: the straight-line distance between
/// their positions. (Vector3.Distance is already non-negative, so the original
/// Mathf.Abs wrapper was redundant and has been removed.)
/// </summary>
public override float ComputeGEstimate(DefaultState _from, DefaultState _to)
{
    GridTimeState from = _from as GridTimeState;
    GridTimeState to = _to as GridTimeState;
    return Vector3.Distance(to.currentPosition, from.currentPosition);
}
/// <summary>
/// Transmits the default schema, then forwards the last element of the first
/// scope entry to the per-particle-system state.
/// </summary>
protected override void OnProcessOutputSchema(MutableObject newSchema)
{
    DefaultState.TransmitSchema(newSchema);
    // NOTE(review): FirstOrDefault returns null when the scope has no entries,
    // which would make entry.LastOrDefault() throw — confirm entries always exist.
    var entry = Scope.GetEntries(newSchema).FirstOrDefault();
    PerParticleSystem.TransmitSchema(entry.LastOrDefault());
}
/// <summary>
/// Best-first search node: stores g and f, the parent state, and a zero-cost
/// placeholder action pointing at this node's own state.
/// </summary>
public BestFirstSearchNode(float _g, float _f, ref DefaultState _previousStateRef, ref DefaultState _nextStateRef)
{
    g = _g;
    f = _f;
    alreadyExpanded = false;
    previousState = _previousStateRef;

    // Wrap the next state in a dummy action so every node carries one uniformly.
    action = new DefaultAction();
    action.cost = 0.0f;
    action.state = _nextStateRef;
}
/// <summary>
/// Weighted A* estimate for the footstep domain: f = g + W * h, where
/// h = 2 * mass * distance * sqrt(e_s * e_w) is an energy-based heuristic on
/// walking the straight-line distance to the ideal goal.
/// (Large dead commented-out alternatives and timing code removed.)
/// </summary>
override public float estimateTotalCost(ref DefaultState DcurrentState, ref DefaultState DidealGoalState, float currentg)
{
    FootstepPlanningState currentState = DcurrentState as FootstepPlanningState;
    FootstepPlanningState idealGoalState = DidealGoalState as FootstepPlanningState;

    Vector3 toGoal = idealGoalState.currentPosition - currentState.currentPosition;

    // Energy-model heuristic built from the step/walk work constants e_s, e_w.
    float h = 2 * analyzer.mass * toGoal.magnitude * Mathf.Sqrt(FootstepPlanningAction.e_s * FootstepPlanningAction.e_w);
    float f = currentg + W * h;

    return f;
}
/**
 * @brief Computes the heuristic function (often denoted as f) that estimates the "goodness" of a state towards the goal.
 *
 * Note carefully that this function should compute f, not h. For generalized best-first search, there is no explicit concept of h.
 * For example, to implement A*, this function would return f, where f = currentg + h. In this case, h is the estimated distance or cost from the current state to the goal state.
 *
 * The base implementation returns 0 (an uninformed search); concrete domains override it.
 */
public virtual float estimateTotalCost(ref DefaultState currentState, ref DefaultState idealGoalState, float currentg)
{
    /*
     * // if distance() is admissible, then this implementation is an A* search.
     * float h = Vector3.Distance((currentState as Transform).position, (idealGoalState as Transform).position);
     * float f = currentg + h;
     * return f;
     */
    return(0);
}
/// <summary>
/// Private singleton constructor: registers the four input states, starts in
/// the default state, and hooks the death-reset handler.
/// </summary>
private PlayerStateMachine()
{
    // Slot order mirrors the InputState enum usage.
    stateBases[0] = new DefaultState();
    stateBases[1] = new BuildingState();
    stateBases[2] = new FightState();
    stateBases[3] = new RemovalState();

    SwitchState(InputState.DefaultState);

    GameManager.DeathEvent += DeathReset;
}
/// <summary>Creates the object-view node source for unextracted items (dependency loading off).</summary>
private NodeSource SetupUnextractedObjectViewSource()
{
    var defaultState = new DefaultState(Resources.cd16);
    var source = new TagArchiveObjectViewNodeSource("ObjectViewUnextracted", currentGame, Core.Prometheus.Instance.DocumentManager, DisplayItems.AllUnextractedItems, defaultState);
    source.LoadDependencies = false;

    AddObjectViewUnextractedMenus(source);
    return source;
}
/// <summary>
/// Whitespace state: stays put while consuming whitespace/control characters,
/// switches to the string state on a delimiter, and otherwise hands the
/// character to a fresh default state and transitions to it.
/// </summary>
public ITokenizerState Exec(char currentChar, int position, string statement, ref StringBuilder token, List<Token> tokens)
{
    // Still whitespace: remain in this state.
    if (char.IsWhiteSpace(currentChar) || char.IsControl(currentChar))
    {
        return this;
    }

    // A quote begins a string literal.
    if (InStringState.IsStringDelimiter(currentChar))
    {
        return new InStringState();
    }

    // Anything else: let the default state consume this character and take over.
    ITokenizerState next = new DefaultState();
    next.Exec(currentChar, position, statement, ref token, tokens);
    return next;
}
// NOTE(review): the class name misspells "Suppression"; renaming would break callers.
/// <summary>
/// Convenience wrapper: analyzer details for a named-item rule that declares
/// no suppression attributes.
/// </summary>
public NamedItemSuppresionAttributeDetails(string className, AnalyzerCategoryDetails category, DefaultState defaultState, DiagnosticSeverity severity, string titleResourceName, string descriptionResourceName, string messageFormatResourceName)
{
    _analyzerDetails = new AnalyzerDetails(className, category, defaultState, severity, titleResourceName, descriptionResourceName, messageFormatResourceName, new List<Type>());
}
/// <summary>
/// Tokenizes the statement by feeding each character through the tokenizer
/// state machine, then flushes any token still being built at end of input.
/// </summary>
internal Tokenizer(string statement)
{
    ITokenizerState state = new DefaultState();
    StringBuilder token = new StringBuilder();

    for (int i = 0; i < statement.Length; i++)
    {
        // Index the string directly; the original's intermediate char[] copy was unnecessary.
        state = state.Exec(statement[i], i, statement, ref token, mTokens);
        Context.CheckCancelled(); // cooperative cancellation for long statements
    }

    // Flush a trailing token that was never terminated by a delimiter.
    if (token.Length > 0)
    {
        mTokens.Add(new Token(token.ToString()));
    }
}
/// <summary>
/// Generates the four axis-aligned unit-move successors of the current state,
/// discarding moves that would leave the [x_min,x_max) x [z_min,z_max) grid.
/// </summary>
public override void generateTransitions(ref DefaultState currentState, ref DefaultState previousState, ref DefaultState idealGoalState, ref List<DefaultAction> transitions)
{
    List<Vector3> moves = new List<Vector3>();
    moves.Add(new Vector3(1.0f, 0.0f, 0.0f));
    moves.Add(new Vector3(-1.0f, 0.0f, 0.0f));
    moves.Add(new Vector3(0.0f, 0.0f, 1.0f));
    moves.Add(new Vector3(0.0f, 0.0f, -1.0f));

    FringePlanningState curState = currentState as FringePlanningState;
    foreach (Vector3 move in moves)
    {
        // Hoist the repeated vector addition (original recomputed it 4x in the test).
        Vector3 next = move + curState.state;
        if (next.x < x_max && next.x >= x_min && next.z < z_max && next.z >= z_min)
        {
            FringePlanningAction action = new FringePlanningAction();
            action.cost = 1;
            action.direction = move;
            action.state = new FringePlanningState(next);
            transitions.Add(action);
        }
    }
}
/// <summary>
/// Generates predecessor actions for a grid state by applying each movement
/// direction as a backwards step and keeping the collision-free results.
/// </summary>
public override void generatePredecesors(ref DefaultState DcurrentState, ref DefaultState DpreviousState, ref DefaultState DidealGoalState, ref List<DefaultAction> transitions)
{
    GridPlanningState currentState = DcurrentState as GridPlanningState;   // unused here
    GridPlanningState idealGoalState = DidealGoalState as GridPlanningState; // unused here
    GridPlanningState previousState = DpreviousState as GridPlanningState;

    foreach (Vector3 mov in movDirections)
    {
        // NOTE(review): predecessors are anchored on previousState while the
        // companion generateTransitions anchors on currentState — confirm that
        // previousState (not currentState) is really intended here.
        GridPlanningAction newAction = new GridPlanningAction(previousState, mov, 0.0f);

        if (!CheckStateCollisions(newAction.state))
            transitions.Add(newAction);
    }
}
/// <summary>
/// Generates grid successors of the current state. States arriving from other
/// domains (footstep or grid-time) are first converted into grid states.
/// </summary>
public override void generateTransitions(ref DefaultState DcurrentState, ref DefaultState DpreviousState, ref DefaultState DidealGoalState, ref List<DefaultAction> transitions)
{
    GridPlanningState currentState = DcurrentState as GridPlanningState;
    GridPlanningState idealGoalState = DidealGoalState as GridPlanningState;

    // Convert a foreign-domain current state into this domain's representation.
    if (currentState == null)
    {
        FootstepPlanningState fsState = DcurrentState as FootstepPlanningState;
        currentState = (fsState != null)
            ? new GridPlanningState(fsState)
            : new GridPlanningState(DcurrentState as GridTimeState);
    }

    // Same conversion for the ideal goal state.
    if (idealGoalState == null)
    {
        FootstepPlanningState fsIdealGoalState = DidealGoalState as FootstepPlanningState;
        idealGoalState = (fsIdealGoalState != null)
            ? new GridPlanningState(fsIdealGoalState)
            : new GridPlanningState(DidealGoalState as GridTimeState);
    }

    // One candidate action per movement direction; keep the collision-free ones.
    foreach (Vector3 mov in movDirections)
    {
        GridPlanningAction newAction = new GridPlanningAction(currentState, mov);
        //if (!CheckTransitionCollisions(newAction.state,DcurrentState))
        if (!CheckStateCollisions(newAction.state))
        {
            transitions.Add(newAction);
        }
    }
}
/// <summary>
/// Weighted A* estimate for the grid domain: f = g + W * (distance / meanStepSize).
/// Accepts states from coarser domains (footstep, grid-time), matching the
/// conversions done by generateTransitions and isAGoalState.
/// </summary>
public override float estimateTotalCost(ref DefaultState DcurrentState, ref DefaultState DidealGoalState, float currentg)
{
    GridPlanningState currentState = DcurrentState as GridPlanningState;
    GridPlanningState idealGoalState = DidealGoalState as GridPlanningState;

    if (currentState == null)
    {
        FootstepPlanningState fsState = DcurrentState as FootstepPlanningState;
        if (fsState != null)
            currentState = new GridPlanningState(fsState);
        else
            // Consistency fix: generateTransitions/isAGoalState also fall back to
            // GridTimeState; previously this path constructed from a null footstep state.
            currentState = new GridPlanningState(DcurrentState as GridTimeState);
    }

    Vector3 toGoal;
    if (idealGoalState != null) // A state is a goal one if it's really close to the goal
        toGoal = idealGoalState.currentPosition - currentState.currentPosition;
    else
    {
        FootstepPlanningState fsIdealGoalState = DidealGoalState as FootstepPlanningState;
        if (fsIdealGoalState != null)
            toGoal = fsIdealGoalState.currentPosition - currentState.currentPosition;
        else
        {
            // Consistency fix: read the goal position from a grid-time state too,
            // as isAGoalState already does.
            GridTimeState gtIdealGoalState = DidealGoalState as GridTimeState;
            toGoal = gtIdealGoalState.currentPosition - currentState.currentPosition;
        }
    }

    // Heuristic: straight-line distance expressed in mean steps, inflated by W.
    float h = toGoal.magnitude / analyzer.meanStepSize;
    float f = currentg + W * h;

    return f;
}
// Uniform domain weight: this domain is always considered equally applicable.
public override float evaluateDomain(ref DefaultState state)
{
    return 1;
}
// Removes the node keyed by the given state from the plan (no-op if absent).
public void Remove(DefaultState state)
{
    plan.Remove(state);
}
/// <summary>
/// Position-equality for grid states: every axis must differ by less than the
/// tolerance — 0.5 when comparing against a start state, 0.1 otherwise.
/// Tolerances kept as double literals to preserve the original comparisons.
/// (Dead commented-out timing instrumentation removed.)
/// </summary>
public override bool equals(DefaultState s1, DefaultState s2, bool isStart)
{
    GridPlanningState state1 = s1 as GridPlanningState;
    GridPlanningState state2 = s2 as GridPlanningState;

    // Start states get a looser tolerance so planning can begin slightly off-grid.
    double tolerance = isStart ? .5 : .1;

    return (Mathf.Abs(state1.currentPosition.x - state2.currentPosition.x) < tolerance)
        && (Mathf.Abs(state1.currentPosition.y - state2.currentPosition.y) < tolerance)
        && (Mathf.Abs(state1.currentPosition.z - state2.currentPosition.z) < tolerance);
}
// True when the plan already holds a node for the given state.
public bool ContainsState(DefaultState state)
{
    return plan.ContainsKey(state);
}
/// <summary>
/// Relaxes the successor's g-value using the path cost through <paramref name="node"/>.
/// </summary>
void UpdateCost(ref ARAstarNode node, DefaultState successor)
{
    if (!dictionary[successor].touched)
    {
        // First touch this pass: take the cost through node unconditionally.
        dictionary[successor].g = node.g + Vector3.Distance((node.action.state as ARAstarState).state, (successor as ARAstarState).state);
        dictionary[successor].touched = true;
        // NOTE(review): this immediately overwrites the g just computed with rhs —
        // probably rhs was meant to receive the new cost first. Confirm intent.
        dictionary[successor].g = dictionary[successor].rhs;
    }
    else if (dictionary[successor].g > node.g + Vector3.Distance((node.action.state as ARAstarState).state, (successor as ARAstarState).state))
    {
        // A cheaper route through node was found.
        dictionary[successor].g = node.g + Vector3.Distance((node.action.state as ARAstarState).state, (successor as ARAstarState).state);
        // NOTE(review): same overwrite-with-rhs pattern as above.
        dictionary[successor].g = dictionary[successor].rhs;
    }
}
/// <summary>
/// Rebuilds the plan dictionary by walking parent pointers back from the goal
/// (or from stateReached when the goal was never visited) to the current state.
/// </summary>
public void Fill(ref CloseContainer Close, Dictionary<DefaultState, ARAstarNode> Visited, ref DefaultState stateReached, PlanningDomainBase domain, ref DefaultState current, ref KeyValuePair<DefaultState, ARAstarNode> goalPair, float inflationFactor)
{
    //DefaultState s = goalPair.Key;
    //Close.Insert(goalPair.Value);
    plan.Clear();
    // Prefer the true goal when it was reached; otherwise start the walk from
    // the closest state the search actually visited.
    DefaultState s;
    if (Visited.ContainsKey(goalPair.Key))
        s = stateReached = goalPair.Key;
    else
        s = stateReached;
    DefaultAction a; // NOTE(review): declared but never used in this method
    bool done = false;
    /*foreach(ARAstarNode planNode in plan.Values) {
            Close.Insert(planNode);
       }
       plan.Clear();
       // TODO : check if we still need this function
       Close.UpdateReferences(inflationFactor, domain);*/
    do
    {
        // Stop once the walk reaches the current (start) state.
        if (domain.equals(s, current, false))
            done = true;
        if (Visited.ContainsKey(s))
        {
            plan[s] = Visited[s];
            s = Visited[s].previousState;
        }
        else
        {
            // Chain broken: parent was never visited, abandon the walk.
            break;
        }
    } while (!done);
    //updatePlanReference(domain);
}
/// <summary>
/// True when the successor's recorded parent is known to the dictionary and
/// costs more than the candidate neighbor — i.e. relinking would be cheaper.
/// </summary>
bool predIsDirtyWithLeastCost(DefaultState successor, DefaultState neighbor)
{
    DefaultState parent = dictionary[successor].previousState;
    if (!dictionary.ContainsKey(parent))
    {
        return false;
    }
    return dictionary[parent].g > dictionary[neighbor].g;
}
/// <summary>
/// Converts the planner's output into the path list by walking previousState
/// links from the reached state back to (and including) the start state.
/// </summary>
void generatePlanList(DefaultState stateReached)
{
    // here we are clearing the plan list
    path.Clear();
    // TODO : what if we want someone to monitor the states in this plan
    // TODO : this is unnecessary -- make planner use State
    ARAstarState currentState = stateReached as ARAstarState;
    ARAstarState starttState = new ARAstarState(startState.getPosition());
    Debug.Log("generating plan to state " + currentState.state);
    // NOTE(review): relies on ARAstarState.Equals matching the start by position;
    // if the back-pointer chain never reaches it, this loop will not terminate.
    while (!currentState.Equals(starttState))
    {
        path.Add(new State(currentState.state));
        currentState = outputPlan[currentState].previousState as ARAstarState;
    }
    // making sure start state enters as well
    path.Add(new State(currentState.state));
    //notifyObservers(Event.GRID_PATH_CHANGED,path);
}
// Inserts or replaces the plan node keyed by the given state.
public void InsertNode(ref DefaultState st, ref ARAstarNode node)
{
    plan[st] = node;
}
// Looks up the plan node for the given state (throws KeyNotFoundException when absent).
public ARAstarNode Node(DefaultState st)
{
    return plan[st];
}
/// <summary>
/// Goal test for the grid domain: true when the state lies within half of a
/// 2.0-unit reference step of the ideal goal. Accepts states from the footstep
/// and grid-time domains by converting them first.
/// (Dead commented-out timing instrumentation removed.)
/// </summary>
public override bool isAGoalState(ref DefaultState Dstate, ref DefaultState DidealGoalState)
{
    GridPlanningState state = Dstate as GridPlanningState;
    GridPlanningState idealGoalState = DidealGoalState as GridPlanningState;

    // Convert a foreign-domain state into a grid state.
    if (state == null)
    {
        FootstepPlanningState fsState = Dstate as FootstepPlanningState;
        if (fsState != null)
            state = new GridPlanningState(fsState);
        else
        {
            GridTimeState gridTimeState = Dstate as GridTimeState;
            state = new GridPlanningState(gridTimeState);
        }
    }

    // Read the goal position from whichever domain the goal state belongs to.
    Vector3 toGoal;
    if (idealGoalState != null) // A state is a goal one if it's really close to the goal
        toGoal = idealGoalState.currentPosition - state.currentPosition;
    else
    {
        FootstepPlanningState fsIdealGoalState = DidealGoalState as FootstepPlanningState;
        if (fsIdealGoalState != null)
            toGoal = fsIdealGoalState.currentPosition - state.currentPosition;
        else
        {
            GridTimeState gtIdealGoalState = DidealGoalState as GridTimeState;
            toGoal = gtIdealGoalState.currentPosition - state.currentPosition;
        }
    }

    //bool b = toGoal.magnitude/analyzer.maxStepSize < 0.5;
    return toGoal.magnitude / 2.0 < 0.5;
}
// Collision test for a state. States from the coarser footstep domain are
// translated into this grid domain first; states of any other unknown
// domain are treated as collision-free.
public override bool CheckStateCollisions(DefaultState Dstate)
{
    GridPlanningState gridState = Dstate as GridPlanningState;

    // Native grid state: test it directly.
    if (gridState != null)
        return CheckRootCollisions(gridState);

    // Coarser-domain state: translate into the low resolution domain,
    // then test.
    FootstepPlanningState footstepState = Dstate as FootstepPlanningState;
    if (footstepState != null)
        return CheckRootCollisions(new GridPlanningState(footstepState));

    // Unknown state type: report no collision (matches the original logic).
    return false;
}
// Re-parents 'successor' to a better predecessor among the states that can
// reach it: if the start state itself is a predecessor (and is not already
// the parent), adopt it and stop; otherwise adopt any dirty predecessor
// that offers a lower cost.
void UpdateReference(DefaultState successor)
{
    List<DefaultAction> predecessorActions = new List<DefaultAction>();
    domain.generatePredecessors(successor, ref predecessorActions);

    foreach (DefaultAction predecessorAction in predecessorActions)
    {
        DefaultState predecessor = predecessorAction.state;

        if (isNeighborToStartAndPreviousStateIsNotStart(successor, predecessor))
        {
            // The start state reaches 'successor' directly: re-parent to
            // it and stop searching.
            dictionary[successor].previousState = startState;
            //openDictionary[successor].action = new ARAstarAction(neighbor, successor);
            dictionary[successor].action = domain.generateAction(predecessor, successor);
            dictionary[successor].isDirty = true;
            break;
        }

        if (dictionary.ContainsKey(predecessor) && dictionary[predecessor].isDirty)
        {
            // Only adopt a dirty predecessor if it beats the current
            // parent's cost.
            if (dictionary[successor].previousState != null &&
                predIsDirtyWithLeastCost(successor, predecessor))
            {
                dictionary[successor].previousState = predecessor;
                //openDictionary[successor].action = new ARAstarAction(neighbor, successor);
                dictionary[successor].action = domain.generateAction(predecessor, successor);
                dictionary[successor].isDirty = true;
            }
        }
    }
}
// Cost estimate between two states, translating footstep-domain states into
// this grid domain where needed. "g" yields the exact Euclidean edge cost,
// "h" yields the heuristic cost-to-go measured in mean-step-size units;
// any other estimate type costs nothing.
public override float ComputeEstimate(ref DefaultState Dfrom, ref DefaultState Dto, string estimateType)
{
    GridPlanningState fromState = Dfrom as GridPlanningState;
    GridPlanningState toState = Dto as GridPlanningState;

    // Translate a footstep-domain source state into the grid domain.
    if (fromState == null)
    {
        FootstepPlanningState footstepFrom = Dfrom as FootstepPlanningState;
        fromState = new GridPlanningState(footstepFrom);
    }

    switch (estimateType)
    {
        case "g":
            return Mathf.Abs(Vector3.Distance(toState.currentPosition, fromState.currentPosition));

        case "h":
            Vector3 offset;
            if (toState != null)
            {
                offset = toState.currentPosition - fromState.currentPosition;
            }
            else
            {
                // Goal state belongs to the footstep domain.
                FootstepPlanningState footstepGoal = Dto as FootstepPlanningState;
                offset = footstepGoal.currentPosition - fromState.currentPosition;
            }
            // Number of mean-sized steps needed to cover the straight-line
            // distance to the goal.
            return offset.magnitude / analyzer.meanStepSize;

        default:
            return 0.0f;
    }
}
// Looks up the search node recorded in the dictionary for the given state.
// Throws if the state is unknown.
public ARAstarNode nodeForState(DefaultState state)
{
    return dictionary[state];
}
// Recomputes the heuristic value (straight-line distance to 'goal') for
// every node currently stored in the plan — e.g. after the goal has moved.
public void UpdateHeuristic(DefaultState goal)
{
    // Hoist the loop-invariant downcast/dereference of the goal position
    // out of the loop instead of repeating it for every node.
    Vector3 goalPosition = (goal as ARAstarState).state;

    foreach (ARAstarNode node in plan.Values)
    {
        // NOTE(review): the heuristic is measured from the node's
        // action-result state — confirm this matches how the plan map is
        // keyed before relying on it.
        node.h = Vector3.Distance((node.action.state as ARAstarState).state, goalPosition);
    }
}
// True when 'neighbor' is the start state while 'successor' is not yet
// parented to the start state — i.e. re-parenting to the start is possible.
bool isNeighborToStartAndPreviousStateIsNotStart(DefaultState successor, DefaultState neighbor)
{
    // Guard first so the dictionary lookup below only happens when the
    // neighbor actually is the start state (preserves the original
    // short-circuit behavior).
    if (!neighbor.Equals(startState))
        return false;

    return !dictionary[successor].previousState.Equals(startState);
}
// Propagates updates through the search graph starting from 'currentState'
// (breadth-first via UpdateState, which may enqueue further nodes), then
// clears the per-pass bookkeeping flags on every known node.
// Fix: removed the dead local counter 'n' — it was incremented every
// iteration but never read.
public void UpdateList(ARAstarNode currentState)
{
    startState = currentState.action.state;

    Queue<ARAstarNode> queue = new Queue<ARAstarNode>();
    queue.Enqueue(currentState);
    while (queue.Count > 0)
    {
        ARAstarNode node = queue.Dequeue();
        UpdateState(ref node, ref queue);
    }

    // Reset the dirty/touched/updated flags for the next pass.
    foreach (ARAstarNode node in dictionary.Values)
    {
        node.isDirty = false;
        node.touched = false;
        node.updated = false;
        // NOTE(review): if the dictionary is keyed by each node's
        // action.state, the three lines below touch the same object as the
        // three above and are redundant — confirm the keying before
        // removing them.
        dictionary[node.action.state].isDirty = false;
        dictionary[node.action.state].touched = false;
        dictionary[node.action.state].updated = false;
    }
}