// Smoke-test handler: fills a StateTree with the 26 lowercase letters,
// then displays the tree's level-0 children and direct children of "a" at level 3.
private void button1_Click_1(object sender, EventArgs e)
{
    StateTree<string> tree = new StateTree<string>(EqualityComparer<string>.Default);

    // Add one state per letter, 'a' through 'z'.
    for (char letter = 'a'; letter <= 'z'; letter++)
    {
        tree.AddState(letter.ToString());
    }

    MessageBox.Show(String.Join(",", tree.GetLevel0Children("a", 3)));
    MessageBox.Show(String.Join(",", tree.GetChildren("a", 3)));
}
/// <summary>
/// Hierarchical goal-directed action valuation. If no current goal exists, selects one at the
/// top level; then plans downward level-by-level (A* at each level) until level-0 subgoals
/// exist, and finally returns a one-hot vector over the available actions marking the first
/// action of the level-0 plan.
/// </summary>
/// <param name="state">Current (level-0) state vector.</param>
/// <param name="actions">Actions to evaluate; the returned array is indexed to match.</param>
/// <returns>
/// A value per action: 1 for the action the plan prescribes, 0 otherwise. When
/// <c>minLevel &gt; 0</c> (dH-lesion simulation) and the goal is below <c>minLevel</c>,
/// delegates to the level-0 model's value function instead.
/// </returns>
/// <exception cref="ApplicationException">Thrown when A* cannot find a path at some level.</exception>
public override double[] value(int[] state, List<actionType> actions)
{
    // No active goal: pick a new one at the top level and seed its subgoal list.
    if (currentGoal.goalState == null)
    {
        currentGoal = selectGoal(state, models.Length - 1, actions);
        subgoals[currentGoal.level].Clear();
        subgoals[currentGoal.level].Add(currentGoal);
    }

    // Plan top-down: each level l gets a path toward the first subgoal of level l+1.
    for (int l = currentGoal.level - 1; l >= 0; l--)
    {
        if (subgoals[l].Count == 0)
        {
            // Plan the route to the goal: candidate goal states at this level are the
            // children of the next level's first subgoal; start from this level's
            // abstraction of the current state.
            List<int[]> goalStates = stateTree.GetChildren(subgoals[l + 1][0].goalState, l + 1);
            int[] startState = stateTree.GetParentState(state, l);
            List<Tuple<int[], actionType, double>> path =
                pathFinder.AStar(startState, goalStates, models[l], actions, false); // l != 0);
            subgoals[l] = path2subgoals(path, l, models[l]);

            if (subgoals[l].Count == 0)
            {
                // Pathfinding failed at this level. The original code contained a recovery
                // branch after the throw (reset all subgoals and re-select a goal at level 0),
                // but it was unreachable (CS0162) and has been removed; two unused diagnostic
                // locals computed before the throw were removed as well.
                Console.WriteLine("couldn't find a path to level " + currentGoal.level + ": " + String.Join(",", currentGoal.goalState) + " at level " + l);
                throw new ApplicationException("Pathfinding failed");
            }

            // No local path should be more than 2 steps.
            // NOTE(review): the guard tests Count > 3, which tolerates 3-step paths —
            // confirm whether the comment or the threshold is authoritative.
            if (subgoals[l].Count > 3)
            {
                // Punish the higher-level model for proposing a transition whose local
                // realization is too long, so future planning avoids it.
                int[] thisOldState = subgoals[l + 1][0].startState;
                actionType thisAction = subgoals[l + 1][0].action;
                int[] thisNewState = subgoals[l + 1][0].goalState;
                StateTransition<int[], actionType> t =
                    new StateTransition<int[], actionType>(thisOldState, thisAction, -10, thisNewState); // size of negative reward?
                models[l + 1].update(t);
            }
        }
    }

    // Simulated dorsal-hippocampus lesion: only goals at or above minLevel may drive behavior.
    if (minLevel > 0)
    {
        double[] vals = new double[availableActions.Count];
        if (currentGoal.level >= minLevel)
        {
            vals[availableActions.IndexOf(currentGoal.action)] = 1;
        }
        else
        {
            // Goal is below the lesion cutoff: fall back to the flat level-0 values.
            return models[0].value(state, availableActions);
        }
        return vals;
    }

    // One-hot response: mark the action prescribed by the first level-0 subgoal.
    double[] response = new double[actions.Count];
    for (int i = 0; i < availableActions.Count; i++)
    {
        if (actionComparer.Equals(subgoals[0][0].action, availableActions.ElementAt(i)))
        {
            response[i] = 1;
        }
    }
    return response;
}