/// <summary>
/// Builds a Clarion agent bound to the given world-server proxy and creature.
/// Registers the sensory dimension/value pairs and external action chunks with
/// the Clarion <c>World</c>, then prepares (but does not start) the
/// cognitive-cycle thread.
/// </summary>
/// <param name="nws">Proxy used to communicate with the world server.</param>
/// <param name="creature_ID">Identifier of the creature this agent controls.</param>
/// <param name="creature_Name">Name of the creature this agent controls.</param>
public ClarionAgent(WSProxy nws, String creature_ID, String creature_Name)
{
    worldServer = nws;

    // Initialize the underlying Clarion agent.
    CurrentAgent = World.NewAgent("Current Agent");

    // Open the mind-inspection window.
    mind = new MindViewer();
    mind.Show();

    creatureId = creature_ID;
    creatureName = creature_Name;

    // Sensory inputs: one dimension/value pair per visual percept.
    inputWallAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_WALL_AHEAD);
    inputFoodAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_FOOD_AHEAD);
    inputJewelAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_JEWEL_AHEAD);
    inputNeedJewel = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_NEED_JEWEL);

    // External actions the agent may choose each cycle.
    outputRotateClockwise = World.NewExternalActionChunk(CreatureActions.ROTATE_CLOCKWISE.ToString());
    outputGoAhead = World.NewExternalActionChunk(CreatureActions.GO_AHEAD.ToString());
    outputGetJewel = World.NewExternalActionChunk(CreatureActions.GET_JEWEL.ToString());
    outputEatFood = World.NewExternalActionChunk(CreatureActions.EAT_FOOD.ToString());
    outputGoToJewel = World.NewExternalActionChunk(CreatureActions.GO_TO_JEWEL.ToString());

    // Thread for the simulation's cognitive cycle; created here but
    // presumably started by a Run/Start method elsewhere — confirm.
    runThread = new Thread(CognitiveCycle);
    Console.WriteLine("Agent started");
}
/// <summary>
/// Builds a Clarion agent for the leaflet-delivery task variant: percepts for
/// distant food/jewels and leaflet delivery, plus "go to closest" and
/// sack/deliver/stop actions. Prepares (but does not start) the
/// cognitive-cycle thread.
/// </summary>
/// <param name="nws">Proxy used to communicate with the world server.</param>
/// <param name="creature_ID">Identifier of the creature this agent controls.</param>
/// <param name="creature_Name">Name of the creature this agent controls.</param>
public ClarionAgent(WSProxy nws, String creature_ID, String creature_Name)
{
    worldServer = nws;

    // Initialize the underlying Clarion agent.
    CurrentAgent = World.NewAgent("Current Agent");

    // Open the mind-inspection window.
    mind = new MindViewer();
    mind.Show();

    creatureId = creature_ID;
    creatureName = creature_Name;

    // Sensory inputs: one dimension/value pair per visual percept.
    inputWallAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_WALL_AHEAD);
    inputFoodAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_FOOD_AHEAD);
    inputJewelAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_JEWEL_AHEAD);
    inputDistantJewel = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_DISTANT_JEWEL);
    inputDistantFood = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_DISTANT_FOOD);
    inputDeliverLeaflet = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_DELIVER_LEAFLET);

    // External actions the agent may choose each cycle.
    outputRotateClockwise = World.NewExternalActionChunk(CreatureActions.ROTATE_CLOCKWISE.ToString());
    //outputGoAhead = World.NewExternalActionChunk(CreatureActions.GO_AHEAD.ToString());
    outputGoToClosestJewel = World.NewExternalActionChunk(CreatureActions.GO_TO_CLOSEST_JEWEL.ToString());
    outputGoToClosestFood = World.NewExternalActionChunk(CreatureActions.GO_TO_CLOSEST_FOOD.ToString());
    outputEatFood = World.NewExternalActionChunk(CreatureActions.EAT_FOOD.ToString());
    outputSackJewel = World.NewExternalActionChunk(CreatureActions.SACK_JEWEL.ToString());
    outputDeliverLeaflet = World.NewExternalActionChunk(CreatureActions.PREPARE_TO_DELIVER_LEAFLET.ToString());
    outputStop = World.NewExternalActionChunk(CreatureActions.STOP.ToString());

    // Thread for the simulation's cognitive cycle; created here but
    // presumably started by a Run/Start method elsewhere — confirm.
    runThread = new Thread(CognitiveCycle);
    Console.WriteLine("Agent started");
}
/// <summary>
/// Builds a Clarion agent for the leaflet-jewel task variant: distinguishes
/// jewels that belong to a leaflet from those that do not, and tracks leaflet
/// completion. The mind-viewer window is intentionally disabled here.
/// Prepares (but does not start) the cognitive-cycle thread.
/// </summary>
/// <param name="nws">Connection to the world server.</param>
/// <param name="creature_ID">Identifier of the creature this agent controls.</param>
/// <param name="creature_Name">Name of the creature this agent controls.</param>
public ClarionAgent(WorldServer nws, String creature_ID, String creature_Name)
{
    worldServer = nws;

    // Initialize the underlying Clarion agent.
    CurrentAgent = World.NewAgent("Current Agent");

    // Mind viewer disabled in this variant.
    //mind = new Mind();
    //mind.Show();

    creatureId = creature_ID;
    creatureName = creature_Name;

    // Sensory inputs: one dimension/value pair per visual percept.
    inputWallAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_WALL_AHEAD);
    inputFoodAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_FOOD_AHEAD);
    inputLeafletJewelAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_LEAFLET_JEWEL_AHEAD);
    inputNonLeafletJewelAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_NON_LEAFLET_JEWEL_AHEAD);
    inputCloseObjectAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_CLOSE_OBJECT_AHEAD);
    inputHasCompletedLeaflet = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_HAS_COMPLETED_LEAFLET);

    // External actions the agent may choose each cycle.
    outputRotateClockwise = World.NewExternalActionChunk(CreatureActions.ROTATE_CLOCKWISE.ToString());
    outputGoAhead = World.NewExternalActionChunk(CreatureActions.GO_AHEAD.ToString());
    outputEat = World.NewExternalActionChunk(CreatureActions.EAT.ToString());
    outputHide = World.NewExternalActionChunk(CreatureActions.HIDE.ToString());
    outputSack = World.NewExternalActionChunk(CreatureActions.SACK.ToString());
    outputStop = World.NewExternalActionChunk(CreatureActions.STOP.ToString());

    // Thread for the simulation's cognitive cycle; created here but
    // presumably started by a Run/Start method elsewhere — confirm.
    runThread = new Thread(CognitiveCycle);
    Console.WriteLine("Agent started");
}
/// <summary>
/// Configures the bottom level of the reasoner's NACS: a backpropagation
/// network that associates two property percepts ("lives in water",
/// "eats fish") with three animal chunks (Tuna, Whale, Bear), then trains it
/// to sum-squared-error convergence on two single-property data sets.
/// The chunks are also added to the GKS (top level) as declarative knowledge.
/// </summary>
/// <param name="reasoner">Agent whose NACS is being configured.</param>
static void SetupBPNetwork(Agent reasoner)
{
    // Chunks for the whales, tuna, and bears.
    DeclarativeChunk TunaChunk = World.NewDeclarativeChunk("Tuna");
    DeclarativeChunk WhaleChunk = World.NewDeclarativeChunk("Whale");
    DeclarativeChunk BearChunk = World.NewDeclarativeChunk("Bear");

    // The 2 properties (as DV pairs).
    DimensionValuePair livesinwater = World.NewDimensionValuePair("lives in", "water");
    DimensionValuePair eatsfish = World.NewDimensionValuePair("eats", "fish");

    // The BP network to be used in the bottom level of the NACS.
    BPNetwork net = AgentInitializer.InitializeAssociativeMemoryNetwork(reasoner, BPNetwork.Factory);

    // Adds the properties (as inputs) and chunks (as outputs) to the BP network.
    net.Input.Add(livesinwater);
    net.Input.Add(eatsfish);
    net.Output.Add(TunaChunk);
    net.Output.Add(WhaleChunk);
    net.Output.Add(BearChunk);

    // Network must be committed to the agent before use.
    reasoner.Commit(net);

    // Adds the chunks to the GKS.
    reasoner.AddKnowledge(TunaChunk);
    reasoner.AddKnowledge(WhaleChunk);
    reasoner.AddKnowledge(BearChunk);

    // Initializes a trainer to use to train the BP network.
    // trainerEQ is an equation defined elsewhere in this file.
    GenericEquation trainer = ImplicitComponentInitializer.InitializeTrainer(GenericEquation.Factory, (Equation)trainerEQ);

    // The trainer mirrors the network's input/output layout.
    trainer.Input.Add(livesinwater);
    trainer.Input.Add(eatsfish);
    trainer.Output.Add(TunaChunk);
    trainer.Output.Add(WhaleChunk);
    trainer.Output.Add(BearChunk);
    trainer.Commit();

    // Sets up one single-property data set for each of the 2 properties.
    List<ActivationCollection> sis = new List<ActivationCollection>();
    ActivationCollection si = ImplicitComponentInitializer.NewDataSet();
    si.Add(livesinwater, 1);
    sis.Add(si);
    si = ImplicitComponentInitializer.NewDataSet();
    si.Add(eatsfish, 1);
    sis.Add(si);

    Console.Write("Training AMN...");

    // Trains the BP network to report associative knowledge between the
    // properties and the chunks; terminates on sum-squared error.
    ImplicitComponentInitializer.Train(net, trainer, sis, ImplicitComponentInitializer.TrainingTerminationConditions.SUM_SQ_ERROR);
    Console.WriteLine("Finished!");
}
/// <summary>
/// Builds a Clarion agent for the fuel/leaflet task variant and wires its
/// leaflet bookkeeping into the mind viewer. Prepares (but does not start)
/// the cognitive-cycle thread.
/// </summary>
/// <param name="nws">Proxy used to communicate with the world server.</param>
/// <param name="creature_ID">Identifier of the creature this agent controls.</param>
/// <param name="creature_Name">Name of the creature this agent controls.</param>
/// <param name="leafletList">The creature's leaflets; at least three are required.</param>
/// <exception cref="ArgumentException">Thrown when <paramref name="leafletList"/> is null or holds fewer than three leaflets.</exception>
public ClarionAgent(WSProxy nws, String creature_ID, String creature_Name, List<Leaflet> leafletList)
{
    // BUGFIX: mind.loadLeaflet below indexes leafletList[0..2]; fail fast with
    // a clear message instead of crashing mid-setup with an
    // ArgumentOutOfRangeException (or a NullReferenceException on null).
    if (leafletList == null || leafletList.Count < 3)
    {
        throw new ArgumentException("leafletList must contain at least 3 leaflets", "leafletList");
    }

    worldServer = nws;

    // Initialize the underlying Clarion agent.
    CurrentAgent = World.NewAgent("Current Agent");

    // Open the mind-inspection window.
    mind = new MindViewer();
    mind.Show();

    creatureId = creature_ID;
    creatureName = creature_Name;

    // Sensory inputs: one dimension/value pair per visual percept.
    inputWallCreatureAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_WALL_AHEAD);
    inputFoodAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_FOOD_AHEAD);
    inputJewelAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_JEWEL_AHEAD);
    inputJewelInVision = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_JEWEL_IN_VISION);
    inputFuelLow = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_FUEL_LOW);

    // External actions the agent may choose each cycle.
    outputRotateClockwise = World.NewExternalActionChunk(CreatureActions.ROTATE_CLOCKWISE.ToString());
    outputGoAhead = World.NewExternalActionChunk(CreatureActions.GO_AHEAD.ToString());
    outputEatFood = World.NewExternalActionChunk(CreatureActions.EAT_FOOD.ToString());
    outputGetJewel = World.NewExternalActionChunk(CreatureActions.GET_JEWEL.ToString());
    outputHideJewel = World.NewExternalActionChunk(CreatureActions.HIDE_JEWEL.ToString());
    outputGoalAchieved = World.NewExternalActionChunk(CreatureActions.GOAL_ACHIEVED.ToString());
    outputGoToJewelInVision = World.NewExternalActionChunk(CreatureActions.GO_TO_JEWEL.ToString());
    outputFuelLow = World.NewExternalActionChunk(CreatureActions.FUEL_LOW.ToString());

    // Load leaflet control and show the first three leaflets in the viewer.
    loadLeafletsControl(leafletList);
    mind.loadLeaflet(leafletList[0], leafletList[1], leafletList[2]);

    // Thread for the simulation's cognitive cycle (started elsewhere).
    runThread = new Thread(CognitiveCycle);
}
/// <summary>
/// Sets up the world, five "Peg" percepts with their matching move actions,
/// the agent John, and his implicit decision network with RER thresholds.
/// </summary>
private void Initialize()
{
    // Silence Clarion trace output for this run.
    World.LoggingLevel = TraceLevel.Off;

    // One "Peg" percept per peg position.
    p1 = World.NewDimensionValuePair("Peg", 1);
    p2 = World.NewDimensionValuePair("Peg", 2);
    p3 = World.NewDimensionValuePair("Peg", 3);
    p4 = World.NewDimensionValuePair("Peg", 4);
    p5 = World.NewDimensionValuePair("Peg", 5);

    // One external action per peg...
    mp1 = World.NewExternalActionChunk();
    mp2 = World.NewExternalActionChunk();
    mp3 = World.NewExternalActionChunk();
    mp4 = World.NewExternalActionChunk();
    mp5 = World.NewExternalActionChunk();

    // ...each tagged with its peg so the action identifies its target.
    mp1 += p1;
    mp2 += p2;
    mp3 += p3;
    mp4 += p4;
    mp5 += p5;

    John = World.NewAgent();

    // Bottom-level implicit decision network: peg percepts in, moves out.
    net = AgentInitializer.InitializeImplicitDecisionNetwork(John, SimplifiedQBPNetwork.Factory);
    foreach (var peg in new[] { p1, p2, p3, p4, p5 })
    {
        net.Input.Add(peg);
    }
    foreach (var move in new[] { mp1, mp2, mp3, mp4, mp5 })
    {
        net.Output.Add(move);
    }
    net.Parameters.LEARNING_RATE = 1;
    net.Parameters.MOMENTUM = .01;
    John.Commit(net);

    // Thresholds governing RER rule generalization/specialization.
    RefineableActionRule.GlobalParameters.GENERALIZATION_THRESHOLD_1 = -.01;
    RefineableActionRule.GlobalParameters.SPECIALIZATION_THRESHOLD_1 = -.4;
}
/// <summary>
/// Adds input nodes to the neural network: one node per network position,
/// expanded into one dimension/value pair per amino-acid symbol
/// (20 residues plus the catch-all 'X').
/// </summary>
/// <param name="NumberOfInputNodes">How many network input positions to create.</param>
public void SetNeuralNetworkInputNodes(int NumberOfInputNodes)
{
    // Residue alphabet, in fixed order; the final entry is the default symbol.
    List<AminoAcid> residues = new List<AminoAcid>
    {
        new AminoAcid("Alanine", 'A'),
        new AminoAcid("Arginine", 'R'),
        new AminoAcid("Asparagine", 'N'),
        new AminoAcid("Aspartic Acid", 'D'),
        new AminoAcid("Cystein", 'C'),
        new AminoAcid("Glutamine", 'Q'),
        new AminoAcid("Glutamic Acid", 'E'),
        new AminoAcid("Glycine", 'G'),
        new AminoAcid("Histidine", 'H'),
        new AminoAcid("Isoleucine", 'I'),
        new AminoAcid("Leucine", 'L'),
        new AminoAcid("Lysine", 'K'),
        new AminoAcid("Methionine", 'M'),
        new AminoAcid("Phenylalanine", 'F'),
        new AminoAcid("Proline", 'P'),
        new AminoAcid("Serine", 'S'),
        new AminoAcid("Threonine", 'T'),
        new AminoAcid("Tryptophan", 'W'),
        new AminoAcid("Tyrosine", 'Y'),
        new AminoAcid("Valine", 'V'),
        new AminoAcid("Default", 'X')
    };

    // Node IDs are 1-based: NetworkNode_1 .. NetworkNode_N.
    for (int node = 1; node <= NumberOfInputNodes; node++)
    {
        String nodeId = "NetworkNode_" + node;
        foreach (AminoAcid residue in residues)
        {
            NeuralNetwork.Input.Add(World.NewDimensionValuePair(nodeId, residue.getSymbol()));
        }
    }
}
/// <summary>
/// Builds a Clarion agent for the jewel-collection task variant, including a
/// hard-coded list of jewel colors to ignore. Prepares (but does not start)
/// the cognitive-cycle thread.
/// </summary>
/// <param name="nws">Proxy used to communicate with the world server.</param>
/// <param name="creature_ID">Identifier of the creature this agent controls.</param>
/// <param name="creature_Name">Name of the creature this agent controls.</param>
public ClarionAgent(WSProxy nws, String creature_ID, String creature_Name)
{
    worldServer = nws;

    // Initialize the underlying Clarion agent.
    CurrentAgent = World.NewAgent("Current Agent");

    // Open the mind-inspection window.
    mind = new MindViewer();
    mind.Show();

    creatureId = creature_ID;
    creatureName = creature_Name;

    // Sensory inputs: one dimension/value pair per visual percept.
    inputWallCreatureAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_WALL_AHEAD);
    inputFoodAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_FOOD_AHEAD);
    inputJewelAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_JEWEL_AHEAD);
    inputJewelInVision = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_JEWEL_IN_VISION);

    // External actions the agent may choose each cycle.
    outputRotateClockwise = World.NewExternalActionChunk(CreatureActions.ROTATE_CLOCKWISE.ToString());
    outputGoAhead = World.NewExternalActionChunk(CreatureActions.GO_AHEAD.ToString());
    outputEatFood = World.NewExternalActionChunk(CreatureActions.EAT_FOOD.ToString());
    outputGetJewel = World.NewExternalActionChunk(CreatureActions.GET_JEWEL.ToString());
    outputHideJewel = World.NewExternalActionChunk(CreatureActions.HIDE_JEWEL.ToString());
    outputGoalAchieved = World.NewExternalActionChunk(CreatureActions.GOAL_ACHIEVED.ToString());
    outputGoToJewelInVision = World.NewExternalActionChunk(CreatureActions.GO_TO_JEWEL.ToString());

    /*List<Thing> thingList = nws.SendGetCreatureState (creature_ID);
     * foreach(Thing t in thingList) {
     *     Console.WriteLine ("Thing: ");
     *     Console.Write (t);
     * }*/

    // Jewel colors the agent should not pursue.
    jewelOutOfScope.Add("Orange");
    jewelOutOfScope.Add("DarkGray_Spoiled");

    // Thread for the simulation's cognitive cycle; created here but
    // presumably started by a Run/Start method elsewhere — confirm.
    runThread = new Thread(CognitiveCycle);
    Console.WriteLine("Agent started");
}
/// <summary>
/// Support calculation for the "memory group" rule set: returns 1 when the
/// sensory input maximally activates both the rule-relevant "Current P"
/// percept and the rule-relevant previously-chosen action chunk; 0 otherwise.
/// </summary>
/// <param name="si">Current sensory input (activation collection).</param>
/// <param name="r">Rule whose generalized condition filters the input; optional.</param>
/// <returns>1 for full support, 0 for none.</returns>
public double CalculateSupport_MemoryGroup(ActivationCollection si, Rule r = null)
{
    // BUGFIX: r defaults to null but both queries below dereference
    // r.GeneralizedCondition, so calling without a rule crashed with a
    // NullReferenceException. Treat "no rule" (or no input) as "no support".
    if (si == null || r == null)
    {
        return 0;
    }

    // The "Current P" percept mentioned by the rule's condition, if any.
    DimensionValuePair currentP =
        (from t in si
         where t.WORLD_OBJECT.AsDimensionValuePair.Dimension.ToString() == "Current P" &&
               r.GeneralizedCondition.Contains(t.WORLD_OBJECT, true)
         select t.WORLD_OBJECT.AsDimensionValuePair).FirstOrDefault();

    // The previously-chosen action (external action chunk) mentioned by the rule.
    DimensionValuePair previousW =
        (from t in si
         where t.WORLD_OBJECT is ExternalActionChunk &&
               r.GeneralizedCondition.Contains(t.WORLD_OBJECT, true)
         select t.WORLD_OBJECT.AsDimensionValuePair).FirstOrDefault();

    if (currentP == null || previousW == null)
    {
        return 0;
    }

    // Both must sit at maximum activation for the rule to be supported.
    return (si[currentP] == John.Parameters.MAX_ACTIVATION &&
            si[previousW] == John.Parameters.MAX_ACTIVATION) ? 1 : 0;
}
// To indicate we are done.
//Boolean allJewelsCollected = false;
//Boolean reachedDeliverySpot = false;
#endregion

#region Constructor
/// <summary>
/// Builds a Clarion agent for the delivery task variant: percepts for near and
/// far food/jewels, leaflet completion, and delivery readiness, plus wander
/// and delivery actions. Prepares (but does not start) the cognitive-cycle
/// thread.
/// </summary>
/// <param name="nws">Proxy used to communicate with the world server.</param>
/// <param name="creature_ID">Identifier of the creature this agent controls.</param>
/// <param name="creature_Name">Name of the creature this agent controls.</param>
public ClarionAgent(WSProxy nws, String creature_ID, String creature_Name)
{
    worldServer = nws;

    // Initialize the underlying Clarion agent.
    CurrentAgent = World.NewAgent("Current Agent");

    // Open the mind-inspection window.
    mind = new MindViewer();
    mind.Show();

    creatureId = creature_ID;
    creatureName = creature_Name;

    // Create a (fake) delivery spot.
    // It seems WSProxy.cs does not support it, so place it at (0,0).
    deliverySpot = new Thing();
    deliverySpot.X1 = 0;
    deliverySpot.Y1 = 0;

    // Sensory inputs: one dimension/value pair per visual percept.
    inputWallAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_WALL_AHEAD);
    inputJewelAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_JEWEL_AHEAD);
    inputFoodAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_FOOD_AHEAD);
    inputJewelAway = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_JEWEL_AWAY);
    inputFoodAway = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_FOOD_AWAY);
    inputAllJewelsCollected = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_ALL_JEWELS_COLLECTED);
    inputCreatureCanDeliver = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_CREATURE_CAN_DELIVER);

    // External actions the agent may choose each cycle.
    outputRotateClockwise = World.NewExternalActionChunk(CreatureActions.ROTATE_CLOCKWISE.ToString());
    outputGetJewel = World.NewExternalActionChunk(CreatureActions.GET_JEWEL.ToString());
    outputGetFood = World.NewExternalActionChunk(CreatureActions.GET_FOOD.ToString());
    outputGoToJewel = World.NewExternalActionChunk(CreatureActions.GO_TO_JEWEL.ToString());
    outputGoToFood = World.NewExternalActionChunk(CreatureActions.GO_TO_FOOD.ToString());
    outputWander = World.NewExternalActionChunk(CreatureActions.WANDER.ToString());
    outputGoToDeliverySpot = World.NewExternalActionChunk(CreatureActions.GO_TO_DELIVER.ToString());
    outputDoDelivery = World.NewExternalActionChunk(CreatureActions.DELIVER.ToString());

    // Thread for the simulation's cognitive cycle; created here but
    // presumably started by a Run/Start method elsewhere — confirm.
    runThread = new Thread(CognitiveCycle);
    Console.WriteLine("Agent started");
}
/// <summary>
/// Output action that makes the agent go ahead
/// </summary>
#endregion
#endregion

#region Constructor
/// <summary>
/// Builds a Clarion agent for the generic item task variant: go/sack/eat/stop
/// percepts and their matching actions. Prepares (but does not start) the
/// cognitive-cycle thread.
/// </summary>
/// <param name="nws">Connection to the world server.</param>
/// <param name="creature_ID">Identifier of the creature this agent controls.</param>
/// <param name="creature_Name">Name of the creature this agent controls.</param>
public ClarionAgent(WorldServer nws, String creature_ID, String creature_Name)
{
    worldServer = nws;

    // Initialize the underlying Clarion agent.
    CurrentAgent = World.NewAgent("Current Agent");

    // Open the mind-inspection window.
    mind = new Mind();
    mind.Show();

    creatureId = creature_ID;
    creatureName = creature_Name;

    // Sensory inputs: one dimension/value pair per visual percept.
    inputAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_AHEAD);
    inputGoItem = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_GO_ITEM);
    inputSackItItem = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_SACKIT_ITEM);
    inputEatItem = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_EAT_ITEM);
    inputStopCreature = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_STOP_CREATURE);

    // External actions the agent may choose each cycle.
    outputRotateClockwise = World.NewExternalActionChunk(CreatureActions.ROTATE_CLOCKWISE.ToString());
    outputGoItem = World.NewExternalActionChunk(CreatureActions.GO_ITEM.ToString());
    outputSackItItem = World.NewExternalActionChunk(CreatureActions.SACKIT_ITEM.ToString());
    outputEatItem = World.NewExternalActionChunk(CreatureActions.EAT_ITEM.ToString());
    outputStopCreature = World.NewExternalActionChunk(CreatureActions.STOP_CREATURE.ToString());

    // Thread for the simulation's cognitive cycle; created here but
    // presumably started by a Run/Start method elsewhere — confirm.
    runThread = new Thread(CognitiveCycle);
    Console.WriteLine("Agent started");
}
/// </summary>
/// <param name="nws">Connection to the world server.</param>
/// <param name="creature_ID">Identifier of the creature this agent controls.</param>
/// <param name="creature_Name">Name of the creature this agent controls.</param>
#endregion

#region Constructor
public ClarionAgent(WorldServer nws, String creature_ID, String creature_Name)
{
    worldServer = nws;

    // Initialize the underlying Clarion agent.
    CurrentAgent = World.NewAgent("Current Agent");

    // FMT 15/06 - temp disable mind
    //mind = new Mind();
    //mind.Show ();

    creatureId = creature_ID;
    creatureName = creature_Name;

    // Sensory inputs: one dimension/value pair per visual percept.
    inputWallAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_WALL_AHEAD);
    // FMT 29/04/2017
    inputFoodAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_FOOD_AHEAD);
    inputJewelAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_JEWEL_AHEAD);
    inputLeafletJewelAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_LEAFLET_JEWEL_AHEAD);
    inputJewelHide = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_JEWEL_HIDE);
    inputEnergyLow = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_ENERGY_LOW);
    inputCloseObject = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_CLOSE_OBJECT);
    inputLeafletComplete = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_LEAFLET_COMPLETE);

    // External actions the agent may choose each cycle.
    outputRotateClockwise = World.NewExternalActionChunk(CreatureActions.ROTATE_CLOCKWISE.ToString());
    outputGoAhead = World.NewExternalActionChunk(CreatureActions.GO_AHEAD.ToString());
    // FMT 29/04/2017
    outputEat = World.NewExternalActionChunk(CreatureActions.EAT.ToString());
    outputGet = World.NewExternalActionChunk(CreatureActions.GET.ToString());
    outputGoTo = World.NewExternalActionChunk(CreatureActions.GO_TO.ToString());
    outputHide = World.NewExternalActionChunk(CreatureActions.HIDE.ToString());
    outputStop = World.NewExternalActionChunk(CreatureActions.STOP.ToString());

    // Thread for the simulation's cognitive cycle; created here but
    // presumably started by a Run/Start method elsewhere — confirm.
    runThread = new Thread(CognitiveCycle);
    Console.WriteLine("Agent started");
}
/// <summary>
/// Runs the "Simple Hello World" Clarion task: over a number of random
/// trials, trains John's implicit decision network (via feedback) to answer
/// with the salutation that was maximally activated in the input.
/// Per-trial traces and final results are written to HelloWorldSimple.txt;
/// a progress indicator goes to the real console.
/// </summary>
static void Main(string[] args)
{
    //Initialize the task
    Console.WriteLine("Initializing the Simple Hello World Task");
    int CorrectCounter = 0;
    int NumberTrials = 10000;
    int progress = 0;
    World.LoggingLevel = TraceLevel.Off;

    TextWriter orig = Console.Out;
    StreamWriter sw = File.CreateText("HelloWorldSimple.txt");

    // BUGFIX: the dimension/value labels were misspelled ("Hellow",
    // "Goodbyew"); use the same spelling as the action chunks and the
    // Full Hello World task.
    DimensionValuePair hi = World.NewDimensionValuePair("Salutation", "Hello");
    DimensionValuePair bye = World.NewDimensionValuePair("Salutation", "Goodbye");
    ExternalActionChunk sayHi = World.NewExternalActionChunk("Hello");
    ExternalActionChunk sayBye = World.NewExternalActionChunk("Goodbye");

    //Initialize the Agent
    Agent John = World.NewAgent("John");
    SimplifiedQBPNetwork net = AgentInitializer.InitializeImplicitDecisionNetwork(John, SimplifiedQBPNetwork.Factory);
    net.Input.Add(hi);
    net.Input.Add(bye);
    net.Output.Add(sayHi);
    net.Output.Add(sayBye);
    John.Commit(net);
    net.Parameters.LEARNING_RATE = 1;
    John.ACS.Parameters.PERFORM_RER_REFINEMENT = false;

    //Run the task
    Console.WriteLine("Running the Simple Hello World Task");
    Console.SetOut(sw);
    Random rand = new Random();
    SensoryInformation si;
    ExternalActionChunk chosen;

    try
    {
        for (int i = 0; i < NumberTrials; i++)
        {
            si = World.NewSensoryInformation(John);

            //Randomly choose an input to perceive.
            if (rand.NextDouble() < .5)
            {
                //Say "Hello"
                si.Add(hi, John.Parameters.MAX_ACTIVATION);
                si.Add(bye, John.Parameters.MIN_ACTIVATION);
            }
            else
            {
                //Say "Goodbye"
                si.Add(hi, John.Parameters.MIN_ACTIVATION);
                si.Add(bye, John.Parameters.MAX_ACTIVATION);
            }

            //Perceive the sensory information
            John.Perceive(si);

            //Choose an action
            chosen = John.GetChosenExternalAction(si);

            // The response is correct when the chosen action matches the
            // salutation that was maximally activated in the input.
            // (Collapses the four duplicated feedback branches.)
            bool correct = (chosen == sayHi)
                ? si[hi] == John.Parameters.MAX_ACTIVATION
                : si[bye] == John.Parameters.MAX_ACTIVATION;

            if (correct)
            {
                Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was correct");
                CorrectCounter++;
                John.ReceiveFeedback(si, 1.0);
            }
            else
            {
                Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was incorrect");
                John.ReceiveFeedback(si, 0.0);
            }

            // Progress indicator goes to the real console, not the log file.
            Console.SetOut(orig);
            progress = (int)(((double)(i + 1) / (double)NumberTrials) * 100);
            Console.CursorLeft = 0;
            Console.Write(progress + "% Complete..");
            Console.SetOut(sw);
        }

        //Report Results (into the log file)
        Console.WriteLine("Reporting Results for the Simple Hello World Task");
        Console.WriteLine("John got " + CorrectCounter + " correct out of " + NumberTrials + " trials (" + (int)Math.Round(((double)CorrectCounter / (double)NumberTrials) * 100) + "%)");
        Console.WriteLine("At the end of the task, John had learned the following rules:");
        foreach (var i in John.GetInternals(Agent.InternalContainers.ACTION_RULES))
        {
            Console.WriteLine(i);
        }
    }
    finally
    {
        // BUGFIX: restore the console and close the log even if a trial
        // throws; previously an exception leaked the StreamWriter and left
        // Console.Out pointing at the closed file.
        Console.SetOut(orig);
        sw.Close();
    }

    Console.CursorLeft = 0;
    Console.WriteLine("100% Complete..");

    //Kill the agent to end the task
    Console.WriteLine("Killing John to end the program");
    John.Die();
    Console.WriteLine("John is Dead");
    Console.WriteLine("The Simple Hello World Task has finished");
    Console.WriteLine("The results have been saved to \"HelloWorldSimple.txt\"");
    Console.Write("Press any key to exit");
    Console.ReadKey(true);
}
/// <summary>
/// Sets up a two-agent "Friend or Foe" game: Alice and Bob share the same
/// configuration (a Q-learning network for choosing cooperate/defect, plus
/// fixed rules that record the opponent's last move in working memory) and
/// play against each other. The simulating environment determines each
/// agent's inputs and feedback from the payoff matrix built here.
/// </summary>
public void Initialize()
{
    // Dimension Value Pairs:
    sayWhat = World.NewDimensionValuePair("YourAction", "What do you want to do?");

    // External Action Chunks:
    sayCooperate = World.NewExternalActionChunk("Cooperate");
    sayDefect = World.NewExternalActionChunk("Defect");

    // placeholder
    // GoalChunk salute = World.NewGoalChunk("Salute");
    // GoalChunk bidFarewell = World.NewGoalChunk("Bid Farewell");

    // WM Actions: each sets a declarative chunk recording the opponent's move.
    wmuacC = World.NewWorkingMemoryUpdateActionChunk("Remember my opponent cooperated");
    wmuacD = World.NewWorkingMemoryUpdateActionChunk("Remember my opponent defected");
    DeclarativeChunk dcoc = World.NewDeclarativeChunk("My opponent cooperated");
    DeclarativeChunk dcod = World.NewDeclarativeChunk("My opponent defected");
    wmuacC.Add(WorkingMemory.RecognizedActions.SET_RESET, dcoc);
    wmuacD.Add(WorkingMemory.RecognizedActions.SET_RESET, dcod);

    // Set up a two agent model (meaning two agents with the same setup,
    // playing against each other).
    Alice = World.NewAgent("Alice");
    Bob = World.NewAgent("Bob");

    // Simulating environment will determine inputs to each agent based on
    // what each agent does. Feedback is determined by payoff matrix.
    payoff = new int [2, 2, 2];

    // Doing this the hard way. Could set this up all in-line above, but this
    // makes the table more explicit in terms of how we want to use it.
    // The payoff matrix here is called "Friend or Foe", about the simplest case.
    // Indices mean: FOR-WHICH-AGENT, WHAT-ALICE-DOES, WHAT-BOB-DOES.
    payoff[_ALICE, _COOPERATE, _COOPERATE] = 1;
    payoff[_ALICE, _COOPERATE, _DEFECT] = 0;
    payoff[_ALICE, _DEFECT, _COOPERATE] = 2;
    payoff[_ALICE, _DEFECT, _DEFECT] = 0;
    payoff[_BOB, _COOPERATE, _COOPERATE] = 1;
    payoff[_BOB, _COOPERATE, _DEFECT] = 2;
    payoff[_BOB, _DEFECT, _COOPERATE] = 0;
    payoff[_BOB, _DEFECT, _DEFECT] = 0;
    maxpay = 2;

    results = new int[_TRIALS, 2, 2];

    // Set up a Q-learning Net =
    // -- Eligibility Condition = True if "What do you want to do?" is in
    //    input, otherwise False
    // -- Input = "My opponent cooperated", "My opponent defected",
    //    "What do you want to do?"
    // -- Output = "I want to defect", "I want to cooperate"
    //
    // Also, RER is turned ON.
    QBPNetwork net_A = AgentInitializer.InitializeImplicitDecisionNetwork(Alice, QBPNetwork.Factory, QNetEC);
    net_A.Input.Add(sayWhat);
    net_A.Input.Add(sayCooperate);
    net_A.Input.Add(sayDefect);
    net_A.Output.Add(sayCooperate);
    net_A.Output.Add(sayDefect);
    Alice.Commit(net_A);
    net_A.Parameters.LEARNING_RATE = 1;
    Alice.ACS.Parameters.PERFORM_RER_REFINEMENT = true; // it's true by default anyway

    // Fixed level-selection weights: all components weighted equally.
    Alice.ACS.Parameters.LEVEL_SELECTION_METHOD = ActionCenteredSubsystem.LevelSelectionMethods.COMBINED;
    Alice.ACS.Parameters.LEVEL_SELECTION_OPTION = ActionCenteredSubsystem.LevelSelectionOptions.FIXED;
    Alice.ACS.Parameters.FIXED_FR_LEVEL_SELECTION_MEASURE = 1;
    Alice.ACS.Parameters.FIXED_BL_LEVEL_SELECTION_MEASURE = 1;
    Alice.ACS.Parameters.FIXED_RER_LEVEL_SELECTION_MEASURE = 1;
    Alice.ACS.Parameters.WM_UPDATE_ACTION_PROBABILITY = 1;

    // Rules (2 rules) =
    // Rule 1:
    // -- Condition = "Your opponent cooperated"
    // -- Action = Set "My opponent cooperated" in WM
    // Rule 2:
    // -- Condition = "Your opponent defected"
    // -- Action = Set "My opponent defect" in WM
    // (FRSC is the fixed-rule support calculator defined elsewhere.)
    FixedRule ruleA1 = AgentInitializer.InitializeActionRule(Alice, FixedRule.Factory, wmuacC, FRSC);
    FixedRule ruleA2 = AgentInitializer.InitializeActionRule(Alice, FixedRule.Factory, wmuacD, FRSC);
    Alice.Commit(ruleA1);
    Alice.Commit(ruleA2);

    // Bob gets an identical configuration.
    QBPNetwork net_B = AgentInitializer.InitializeImplicitDecisionNetwork(Bob, QBPNetwork.Factory, QNetEC);
    net_B.Input.Add(sayWhat);
    net_B.Input.Add(sayCooperate);
    net_B.Input.Add(sayDefect);
    net_B.Output.Add(sayCooperate);
    net_B.Output.Add(sayDefect);
    Bob.Commit(net_B);

    // Use Weighted Combination; NO partial match on TL.
    net_B.Parameters.LEARNING_RATE = 1;
    Bob.ACS.Parameters.PERFORM_RER_REFINEMENT = true;
    Bob.ACS.Parameters.LEVEL_SELECTION_METHOD = ActionCenteredSubsystem.LevelSelectionMethods.COMBINED;
    Bob.ACS.Parameters.LEVEL_SELECTION_OPTION = ActionCenteredSubsystem.LevelSelectionOptions.FIXED;
    Bob.ACS.Parameters.FIXED_FR_LEVEL_SELECTION_MEASURE = 1;
    Bob.ACS.Parameters.FIXED_BL_LEVEL_SELECTION_MEASURE = 1;
    Bob.ACS.Parameters.FIXED_RER_LEVEL_SELECTION_MEASURE = 1;
    Bob.ACS.Parameters.WM_UPDATE_ACTION_PROBABILITY = 1;

    FixedRule ruleB1 = AgentInitializer.InitializeActionRule(Bob, FixedRule.Factory, wmuacC, FRSC);
    FixedRule ruleB2 = AgentInitializer.InitializeActionRule(Bob, FixedRule.Factory, wmuacD, FRSC);
    Bob.Commit(ruleB1);
    Bob.Commit(ruleB2);

    // Initially using the same parameters for RER as Full Hello World.
    RefineableActionRule.GlobalParameters.SPECIALIZATION_THRESHOLD_1 = -.6;
    RefineableActionRule.GlobalParameters.GENERALIZATION_THRESHOLD_1 = -.1;
    RefineableActionRule.GlobalParameters.INFORMATION_GAIN_OPTION = RefineableActionRule.IGOptions.PERFECT;

    /*
     * Note -- it seems that when you pass in "Your opponent…", the agent
     * should return the "Do Nothing" external action (since it performed an
     * internal WM action). However, you can just ignore this either way.
     */
}
/// <summary>
/// Runs every task/group combination for a number of simulated participants.
/// Each participant performs max_i trials of the factory-control task: the
/// agent perceives the IRL variable set, the target, its last action and the
/// resulting production level, gets feedback on whether the target was hit,
/// and chooses the next worker assignment. Hit counts are accumulated in
/// <c>results</c> and printed as a table per task.
/// </summary>
public void Run()
{
    foreach (Tasks t in Enum.GetValues(typeof(Tasks)))
    {
        // PERSON task runs twice as long; only the second half is scored below.
        int max_i = ((t == Tasks.PERSON) ? 2 * numTestTrials : numTestTrials);

        foreach (Groups g in Enum.GetValues(typeof(Groups)))
        {
            Console.WriteLine("Running Group " + g + " through task " + t);

            for (int r = 0; r < numRepeats; r++)
            {
                Console.Write("Participant #" + r + " is performing the task ");

                // Random starting worker assignment and production level
                // (values 0..11 — presumably the task's 12 levels; confirm).
                double currentW = rand.Next(12);
                double lastP = rand.Next(12);

                Initialize(g);

                // Build the constant IRL portion of the sensory input: every
                // combination of the A/B/C variable values, all activated.
                ActivationCollection irlSI = ImplicitComponentInitializer.NewDataSet();
                var irlVars = (from a in As
                               select from b in Bs
                               select from c in Cs
                               select new
                               {
                                   A = World.GetDimensionValuePair("A", a),
                                   B = World.GetDimensionValuePair("B", b),
                                   C = World.GetDimensionValuePair("C", c)
                               }).SelectMany(k => k).SelectMany(k => k);
                foreach (var k in irlVars)
                {
                    irlSI.Add(k.A, 1);
                    irlSI.Add(k.B, 1);
                    irlSI.Add(k.C, 1);
                }

                DimensionValuePair targetDV = World.GetDimensionValuePair("Target P", target);

                GenerateIRLRuleSet(IRL_Rule_Sets.ONE);

                SensoryInformation si = null;
                SensoryInformation prevSI;

                for (int i = 0; i < max_i; i++)
                {
                    // Console progress dots (overwrites previous output).
                    int shift = 10 - (int)Math.Round(10 * ((double)i / (double)max_i));
                    Console.CursorLeft -= shift;
                    Console.Write(".");
                    for (int s = 0; s < shift - 1; s++)
                    {
                        Console.Write(" ");
                    }

                    // If the agent has refined away all IRL rules, replenish
                    // with the second rule set.
                    if ((from a in John.GetInternals(Agent.InternalContainers.ACTION_RULES)
                         where a is IRLRule
                         select a).Count() == 0)
                    {
                        GenerateIRLRuleSet(IRL_Rule_Sets.TWO);
                    }

                    // Feedback is delivered one step late: score this step's
                    // outcome against the PREVIOUS sensory input.
                    prevSI = si;
                    si = World.NewSensoryInformation(John);

                    foreach (var s in irlSI)
                    {
                        si.Add(s);
                    }
                    si.Add(targetDV, 1);
                    si.Add(World.GetActionChunk(currentW), 1);

                    // Apply the current worker assignment to get the new
                    // production level.
                    lastP = FactoryOutput(lastP, currentW);
                    si.Add(World.GetDimensionValuePair("Current P", lastP), 1);

                    if (Math.Abs(lastP - target) < double.Epsilon)
                    {
                        // Hit: only scored for PERSON after the practice half.
                        if ((t != Tasks.PERSON || (t == Tasks.PERSON && i >= numTestTrials)))
                        {
                            results[(int)t, (int)g, r]++;
                        }
                        if (prevSI != null)
                        {
                            John.ReceiveFeedback(prevSI, 1);
                        }
                    }
                    else
                    {
                        if (prevSI != null)
                        {
                            John.ReceiveFeedback(prevSI, 0);
                        }
                    }

                    John.Perceive(si);

                    // The chosen action's label encodes the next worker
                    // assignment.
                    currentW = (double)John.GetChosenExternalAction(si).LabelAsIComparable;
                }

                Console.WriteLine();
                Console.WriteLine("Participant #" + r + " is finished performing the task and hit the target " + results[(int)t, (int)g, r] + " times out of " + max_i);
                Console.WriteLine("At the end of the task, the participant had the following rules: ");
                foreach (var ar in John.GetInternals(Agent.InternalContainers.ACTION_RULES))
                {
                    Console.WriteLine(ar);
                }

                // Retire this participant's agent before the next repeat.
                John.Die();
                World.Remove(John);
            }
        }

        Console.WriteLine("Tabular results for the " + t + " task:");
        Console.WriteLine("Group\tParticipant\tHits");
        foreach (Groups g in Enum.GetValues(typeof(Groups)))
        {
            for (int i = 0; i < numRepeats; i++)
            {
                Console.WriteLine(g + "\t" + i + "\t" + results[(int)t, (int)g, i]);
            }
        }
    }
}
/// <summary>
/// Entry point for the "Full Hello World" Clarion task. An agent with two drives
/// (affiliation/belongingness and autonomy), meta-cognitive goal selection, and a
/// simplified Q-learning back-propagation network learns over 20,000 trials to reply
/// "Hello"/"Goodbye" to the matching salutation. The per-trial trace is written to
/// HelloWorldFull.txt; the real console shows only a progress meter.
/// </summary>
static void Main(string[] args)
{
    //Initialize the task
    Console.WriteLine("Initializing the Full Hello World Task");
    int CorrectCounter = 0;
    int NumberTrials = 20000;
    Random rand = new Random();
    World.LoggingLevel = TraceLevel.Off;
    int progress = 0;
    TextWriter orig = Console.Out;
    StreamWriter sw = File.CreateText("HelloWorldFull.txt");

    // World-level knowledge: the two possible inputs, actions, and goals.
    DimensionValuePair hi = World.NewDimensionValuePair("Salutation", "Hello");
    DimensionValuePair bye = World.NewDimensionValuePair("Salutation", "Goodbye");
    ExternalActionChunk sayHi = World.NewExternalActionChunk("Hello");
    ExternalActionChunk sayBye = World.NewExternalActionChunk("Goodbye");
    GoalChunk salute = World.NewGoalChunk("Salute");
    GoalChunk bidFarewell = World.NewGoalChunk("Bid Farewell");

    //Initialize the Agent
    Agent John = World.NewAgent("John");

    // Ensure the console writer is restored and the log file released even if the
    // task throws part-way through (the original code leaked both on error).
    try
    {
        // Bottom level of the ACS: a simplified Q-learning back-prop network mapping
        // (goal, salutation) inputs to the two say-actions.
        SimplifiedQBPNetwork net = AgentInitializer.InitializeImplicitDecisionNetwork(John, SimplifiedQBPNetwork.Factory);
        net.Input.Add(salute, "goals");
        net.Input.Add(bidFarewell, "goals");
        net.Input.Add(hi);
        net.Input.Add(bye);
        net.Output.Add(sayHi);
        net.Output.Add(sayBye);
        net.Parameters.LEARNING_RATE = 1;
        John.Commit(net);

        // Action selection mixes the bottom level (BL) and extracted rules (RER)
        // equally; IRL and fixed rules are disabled.
        John.ACS.Parameters.VARIABLE_BL_BETA = .5;
        John.ACS.Parameters.VARIABLE_RER_BETA = .5;
        John.ACS.Parameters.VARIABLE_IRL_BETA = 0;
        John.ACS.Parameters.VARIABLE_FR_BETA = 0;
        RefineableActionRule.GlobalParameters.SPECIALIZATION_THRESHOLD_1 = -.6;
        RefineableActionRule.GlobalParameters.GENERALIZATION_THRESHOLD_1 = -.1;
        RefineableActionRule.GlobalParameters.INFORMATION_GAIN_OPTION = RefineableActionRule.IGOptions.PERFECT;

        // Two drives with randomized initial inputs sharing one deficit-change callback.
        AffiliationBelongingnessDrive ab = AgentInitializer.InitializeDrive(John, AffiliationBelongingnessDrive.Factory, rand.NextDouble(), (DeficitChangeProcessor)HelloWorldFull_DeficitChange);
        DriveEquation abd = AgentInitializer.InitializeDriveComponent(ab, DriveEquation.Factory);
        ab.Commit(abd);
        John.Commit(ab);
        AutonomyDrive aut = AgentInitializer.InitializeDrive(John, AutonomyDrive.Factory, rand.NextDouble(), (DeficitChangeProcessor)HelloWorldFull_DeficitChange);
        DriveEquation autd = AgentInitializer.InitializeDriveComponent(aut, DriveEquation.Factory);
        aut.Commit(autd);
        John.Commit(aut);

        // Meta-cognitive goal selection: drive strengths choose between the two goals.
        GoalSelectionModule gsm = AgentInitializer.InitializeMetaCognitiveModule(John, GoalSelectionModule.Factory);
        GoalSelectionEquation gse = AgentInitializer.InitializeMetaCognitiveDecisionNetwork(gsm, GoalSelectionEquation.Factory);
        gse.Input.Add(ab.GetDriveStrength());
        gse.Input.Add(aut.GetDriveStrength());
        GoalStructureUpdateActionChunk su = World.NewGoalStructureUpdateActionChunk();
        GoalStructureUpdateActionChunk bu = World.NewGoalStructureUpdateActionChunk();
        su.Add(GoalStructure.RecognizedActions.SET_RESET, salute);
        bu.Add(GoalStructure.RecognizedActions.SET_RESET, bidFarewell);
        gse.Output.Add(su);
        gse.Output.Add(bu);
        // Affiliation is relevant to saluting; autonomy to bidding farewell.
        gsm.SetRelevance(su, ab, 1);
        gsm.SetRelevance(bu, aut, 1);
        gsm.Commit(gse);
        John.Commit(gsm);
        John.MS.Parameters.CURRENT_GOAL_ACTIVATION_OPTION = MotivationalSubsystem.CurrentGoalActivationOptions.FULL;

        //Run the task
        Console.WriteLine("Running the Full Hello World Task");
        Console.SetOut(sw);   // redirect the per-trial trace into the log file
        SensoryInformation si;
        ExternalActionChunk chosen;
        for (int i = 0; i < NumberTrials; i++)
        {
            si = World.NewSensoryInformation(John);
            // Stimulate both drives on every trial.
            si[AffiliationBelongingnessDrive.MetaInfoReservations.STIMULUS, typeof(AffiliationBelongingnessDrive).Name] = 1;
            si[AutonomyDrive.MetaInfoReservations.STIMULUS, typeof(AutonomyDrive).Name] = 1;

            //Randomly choose an input to perceive.
            if (rand.NextDouble() < .5)
            {
                //Say "Hello"
                si.Add(hi, John.Parameters.MAX_ACTIVATION);
                si.Add(bye, John.Parameters.MIN_ACTIVATION);
            }
            else
            {
                //Say "Goodbye"
                si.Add(hi, John.Parameters.MIN_ACTIVATION);
                si.Add(bye, John.Parameters.MAX_ACTIVATION);
            }

            //Perceive the sensory information
            John.Perceive(si);
            //Choose an action
            chosen = John.GetChosenExternalAction(si);

            //Deliver appropriate feedback to the agent
            if (chosen == sayHi)
            {
                //The agent said "Hello". Correct iff "Hello" was the active stimulus.
                if (si[hi] == John.Parameters.MAX_ACTIVATION)
                {
                    Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was correct");
                    //Record the agent's success.
                    CorrectCounter++;
                    //Give positive feedback.
                    John.ReceiveFeedback(si, 1.0);
                }
                else
                {
                    Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was incorrect");
                    //Give negative feedback.
                    John.ReceiveFeedback(si, 0.0);
                }
            }
            else
            {
                //The agent said "Goodbye". Correct iff "Goodbye" was the active stimulus.
                if (si[bye] == John.Parameters.MAX_ACTIVATION)
                {
                    Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was correct");
                    //Record the agent's success.
                    CorrectCounter++;
                    //Give positive feedback.
                    John.ReceiveFeedback(si, 1.0);
                }
                else
                {
                    Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was incorrect");
                    //Give negative feedback.
                    John.ReceiveFeedback(si, 0.0);
                }
            }

            // Briefly switch back to the real console to update the progress meter.
            Console.SetOut(orig);
            progress = (int)(((double)(i + 1) / (double)NumberTrials) * 100);
            Console.CursorLeft = 0;
            Console.Write(progress + "% Complete..");
            Console.SetOut(sw);
        }

        //Report Results (Console.Out is still the log file here).
        Console.WriteLine("Reporting Results for the Full Hello World Task");
        Console.WriteLine("John got " + CorrectCounter + " correct out of " + NumberTrials + " trials (" + (int)Math.Round(((double)CorrectCounter / (double)NumberTrials) * 100) + "%)");
        Console.WriteLine("At the end of the task, John had learned the following rules:");
        foreach (var i in John.GetInternals(Agent.InternalContainers.ACTION_RULES))
        {
            Console.WriteLine(i);
        }
    }
    finally
    {
        Console.SetOut(orig);
        sw.Close();
    }

    Console.CursorLeft = 0;
    Console.WriteLine("100% Complete..");
    //Kill the agent to end the task
    Console.WriteLine("Killing John to end the program");
    John.Die();
    Console.WriteLine("John is Dead");
    Console.WriteLine("The Full Hello World Task has finished");
    Console.WriteLine("The results have been saved to \"HelloWorldFull.txt\"");
    Console.Write("Press any key to exit");
    Console.ReadKey(true);
}