Пример #1
0
        /// <summary>
        /// Creates the Clarion agent: stores the world-server proxy and creature
        /// identity, opens the mind viewer window, and registers the agent's
        /// sensory inputs and external actions with the Clarion <c>World</c>.
        /// </summary>
        /// <param name="nws">Proxy for the world server this agent acts in.</param>
        /// <param name="creature_ID">Identifier of the controlled creature.</param>
        /// <param name="creature_Name">Name of the controlled creature.</param>
        public ClarionAgent(WSProxy nws, String creature_ID, String creature_Name)
        {
            worldServer = nws;
            // Initialize the agent
            CurrentAgent = World.NewAgent("Current Agent");
            mind         = new MindViewer();
            mind.Show();
            creatureId   = creature_ID;
            creatureName = creature_Name;

            // Initialize Input Information
            // One (dimension, value) pair per perceivable condition on the visual sensor dimension.
            inputWallAhead  = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_WALL_AHEAD);
            inputFoodAhead  = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_FOOD_AHEAD);
            inputJewelAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_JEWEL_AHEAD);
            inputNeedJewel  = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_NEED_JEWEL);


            // Initialize Output actions
            // Each action chunk is labeled with the enum name of the creature action it triggers.
            outputRotateClockwise = World.NewExternalActionChunk(CreatureActions.ROTATE_CLOCKWISE.ToString());
            outputGoAhead         = World.NewExternalActionChunk(CreatureActions.GO_AHEAD.ToString());
            outputGetJewel        = World.NewExternalActionChunk(CreatureActions.GET_JEWEL.ToString());
            outputEatFood         = World.NewExternalActionChunk(CreatureActions.EAT_FOOD.ToString());
            outputGoToJewel       = World.NewExternalActionChunk(CreatureActions.GO_TO_JEWEL.ToString());

            //Create thread to simulation
            // NOTE(review): the thread is only created here, not started — presumably a
            // separate run/start method calls runThread.Start(); confirm.
            runThread = new Thread(CognitiveCycle);
            Console.WriteLine("Agent started");
        }
Пример #2
0
        /// <summary>
        /// Creates the Clarion agent for the "closest item" action set: stores the
        /// world-server proxy and creature identity, opens the mind viewer, and
        /// registers the sensory inputs and external actions with the Clarion World.
        /// </summary>
        /// <param name="nws">Proxy for the world server this agent acts in.</param>
        /// <param name="creature_ID">Identifier of the controlled creature.</param>
        /// <param name="creature_Name">Name of the controlled creature.</param>
        public ClarionAgent(WSProxy nws, String creature_ID, String creature_Name)
        {
            worldServer = nws;
            // Initialize the agent
            CurrentAgent = World.NewAgent("Current Agent");
            mind         = new MindViewer();
            mind.Show();
            creatureId   = creature_ID;
            creatureName = creature_Name;

            // Initialize Input Information
            // One (dimension, value) pair per perceivable condition on the visual sensor dimension.
            inputWallAhead      = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_WALL_AHEAD);
            inputFoodAhead      = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_FOOD_AHEAD);
            inputJewelAhead     = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_JEWEL_AHEAD);
            inputDistantJewel   = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_DISTANT_JEWEL);
            inputDistantFood    = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_DISTANT_FOOD);
            inputDeliverLeaflet = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_DELIVER_LEAFLET);

            // Initialize Output actions
            // Each action chunk is labeled with the enum name of the creature action it triggers.
            outputRotateClockwise = World.NewExternalActionChunk(CreatureActions.ROTATE_CLOCKWISE.ToString());
            //outputGoAhead = World.NewExternalActionChunk(CreatureActions.GO_AHEAD.ToString());
            outputGoToClosestJewel = World.NewExternalActionChunk(CreatureActions.GO_TO_CLOSEST_JEWEL.ToString());
            outputGoToClosestFood  = World.NewExternalActionChunk(CreatureActions.GO_TO_CLOSEST_FOOD.ToString());
            outputEatFood          = World.NewExternalActionChunk(CreatureActions.EAT_FOOD.ToString());
            outputSackJewel        = World.NewExternalActionChunk(CreatureActions.SACK_JEWEL.ToString());
            outputDeliverLeaflet   = World.NewExternalActionChunk(CreatureActions.PREPARE_TO_DELIVER_LEAFLET.ToString());
            outputStop             = World.NewExternalActionChunk(CreatureActions.STOP.ToString());

            //Create thread to simulation
            // NOTE(review): thread is created but not started here; presumably started elsewhere.
            runThread = new Thread(CognitiveCycle);
            Console.WriteLine("Agent started");
        }
Пример #3
0
        /// <summary>
        /// Creates the Clarion agent: stores the world-server reference and creature
        /// identity, and registers the sensory inputs and external actions with the
        /// Clarion <c>World</c>. The mind viewer is disabled in this variant.
        /// </summary>
        /// <param name="nws">World server this agent acts in.</param>
        /// <param name="creature_ID">Identifier of the controlled creature.</param>
        /// <param name="creature_Name">Name of the controlled creature.</param>
        public ClarionAgent(WorldServer nws, String creature_ID, String creature_Name)
        {
            worldServer = nws;
            // Initialize the agent
            CurrentAgent = World.NewAgent("Current Agent");
            //mind = new Mind();
            //mind.Show();
            creatureId   = creature_ID;
            creatureName = creature_Name;

            // Initialize Input Information
            // One (dimension, value) pair per perceivable condition on the visual sensor dimension.
            inputWallAhead            = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_WALL_AHEAD);
            inputFoodAhead            = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_FOOD_AHEAD);
            inputLeafletJewelAhead    = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_LEAFLET_JEWEL_AHEAD);
            inputNonLeafletJewelAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_NON_LEAFLET_JEWEL_AHEAD);
            inputCloseObjectAhead     = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_CLOSE_OBJECT_AHEAD);
            inputHasCompletedLeaflet  = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_HAS_COMPLETED_LEAFLET);


            // Initialize Output actions
            // Each action chunk is labeled with the enum name of the creature action it triggers.
            outputRotateClockwise = World.NewExternalActionChunk(CreatureActions.ROTATE_CLOCKWISE.ToString());
            outputGoAhead         = World.NewExternalActionChunk(CreatureActions.GO_AHEAD.ToString());
            outputEat             = World.NewExternalActionChunk(CreatureActions.EAT.ToString());
            outputHide            = World.NewExternalActionChunk(CreatureActions.HIDE.ToString());
            outputSack            = World.NewExternalActionChunk(CreatureActions.SACK.ToString());
            outputStop            = World.NewExternalActionChunk(CreatureActions.STOP.ToString());

            //Create thread to simulation
            // NOTE(review): thread is created but not started here; presumably started elsewhere.
            runThread = new Thread(CognitiveCycle);
            Console.WriteLine("Agent started");
        }
Пример #4
0
        /// <summary>
        /// Thread body for the agent's sense-perceive-act loop: reads the current
        /// scene from the world server, converts it into Clarion sensory information,
        /// lets the agent choose an external action, executes it, and repeats.
        /// </summary>
        /// <param name="obj">Unused thread-start argument (required by the Thread delegate).</param>
        private void CognitiveCycle(object obj)
        {
            Console.WriteLine("Starting Cognitive Cycle ... press CTRL-C to finish !");
            // Cognitive Cycle starts here getting sensorial information
            // NOTE(review): `!=` rather than `<` — presumably a negative sentinel
            // (e.g. -1) in MaxNumberOfCognitiveCycles means "run forever"; confirm
            // before changing the comparison.
            while (CurrentCognitiveCycle != MaxNumberOfCognitiveCycles)
            {
                // Get current sensory information
                IList <Thing> currentSceneInWS3D = processSensoryInformation();

                // Make the perception
                SensoryInformation si = prepareSensoryInformation(currentSceneInWS3D);

                //Perceive the sensory information
                CurrentAgent.Perceive(si);

                //Choose an action
                ExternalActionChunk chosen = CurrentAgent.GetChosenExternalAction(si);

                // Get the selected action
                // The chunk label is the CreatureActions enum name (see constructor),
                // so it can be parsed back into the enum (case-insensitive).
                String          actionLabel = chosen.LabelAsIComparable.ToString();
                CreatureActions actionType  = (CreatureActions)Enum.Parse(typeof(CreatureActions), actionLabel, true);

                // Call the output event handler
                processSelectedAction(actionType);

                // Increment the number of cognitive cycles
                CurrentCognitiveCycle++;

                //Wait to the agent accomplish his job
                if (TimeBetweenCognitiveCycles > 0)
                {
                    Thread.Sleep(TimeBetweenCognitiveCycles);
                }
            }
        }
Пример #5
0
 /// <summary>
 /// Computes agent <paramref name="agnt"/>'s normalized payoff for one round,
 /// given both players' chosen action chunks.
 /// </summary>
 /// <param name="agnt">Index of the agent whose payoff matrix is consulted.</param>
 /// <param name="ac">Action chunk chosen by player A.</param>
 /// <param name="bc">Action chunk chosen by player B.</param>
 /// <returns>The matrix payoff divided by the maximum payoff.</returns>
 public double ComputePayoff(int agnt, ExternalActionChunk ac, ExternalActionChunk bc)
 {
     // Map each player's chunk onto its payoff-matrix index:
     // anything other than "cooperate" counts as a defection.
     int rowChoice = (ac == sayCooperate) ? _COOPERATE : _DEFECT;
     int colChoice = (bc == sayCooperate) ? _COOPERATE : _DEFECT;

     // Normalize by the maximum possible payoff.
     return((double)payoff[agnt, rowChoice, colChoice] / (double)maxpay);
 }
Пример #6
0
 /// <summary>
 /// Records the outcome of one trial in the results table, bucketed by both
 /// players' cooperate/defect choices.
 /// </summary>
 /// <param name="itrial">Index of the trial being tallied.</param>
 /// <param name="ac">Action chunk chosen by player A.</param>
 /// <param name="bc">Action chunk chosen by player B.</param>
 public void TallyResults(int itrial, ExternalActionChunk ac, ExternalActionChunk bc)
 {
     // Translate each chunk into its results-table index; anything other
     // than "cooperate" counts as a defection.
     int aChoice = (ac == sayCooperate) ? _COOPERATE : _DEFECT;
     int bChoice = (bc == sayCooperate) ? _COOPERATE : _DEFECT;

     results[itrial, aChoice, bChoice]++;
 }
Пример #7
0
        /// <summary>
        /// Performs reasoning using a "noisy" input based on each pattern:
        /// each declarative chunk is presented as a degraded sensory input until
        /// the reasoner commits to a real external action.
        /// </summary>
        /// <param name="reasoner">The reasoner who is performing the reasoning</param>
        static void Run(Agent reasoner)
        {
            int pcounter = 0;

            //Iterates through each pattern
            foreach (DeclarativeChunk dc in chunks)
            {
                //Gets an input to use for reasoning. Note that the World.GetSensoryInformation method can also be used here
                ExternalActionChunk chosen = null;

                ++pcounter;
                Console.Write("Presenting degraded pattern ");
                Console.WriteLine(pcounter);

                // Keep re-presenting the degraded pattern until the agent picks
                // something other than DO_NOTHING.
                int state_counter = 1;
                while (chosen == null || chosen == ExternalActionChunk.DO_NOTHING)
                {
                    SensoryInformation si = World.NewSensoryInformation(reasoner);
                    // The "state" dimension tells the agent which phase it is in
                    // (1 = first presentation; 2/3 set below based on working memory).
                    si.Add(World.GetDimensionValuePair("state", state_counter), 1);

                    int count = 0;
                    //Sets up the input
                    // Copies the pattern into the input, but only while fewer than a
                    // (1 - noise) fraction of the chunk's pairs have been activated;
                    // the remaining pairs are zeroed out to degrade the pattern.
                    foreach (DimensionValuePair dv in dvs)
                    {
                        if (((double)count / (double)dc.Count < (1 - noise)))
                        {
                            if (dc.Contains(dv))
                            {
                                si.Add(dv, 1);
                                ++count;
                            }
                            else
                            {
                                si.Add(dv, 0);
                            }
                        }
                        else
                        {
                            si.Add(dv, 0);                                   //Zeros out the dimension-value pair if "above the noise level"
                        }
                    }

                    reasoner.Perceive(si);
                    chosen = reasoner.GetChosenExternalAction(si);

                    // NOTE(review): state 3 presumably signals "working memory occupied"
                    // and state 2 "still empty"; confirm against the agent's rule setup.
                    if (reasoner.GetInternals(Agent.InternalWorldObjectContainers.WORKING_MEMORY).Count() > 0)
                    {
                        state_counter = 3;
                    }
                    else
                    {
                        state_counter = 2;
                    }
                }
                Console.Write("Is this pattern 2? Agent says: ");
                Console.WriteLine(chosen.LabelAsIComparable);
                // Clear working memory so the next pattern starts fresh.
                reasoner.ResetWorkingMemory();
            }
        }
Пример #8
0
        /// <summary>
        /// Registers the two prediction action chunks ("CK1", "CK2") as output
        /// nodes of the implicit neural network.
        /// </summary>
        public void SetNeuralNetworkOutputNodes()
        {
            // Create each labeled action chunk and wire it into the network output.
            foreach (string label in new[] { "CK1", "CK2" })
            {
                NeuralNetwork.Output.Add(World.NewExternalActionChunk(label));
            }
        }
Пример #9
0
        /// <summary>
        /// Registers the three prediction action chunks ("PKB", "AKT1", "AKT2")
        /// as output nodes of the implicit neural network.
        /// </summary>
        public void SetNeuralNetworkOutputNodes()
        {
            // Create each labeled action chunk and wire it into the network output.
            foreach (string label in new[] { "PKB", "AKT1", "AKT2" })
            {
                NeuralNetwork.Output.Add(World.NewExternalActionChunk(label));
            }
        }
Пример #10
0
        /// <summary>
        /// Registers the three prediction action chunks ("PKA", "PKG", "PKC")
        /// as output nodes of the implicit neural network.
        /// </summary>
        public void SetNeuralNetworkOutputNodes()
        {
            // Create each labeled action chunk and wire it into the network output.
            foreach (string label in new[] { "PKA", "PKG", "PKC" })
            {
                NeuralNetwork.Output.Add(World.NewExternalActionChunk(label));
            }
        }
Пример #11
0
        /// <summary>
        /// The event handler for new external action chosen events
        /// </summary>
        /// <summary>
        /// The event handler for new external action chosen events.
        /// Scores John's True/False answer against the two Boolean inputs and
        /// delivers feedback: "True" is correct when exactly one Boolean is
        /// active (XOR), "False" when both or neither is active.
        /// </summary>
        /// <param name="actor">Agent that chose the action (unused here).</param>
        /// <param name="chosenAction">Chunk whose label is the agent's True/False answer.</param>
        /// <param name="relatedSI">Sensory input the answer refers to.</param>
        /// <param name="finalActionActivations">Final activations per action (unused here).</param>
        /// <param name="performedAt">Timestamp of the action (unused here).</param>
        /// <param name="responseTime">Agent response time (unused here).</param>
        protected override void ProcessChosenExternalAction(Agent actor, ExternalActionChunk chosenAction, SensoryInformation relatedSI,
                                                            Dictionary <ActionChunk, double> finalActionActivations, long performedAt, long responseTime)
        {
            // Fix: the original duplicated the log/counter/feedback code across four
            // branches; compute a single "correct" flag instead. Behavior unchanged.
            // Activation flags for each Boolean dimension-value pair (true/false are
            // tracked as separate pairs, so all four are read independently).
            bool b1True  = relatedSI["Boolean 1", true] == John.Parameters.MAX_ACTIVATION;
            bool b1False = relatedSI["Boolean 1", false] == John.Parameters.MAX_ACTIVATION;
            bool b2True  = relatedSI["Boolean 2", true] == John.Parameters.MAX_ACTIVATION;
            bool b2False = relatedSI["Boolean 2", false] == John.Parameters.MAX_ACTIVATION;

            // The chunk label is a boxed bool: the agent's True/False answer.
            bool saidTrue = (bool)chosenAction.LabelAsIComparable;

            // "True" is right when the Booleans differ (XOR); "False" when they agree.
            bool correct = saidTrue
                ? (b1True && b2False) || (b1False && b2True)
                : (b1True && b2True) || (b1False && b2False);

            if (correct)
            {
                //The agent responded correctly
                Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was correct");
                //Record the agent's success.
                CorrectCounter++;
                //Give positive feedback.
                John.ReceiveFeedback(relatedSI, 1.0);
            }
            else
            {
                //The agent responded incorrectly
                Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was incorrect");
                //Give negative feedback.
                John.ReceiveFeedback(relatedSI, 0.0);
            }

            // Release the trial driver waiting on this event.
            trialWaitHold.Set();
        }
Пример #12
0
        /// <summary>
        /// Checks whether the chosen action chunk matches the actual kinase and
        /// gives the agent feedback accordingly.
        /// </summary>
        /// <param name="ActionChunkChoosen">Action chunk selected by the agent.</param>
        /// <param name="ActualKinease">Name of the kinase that is actually correct.</param>
        /// <returns>
        /// True when the chosen chunk names the actual kinase; false when it names a
        /// different kinase, or when it matches none of the known kinase chunks
        /// (in which case no feedback is given — same as the original behavior).
        /// </returns>
        public Boolean GetPredictionResultStatus(ExternalActionChunk ActionChunkChoosen, String ActualKinease)
        {
            Boolean ResultFlag = false;

            // Fix: the original repeated the identical match/feedback block three
            // times (once per kinase); iterate over the kinase names instead.
            // The check order (PKA, PKG, PKC) is preserved.
            foreach (String kinase in new[] { "PKA", "PKG", "PKC" })
            {
                if (ActionChunkChoosen == World.GetActionChunk(kinase))
                {
                    if (ActualKinease.Equals(kinase))
                    {
                        Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "MAKaey Responded Correctly");
                        MAKaey.ReceiveFeedback(sensInfo, 1.0);
                        ResultFlag = true;
                    }
                    else
                    {
                        Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "MAKaey Responded In-correctly");
                        MAKaey.ReceiveFeedback(sensInfo, 0.0);
                        ResultFlag = false;
                    }
                    break;
                }
            }

            return(ResultFlag);
        }
Пример #13
0
        /// <summary>
        /// Creates the Clarion agent: stores the world-server proxy and creature
        /// identity, opens the mind viewer, registers sensory inputs and external
        /// actions with the Clarion World, and loads the leaflet controls.
        /// </summary>
        /// <param name="nws">Proxy for the world server this agent acts in.</param>
        /// <param name="creature_ID">Identifier of the controlled creature.</param>
        /// <param name="creature_Name">Name of the controlled creature.</param>
        /// <param name="leafletList">Leaflets to pursue; must contain at least three entries.</param>
        /// <exception cref="ArgumentException">When fewer than three leaflets are supplied.</exception>
        public ClarionAgent(WSProxy nws, String creature_ID, String creature_Name, List <Leaflet> leafletList)
        {
            // Fix: the original indexed leafletList[0..2] unconditionally, which threw
            // an opaque exception for null/short lists; fail fast with a clear message.
            if (leafletList == null || leafletList.Count < 3)
            {
                throw new ArgumentException("leafletList must contain at least 3 leaflets", "leafletList");
            }

            worldServer = nws;
            // Initialize the agent
            CurrentAgent = World.NewAgent("Current Agent");
            mind         = new MindViewer();
            mind.Show();
            creatureId   = creature_ID;
            creatureName = creature_Name;

            // Initialize Input Information
            // One (dimension, value) pair per perceivable condition on the visual sensor dimension.
            inputWallCreatureAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_WALL_AHEAD);
            inputFoodAhead         = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_FOOD_AHEAD);
            inputJewelAhead        = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_JEWEL_AHEAD);
            inputJewelInVision     = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_JEWEL_IN_VISION);
            inputFuelLow           = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_FUEL_LOW);

            // Initialize Output actions
            // Each action chunk is labeled with the enum name of the creature action it triggers.
            outputRotateClockwise   = World.NewExternalActionChunk(CreatureActions.ROTATE_CLOCKWISE.ToString());
            outputGoAhead           = World.NewExternalActionChunk(CreatureActions.GO_AHEAD.ToString());
            outputEatFood           = World.NewExternalActionChunk(CreatureActions.EAT_FOOD.ToString());
            outputGetJewel          = World.NewExternalActionChunk(CreatureActions.GET_JEWEL.ToString());
            outputHideJewel         = World.NewExternalActionChunk(CreatureActions.HIDE_JEWEL.ToString());
            outputGoalAchieved      = World.NewExternalActionChunk(CreatureActions.GOAL_ACHIEVED.ToString());
            outputGoToJewelInVision = World.NewExternalActionChunk(CreatureActions.GO_TO_JEWEL.ToString());
            outputFuelLow           = World.NewExternalActionChunk(CreatureActions.FUEL_LOW.ToString());

            // Load leaflet control (guarded above: at least three leaflets are present)
            loadLeafletsControl(leafletList);
            mind.loadLeaflet(leafletList[0], leafletList[1], leafletList[2]);

            //Create thread to simulation
            // NOTE(review): thread is created but not started here; presumably started elsewhere.
            runThread = new Thread(CognitiveCycle);
        }
Пример #14
0
        /// <summary>
        /// Builds the agent "John" and his implicit decision network for the
        /// five-peg task: one dimension-value pair and one matching external
        /// action chunk per peg, wired as the network's inputs and outputs.
        /// </summary>
        private void Initialize()
        {
            // Silence Clarion tracing for this run.
            World.LoggingLevel = TraceLevel.Off;

            // One dimension-value pair per peg.
            p1 = World.NewDimensionValuePair("Peg", 1);
            p2 = World.NewDimensionValuePair("Peg", 2);
            p3 = World.NewDimensionValuePair("Peg", 3);
            p4 = World.NewDimensionValuePair("Peg", 4);
            p5 = World.NewDimensionValuePair("Peg", 5);

            // One external action chunk per peg...
            mp1 = World.NewExternalActionChunk();
            mp2 = World.NewExternalActionChunk();
            mp3 = World.NewExternalActionChunk();
            mp4 = World.NewExternalActionChunk();
            mp5 = World.NewExternalActionChunk();

            // ...each carrying its corresponding peg pair.
            mp1 += p1;
            mp2 += p2;
            mp3 += p3;
            mp4 += p4;
            mp5 += p5;

            John = World.NewAgent();

            net = AgentInitializer.InitializeImplicitDecisionNetwork(John, SimplifiedQBPNetwork.Factory);

            // Pegs feed the network; the per-peg actions are its outputs.
            foreach (var peg in new[] { p1, p2, p3, p4, p5 })
            {
                net.Input.Add(peg);
            }
            foreach (var move in new[] { mp1, mp2, mp3, mp4, mp5 })
            {
                net.Output.Add(move);
            }

            net.Parameters.LEARNING_RATE = 1;
            net.Parameters.MOMENTUM      = .01;

            John.Commit(net);

            // Thresholds governing when action rules generalize or specialize.
            RefineableActionRule.GlobalParameters.GENERALIZATION_THRESHOLD_1 = -.01;
            RefineableActionRule.GlobalParameters.SPECIALIZATION_THRESHOLD_1 = -.4;
        }
Пример #15
0
        /// <summary>
        /// Thread body for the agent's timed sense-perceive-act loop: reads the
        /// current scene, converts it into Clarion sensory information, lets the
        /// agent choose an action, executes it, and repeats until the cycle budget
        /// is exhausted, then reports the total elapsed time.
        /// </summary>
        /// <param name="obj">Unused thread-start argument (required by the Thread delegate).</param>
        private void CognitiveCycle(object obj)
        {
            Console.WriteLine("Starting Cognitive Cycle ... press CTRL-C to finish !");
            // Cognitive Cycle starts here getting sensorial information
            var watch = System.Diagnostics.Stopwatch.StartNew();

            // NOTE(review): `!=` rather than `<` — presumably a negative sentinel
            // (e.g. -1) in MaxNumberOfCognitiveCycles means "run forever"; confirm
            // before changing the comparison.
            while (CurrentCognitiveCycle != MaxNumberOfCognitiveCycles)
            {
                Console.WriteLine("Creature: Clarion - Remaining Jewel: " + getJewelRemainingTotal());
                // Get current sensory information
                IList <Thing> currentSceneInWS3D = processSensoryInformation();

                // Make the perception
                SensoryInformation si = prepareSensoryInformation(currentSceneInWS3D);

                //Perceive the sensory information
                CurrentAgent.Perceive(si);

                //Choose an action
                ExternalActionChunk chosen = CurrentAgent.GetChosenExternalAction(si);

                // Get the selected action
                // The chunk label is the CreatureActions enum name (see constructor),
                // so it can be parsed back into the enum (case-insensitive).
                String          actionLabel = chosen.LabelAsIComparable.ToString();
                CreatureActions actionType  = (CreatureActions)Enum.Parse(typeof(CreatureActions), actionLabel, true);

                // Increment the number of cognitive cycles
                CurrentCognitiveCycle++;

                // Call the output event handler
                processSelectedAction(actionType);

                //Wait to the agent accomplish his job
                if (TimeBetweenCognitiveCycles > 0)
                {
                    Thread.Sleep(TimeBetweenCognitiveCycles);
                }
            }
            watch.Stop();
            // Fix: the original variable was named "elapsedMs" but holds whole seconds
            // (milliseconds / 1000); renamed to match its unit. Printed value unchanged.
            var elapsedSeconds = watch.ElapsedMilliseconds / 1000;

            Console.WriteLine("Clarion completed time: " + elapsedSeconds);
        }
Пример #16
0
        /// <summary>
        /// Creates the Clarion agent: stores the world-server proxy and creature
        /// identity, opens the mind viewer, registers sensory inputs and external
        /// actions with the Clarion World, and marks jewel colors to ignore.
        /// </summary>
        /// <param name="nws">Proxy for the world server this agent acts in.</param>
        /// <param name="creature_ID">Identifier of the controlled creature.</param>
        /// <param name="creature_Name">Name of the controlled creature.</param>
        public ClarionAgent(WSProxy nws, String creature_ID, String creature_Name)
        {
            worldServer = nws;
            // Initialize the agent
            CurrentAgent = World.NewAgent("Current Agent");
            mind         = new MindViewer();
            mind.Show();
            creatureId   = creature_ID;
            creatureName = creature_Name;

            // Initialize Input Information
            // One (dimension, value) pair per perceivable condition on the visual sensor dimension.
            inputWallCreatureAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_WALL_AHEAD);
            inputFoodAhead         = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_FOOD_AHEAD);
            inputJewelAhead        = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_JEWEL_AHEAD);
            inputJewelInVision     = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_JEWEL_IN_VISION);

            // Initialize Output actions
            // Each action chunk is labeled with the enum name of the creature action it triggers.
            outputRotateClockwise   = World.NewExternalActionChunk(CreatureActions.ROTATE_CLOCKWISE.ToString());
            outputGoAhead           = World.NewExternalActionChunk(CreatureActions.GO_AHEAD.ToString());
            outputEatFood           = World.NewExternalActionChunk(CreatureActions.EAT_FOOD.ToString());
            outputGetJewel          = World.NewExternalActionChunk(CreatureActions.GET_JEWEL.ToString());
            outputHideJewel         = World.NewExternalActionChunk(CreatureActions.HIDE_JEWEL.ToString());
            outputGoalAchieved      = World.NewExternalActionChunk(CreatureActions.GOAL_ACHIEVED.ToString());
            outputGoToJewelInVision = World.NewExternalActionChunk(CreatureActions.GO_TO_JEWEL.ToString());

            /*List<Thing> thingList = nws.SendGetCreatureState (creature_ID);
             * foreach(Thing t in thingList) {
             *      Console.WriteLine ("Thing: ");
             *      Console.Write (t);
             * }*/



            // Define jewels out of scope
            // NOTE(review): presumably these jewel colors are excluded from collection; confirm.
            jewelOutOfScope.Add("Orange");
            jewelOutOfScope.Add("DarkGray_Spoiled");

            //Create thread to simulation
            // NOTE(review): thread is created but not started here; presumably started elsewhere.
            runThread = new Thread(CognitiveCycle);
            Console.WriteLine("Agent started");
        }
Пример #17
0
        // To indicate we are done.
        //Boolean allJewelsCollected = false;
        //Boolean reachedDeliverySpot = false;
        #endregion

        #region Constructor
        public ClarionAgent(WSProxy nws, String creature_ID, String creature_Name)
        {
            // Stores the world-server proxy and creature identity, opens the mind
            // viewer, creates a placeholder delivery spot, and registers sensory
            // inputs and external actions with the Clarion World.
            worldServer = nws;
            // Initialize the agent
            CurrentAgent = World.NewAgent("Current Agent");
            mind         = new MindViewer();
            mind.Show();
            creatureId   = creature_ID;
            creatureName = creature_Name;

            // Create a (fake) delivery spot.
            // It seems WSProxy.cs does not support it, so place it at (0,0).
            deliverySpot    = new Thing();
            deliverySpot.X1 = 0;
            deliverySpot.Y1 = 0;

            // Initialize Input Information
            // One (dimension, value) pair per perceivable condition on the visual sensor dimension.
            inputWallAhead          = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_WALL_AHEAD);
            inputJewelAhead         = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_JEWEL_AHEAD);
            inputFoodAhead          = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_FOOD_AHEAD);
            inputJewelAway          = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_JEWEL_AWAY);
            inputFoodAway           = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_FOOD_AWAY);
            inputAllJewelsCollected = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_ALL_JEWELS_COLLECTED);
            inputCreatureCanDeliver = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_CREATURE_CAN_DELIVER);

            // Initialize Output actions
            // Each action chunk is labeled with the enum name of the creature action it triggers.
            outputRotateClockwise  = World.NewExternalActionChunk(CreatureActions.ROTATE_CLOCKWISE.ToString());
            outputGetJewel         = World.NewExternalActionChunk(CreatureActions.GET_JEWEL.ToString());
            outputGetFood          = World.NewExternalActionChunk(CreatureActions.GET_FOOD.ToString());
            outputGoToJewel        = World.NewExternalActionChunk(CreatureActions.GO_TO_JEWEL.ToString());
            outputGoToFood         = World.NewExternalActionChunk(CreatureActions.GO_TO_FOOD.ToString());
            outputWander           = World.NewExternalActionChunk(CreatureActions.WANDER.ToString());
            outputGoToDeliverySpot = World.NewExternalActionChunk(CreatureActions.GO_TO_DELIVER.ToString());
            outputDoDelivery       = World.NewExternalActionChunk(CreatureActions.DELIVER.ToString());

            //Create thread to simulation
            // NOTE(review): thread is created but not started here; presumably started elsewhere.
            runThread = new Thread(CognitiveCycle);
            Console.WriteLine("Agent started");
        }
Пример #18
0
        /// <summary>
        /// Output action that makes the agent go ahead
        /// </summary>

        #endregion

        #endregion

        #region Constructor
        public ClarionAgent(WorldServer nws, String creature_ID, String creature_Name)
        {
            // Stores the world-server reference and creature identity, opens the
            // mind window, and registers the generic item-oriented sensory inputs
            // and external actions with the Clarion World.
            worldServer = nws;
            // Initialize the agent
            CurrentAgent = World.NewAgent("Current Agent");
            mind         = new Mind();
            mind.Show();
            creatureId   = creature_ID;
            creatureName = creature_Name;

            // Initialize Input Information
            // One (dimension, value) pair per perceivable condition on the visual sensor dimension.
            inputAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_AHEAD);

            inputGoItem = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_GO_ITEM);

            inputSackItItem = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_SACKIT_ITEM);

            inputEatItem = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_EAT_ITEM);

            inputStopCreature = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_STOP_CREATURE);

            // Initialize Output actions
            // Each action chunk is labeled with the enum name of the creature action it triggers.
            outputRotateClockwise = World.NewExternalActionChunk(CreatureActions.ROTATE_CLOCKWISE.ToString());

            outputGoItem = World.NewExternalActionChunk(CreatureActions.GO_ITEM.ToString());

            outputSackItItem = World.NewExternalActionChunk(CreatureActions.SACKIT_ITEM.ToString());

            outputEatItem = World.NewExternalActionChunk(CreatureActions.EAT_ITEM.ToString());

            outputStopCreature = World.NewExternalActionChunk(CreatureActions.STOP_CREATURE.ToString());


            //Create thread to simulation
            // NOTE(review): thread is created but not started here; presumably started elsewhere.
            runThread = new Thread(CognitiveCycle);
            Console.WriteLine("Agent started");
        }
Пример #19
0
        /// </summary>
        /// <param name="nws"></param>
        /// <param name="creature_ID"></param>
        /// <param name="creature_Name"></param>
        #endregion


        #region Constructor
        public ClarionAgent(WorldServer nws, String creature_ID, String creature_Name)
        {
            // Stores the world-server reference and creature identity, and registers
            // the sensory inputs and external actions with the Clarion World.
            // The mind window is temporarily disabled in this variant (see below).
            worldServer = nws;
            // Initialize the agent
            CurrentAgent = World.NewAgent("Current Agent");
            // FMT 15/06 - temp disable mind
            //mind = new Mind();
            //mind.Show ();
            creatureId   = creature_ID;
            creatureName = creature_Name;

            // Initialize Input Information
            // One (dimension, value) pair per perceivable condition on the visual sensor dimension.
            inputWallAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_WALL_AHEAD);
            // FMT 29/04/2017
            inputFoodAhead         = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_FOOD_AHEAD);
            inputJewelAhead        = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_JEWEL_AHEAD);
            inputLeafletJewelAhead = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_LEAFLET_JEWEL_AHEAD);
            inputJewelHide         = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_JEWEL_HIDE);
            inputEnergyLow         = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_ENERGY_LOW);
            inputCloseObject       = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_CLOSE_OBJECT);
            inputLeafletComplete   = World.NewDimensionValuePair(SENSOR_VISUAL_DIMENSION, DIMENSION_LEAFLET_COMPLETE);

            // Initialize Output actions
            // Each action chunk is labeled with the enum name of the creature action it triggers.
            outputRotateClockwise = World.NewExternalActionChunk(CreatureActions.ROTATE_CLOCKWISE.ToString());
            outputGoAhead         = World.NewExternalActionChunk(CreatureActions.GO_AHEAD.ToString());
            // FMT 29/04/2017
            outputEat  = World.NewExternalActionChunk(CreatureActions.EAT.ToString());
            outputGet  = World.NewExternalActionChunk(CreatureActions.GET.ToString());
            outputGoTo = World.NewExternalActionChunk(CreatureActions.GO_TO.ToString());
            outputHide = World.NewExternalActionChunk(CreatureActions.HIDE.ToString());
            outputStop = World.NewExternalActionChunk(CreatureActions.STOP.ToString());

            //Create thread to simulation
            // NOTE(review): thread is created but not started here; presumably started elsewhere.
            runThread = new Thread(CognitiveCycle);
            Console.WriteLine("Agent started");
        }
Пример #20
0
        /*
         * public static void main(String [] args)
         * {
         *  String ProteinSequence = "MENEREKQVYLAKLSEQTERYDEMVEAMKKVAQLDVELTVEERNLVSVGYKNVIGARRASWRILSSIEQKEESKGNDENVKRLKNYRKRVEDELAKVCNDILSVIDKHLIPSSNAVESTVFFYKMKGDYYRYLAEFSSGAERKEAADQSLEAYKAAVAAAENGLAPTHPVRLGLALNFSVFYYEILNSPESACQLAKQAFDDAIAELDSLNEESYKDSTLIMQLLRDNLTLWTSDLNEEGDERTKGADEPQDEN";
         *
         *  AminoAcid[] StandardProteinSequence = getStandardProteinSequence(ProteinSequence);
         *  List<Peptide> Peptides = getPeptideList(StandardProteinSequence);
         *  Peptides = Train_Clarion_Model_AGC_Group(Peptides);
         *
         *  Console.WriteLine("");
         *  Peptides = Train_Clarion_Model_PKB_Family(Peptides);
         *
         *  Console.WriteLine("");
         *  Peptides = Train_Clarion_Model_CaseinKinase_Family(Peptides);
         *
         *  Console.WriteLine("");
         *
         *  for (int PeptideCount = 0; PeptideCount < Peptides.Count; PeptideCount++)
         *  {
         *      for (int KinaseCount = 0; KinaseCount < Peptides.ElementAt(PeptideCount).getModifKinases().Count; KinaseCount++)
         *      {
         *          if (Peptides.ElementAt(PeptideCount).getModifKinases().ElementAt(KinaseCount).getStatus() == true)
         *          {
         *              Console.WriteLine("" + Peptides.ElementAt(PeptideCount).toString() + "\t" + Peptides.ElementAt(PeptideCount).getModifKinases().ElementAt(KinaseCount).getKinaseName()+"\tYES");
         *          }
         *          else
         *          {
         *              Console.WriteLine("" + Peptides.ElementAt(PeptideCount).toString() + "\t" + Peptides.ElementAt(PeptideCount).getModifKinases().ElementAt(KinaseCount).getKinaseName() + "\t---");
         *          }
         *      }
         *
         *      Console.WriteLine("");
         *  }
         * }
         *
         */
        /// <summary>
        /// Trains a Clarion casein-kinase model on the CK dataset until it reaches
        /// at least 40% accuracy over the full (shuffled) dataset, then labels each
        /// peptide in <paramref name="Peptides"/> with the kinase family ("CK1" or
        /// "CK2") the trained agent predicts for it.
        /// </summary>
        /// <param name="Peptides">Peptides to classify; mutated in place and returned.</param>
        /// <returns>The same list, with kinase status set on classified peptides.</returns>
        public static List <Peptide> Train_Clarion_Model_CaseinKinase_Family(List <Peptide> Peptides)
        {
            // Merge training and testing records into one shuffled dataset.
            List <Protein> trainingProteins = getProteinRecordsFromTrainingDataset(Dataset_File_Path_CaseinKinase_Family);
            List <Protein> testingProteins  = getProteinRecordsFromTestingDataset(Dataset_File_Path_CaseinKinase_Family);

            List <Protein> completeDataset = new List <Protein>();
            completeDataset.AddRange(trainingProteins);
            completeDataset.AddRange(testingProteins);
            completeDataset.ShuffleDatasetRecords();

            // Size the network input layer from the first peptide in the dataset.
            int numberOfInputs = completeDataset.ElementAt(0).getModifiedResidues().ElementAt(0).getModificationPeptide().getPeptAminoAcids().Length;

            Clarion_CK_Model clarionTask = new Clarion_CK_Model();
            clarionTask.SetNeuralNetworkInputNodes(numberOfInputs);
            clarionTask.SetNeuralNetworkOutputNodes();
            clarionTask.SetAgentParameters();

            // Train over the whole dataset repeatedly until accuracy reaches 40%.
            // NOTE(review): no iteration cap — this loops forever if the agent
            // never reaches the threshold.
            int accuracy = 0;
            while (accuracy < 40)
            {
                int correctCount = 0;

                foreach (Protein protein in completeDataset)
                {
                    clarionTask.RefreshSensoryInformation();

                    Peptide trainingPeptide  = protein.getModifiedResidues().ElementAt(0).getModificationPeptide();
                    String  actualKinaseName = protein.getModifiedResidues().ElementAt(0).getKineaseName();

                    clarionTask.SetValuesOfNeuralNetworkInputLayer(trainingPeptide);
                    clarionTask.PerceiveSensoryInformation();

                    ExternalActionChunk chosenAction = clarionTask.GetChoosenActionChunk();

                    if (clarionTask.GetPredictionResultStatus(chosenAction, actualKinaseName))
                    {
                        correctCount = correctCount + 1;
                    }
                }

                accuracy = (int)(((double)(correctCount) / (double)(completeDataset.Count)) * (100));
            }

            // Classification pass: ask the trained agent for each query peptide.
            foreach (Peptide queryPeptide in Peptides)
            {
                clarionTask.RefreshSensoryInformation();

                clarionTask.SetValuesOfNeuralNetworkInputLayer(queryPeptide);
                clarionTask.PerceiveSensoryInformation();

                ExternalActionChunk chosenAction = clarionTask.GetChoosenActionChunk();

                if (chosenAction == World.GetActionChunk("CK1"))
                {
                    queryPeptide.setKinaseStatus("CK1");
                }
                else if (chosenAction == World.GetActionChunk("CK2"))
                {
                    queryPeptide.setKinaseStatus("CK2");
                }
            }

            return(Peptides);
        }
Пример #21
0
        /// <summary>
        /// Full Hello World task: a motivationally-driven Clarion agent whose
        /// drives (affiliation/belongingness and autonomy) select between a
        /// "Salute" and a "Bid Farewell" goal, which in turn bias the action
        /// network toward saying "Hello" or "Goodbye". Runs 20000 trials with
        /// feedback, logging to HelloWorldFull.txt, then reports accuracy and
        /// the action rules the agent learned.
        /// </summary>
        static void Main(string[] args)
        {
            //Initialize the task
            Console.WriteLine("Initializing the Full Hello World Task");

            int CorrectCounter = 0;
            int NumberTrials   = 20000;

            Random rand = new Random();

            World.LoggingLevel = TraceLevel.Off;

            int progress = 0;

            // Trial-by-trial trace goes to the file; progress goes to the console.
            TextWriter   orig = Console.Out;
            StreamWriter sw   = File.CreateText("HelloWorldFull.txt");

            DimensionValuePair hi  = World.NewDimensionValuePair("Salutation", "Hello");
            DimensionValuePair bye = World.NewDimensionValuePair("Salutation", "Goodbye");

            ExternalActionChunk sayHi  = World.NewExternalActionChunk("Hello");
            ExternalActionChunk sayBye = World.NewExternalActionChunk("Goodbye");

            GoalChunk salute      = World.NewGoalChunk("Salute");
            GoalChunk bidFarewell = World.NewGoalChunk("Bid Farewell");

            //Initialize the Agent
            Agent John = World.NewAgent("John");

            // Implicit decision network: inputs are the two goals plus the two
            // perceived salutations; outputs are the two speech actions.
            SimplifiedQBPNetwork net = AgentInitializer.InitializeImplicitDecisionNetwork(John, SimplifiedQBPNetwork.Factory);

            net.Input.Add(salute, "goals");
            net.Input.Add(bidFarewell, "goals");

            net.Input.Add(hi);
            net.Input.Add(bye);

            net.Output.Add(sayHi);
            net.Output.Add(sayBye);

            net.Parameters.LEARNING_RATE = 1;

            John.Commit(net);

            // ACS level-selection weights: only bottom-level (BL) and rule
            // extraction (RER) contribute; IRL and fixed rules are disabled.
            John.ACS.Parameters.VARIABLE_BL_BETA  = .5;
            John.ACS.Parameters.VARIABLE_RER_BETA = .5;
            John.ACS.Parameters.VARIABLE_IRL_BETA = 0;
            John.ACS.Parameters.VARIABLE_FR_BETA  = 0;

            RefineableActionRule.GlobalParameters.SPECIALIZATION_THRESHOLD_1 = -.6;
            RefineableActionRule.GlobalParameters.GENERALIZATION_THRESHOLD_1 = -.1;
            RefineableActionRule.GlobalParameters.INFORMATION_GAIN_OPTION    = RefineableActionRule.IGOptions.PERFECT;

            // Two drives with random initial deficits; HelloWorldFull_DeficitChange
            // (defined elsewhere in this file) updates deficits each step.
            AffiliationBelongingnessDrive ab = AgentInitializer.InitializeDrive(John, AffiliationBelongingnessDrive.Factory, rand.NextDouble(), (DeficitChangeProcessor)HelloWorldFull_DeficitChange);

            DriveEquation abd = AgentInitializer.InitializeDriveComponent(ab, DriveEquation.Factory);

            ab.Commit(abd);

            John.Commit(ab);

            AutonomyDrive aut = AgentInitializer.InitializeDrive(John, AutonomyDrive.Factory, rand.NextDouble(), (DeficitChangeProcessor)HelloWorldFull_DeficitChange);

            DriveEquation autd =
                AgentInitializer.InitializeDriveComponent(aut, DriveEquation.Factory);

            aut.Commit(autd);

            John.Commit(aut);

            // Meta-cognitive goal selection: drive strengths feed a goal-selection
            // equation whose outputs set the current goal in the goal structure.
            GoalSelectionModule gsm =
                AgentInitializer.InitializeMetaCognitiveModule(John, GoalSelectionModule.Factory);

            GoalSelectionEquation gse =
                AgentInitializer.InitializeMetaCognitiveDecisionNetwork(gsm, GoalSelectionEquation.Factory);

            gse.Input.Add(ab.GetDriveStrength());
            gse.Input.Add(aut.GetDriveStrength());

            GoalStructureUpdateActionChunk su = World.NewGoalStructureUpdateActionChunk();
            GoalStructureUpdateActionChunk bu = World.NewGoalStructureUpdateActionChunk();

            // SET_RESET: selecting this update replaces the current goal.
            su.Add(GoalStructure.RecognizedActions.SET_RESET, salute);
            bu.Add(GoalStructure.RecognizedActions.SET_RESET, bidFarewell);

            gse.Output.Add(su);
            gse.Output.Add(bu);

            // Affiliation drives "Salute"; autonomy drives "Bid Farewell".
            gsm.SetRelevance(su, ab, 1);
            gsm.SetRelevance(bu, aut, 1);

            gsm.Commit(gse);

            John.Commit(gsm);

            John.MS.Parameters.CURRENT_GOAL_ACTIVATION_OPTION =
                MotivationalSubsystem.CurrentGoalActivationOptions.FULL;

            //Run the task
            Console.WriteLine("Running the Full Hello World Task");
            Console.SetOut(sw);

            SensoryInformation si;

            ExternalActionChunk chosen;

            for (int i = 0; i < NumberTrials; i++)
            {
                si = World.NewSensoryInformation(John);

                // Stimulate both drives every trial.
                si[AffiliationBelongingnessDrive.MetaInfoReservations.STIMULUS, typeof(AffiliationBelongingnessDrive).Name] = 1;
                si[AutonomyDrive.MetaInfoReservations.STIMULUS, typeof(AutonomyDrive).Name] = 1;

                //Randomly choose an input to perceive.
                if (rand.NextDouble() < .5)
                {
                    //Say "Hello"
                    si.Add(hi, John.Parameters.MAX_ACTIVATION);
                    si.Add(bye, John.Parameters.MIN_ACTIVATION);
                }
                else
                {
                    //Say "Goodbye"
                    si.Add(hi, John.Parameters.MIN_ACTIVATION);
                    si.Add(bye, John.Parameters.MAX_ACTIVATION);
                }

                //Perceive the sensory information
                John.Perceive(si);

                //Choose an action
                chosen = John.GetChosenExternalAction(si);

                //Deliver appropriate feedback to the agent
                if (chosen == sayHi)
                {
                    //The agent said "Hello".
                    if (si[hi] == John.Parameters.MAX_ACTIVATION)
                    {
                        //The agent responded correctly
                        Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was correct");
                        //Record the agent's success.
                        CorrectCounter++;
                        //Give positive feedback.
                        John.ReceiveFeedback(si, 1.0);
                    }
                    else
                    {
                        //The agent responded incorrectly
                        Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was incorrect");
                        //Give negative feedback.
                        John.ReceiveFeedback(si, 0.0);
                    }
                }
                else
                {
                    //The agent said "Goodbye".
                    if (si[bye] == John.Parameters.MAX_ACTIVATION)
                    {
                        //The agent responded correctly
                        Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was correct");
                        //Record the agent's success.
                        CorrectCounter++;
                        //Give positive feedback.
                        John.ReceiveFeedback(si, 1.0);
                    }
                    else
                    {
                        //The agent responded incorrectly
                        Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was incorrect");
                        //Give negative feedback.
                        John.ReceiveFeedback(si, 0.0);
                    }
                }

                // Temporarily restore the console writer to paint the progress bar.
                Console.SetOut(orig);
                progress           = (int)(((double)(i + 1) / (double)NumberTrials) * 100);
                Console.CursorLeft = 0;
                Console.Write(progress + "% Complete..");
                Console.SetOut(sw);
            }

            //Report Results

            Console.WriteLine("Reporting Results for the Full Hello World Task");
            Console.WriteLine("John got " + CorrectCounter + " correct out of " + NumberTrials + " trials (" +
                              (int)Math.Round(((double)CorrectCounter / (double)NumberTrials) * 100) + "%)");

            Console.WriteLine("At the end of the task, John had learned the following rules:");
            foreach (var i in John.GetInternals(Agent.InternalContainers.ACTION_RULES))
            {
                Console.WriteLine(i);
            }

            sw.Close();
            Console.SetOut(orig);
            Console.CursorLeft = 0;
            Console.WriteLine("100% Complete..");
            //Kill the agent to end the task
            Console.WriteLine("Killing John to end the program");
            John.Die();
            Console.WriteLine("John is Dead");

            Console.WriteLine("The Full Hello World Task has finished");
            Console.WriteLine("The results have been saved to \"HelloWorldFull.txt\"");
            Console.Write("Press any key to exit");
            Console.ReadKey(true);
        }
Пример #22
0
        /// <summary>
        /// Sets up the world, the agent John, and his implicit decision network
        /// for the task, then applies the parameter configuration for the given
        /// experimental group (verbalization, memory, simple-rule, or default).
        /// </summary>
        /// <param name="group">Experimental group whose configuration to apply.</param>
        public void Initialize(Groups group)
        {
            World.Initialize();
            John = World.NewAgent();

            QBPNetwork idn = AgentInitializer.InitializeImplicitDecisionNetwork(John, QBPNetwork.Factory);

            // Pre-create the pairs/chunk for the `target` value so that, inside
            // the loop below, the Get* lookups find them when i == target (the
            // null-check branch distinguishes already-created from new values).
            World.NewDimensionValuePair("Target P", target);
            World.NewDimensionValuePair("Current P", target);
            World.NewExternalActionChunk(target);

            for (double i = 0; i < 12; i++)
            {
                if (World.GetDimensionValuePair("Target P", i) == null)
                {
                    idn.Input.Add(World.NewDimensionValuePair("Target P", i));
                    idn.Input.Add(World.NewDimensionValuePair("Current P", i));
                    idn.Input.Add(World.NewExternalActionChunk(i));
                    idn.Output.Add(World.GetActionChunk(i));
                }
                else
                {
                    idn.Input.Add(World.GetDimensionValuePair("Target P", i));
                    idn.Input.Add(World.GetDimensionValuePair("Current P", i));
                    idn.Input.Add(World.GetActionChunk(i));
                    idn.Output.Add(World.GetActionChunk(i));
                }
            }

            // Register the factor values (As/Bs/Cs are fields defined elsewhere).
            foreach (double i in As)
            {
                World.NewDimensionValuePair("A", i);
            }
            foreach (double i in Bs)
            {
                World.NewDimensionValuePair("B", i);
            }
            foreach (double i in Cs)
            {
                World.NewDimensionValuePair("C", i);
            }

            switch (group)
            {
            case Groups.VERBALIZATION:
                idn.Parameters.POSITIVE_MATCH_THRESHOLD = 1;
                RefineableActionRule.GlobalParameters.POSITIVE_MATCH_THRESHOLD   = 1;
                RefineableActionRule.GlobalParameters.GENERALIZATION_THRESHOLD_1 = 1;
                RefineableActionRule.GlobalParameters.SPECIALIZATION_THRESHOLD_1 = .5;
                threshold_4 = .5;
                break;

            case Groups.MEMORY:
                // Memory group: seed John with 12 fixed rules built from random
                // peg/action pairings before applying the default parameters.
                for (double i = 0; i < 12; i++)
                {
                    ExternalActionChunk w = (ExternalActionChunk)World.GetActionChunk((double)rand.Next(12));
                    var p = World.GetDimensionValuePair("Current P", FactoryOutput(i, (double)w.LabelAsIComparable));
                    ExternalActionChunk w1  = (ExternalActionChunk)World.GetActionChunk(Math.Round((target + p.Value + NoiseOptions[rand.Next(3)]) / 2));
                    FixedRule           mfr = AgentInitializer.InitializeActionRule(John, FixedRule.Factory, w1, MemoryGroup_SupportCalculator);

                    mfr.GeneralizedCondition.Add(p, true);
                    mfr.GeneralizedCondition.Add(w, true);
                    John.Commit(mfr);
                }
                // C# switch cases cannot fall through implicitly; explicitly
                // continue with the shared default configuration.
                goto default;

            case Groups.SIMPLE_RULE:
                // Simple-rule group: one fixed rule per action, then defaults.
                for (double i = 0; i < 12; i++)
                {
                    FixedRule sfr = AgentInitializer.InitializeActionRule(John, FixedRule.Factory, World.GetActionChunk(i), SimpleRule_SupportCalculator);
                    John.Commit(sfr);
                }
                goto default;

            default:
                // Shared baseline parameters (also reached via goto from the
                // MEMORY and SIMPLE_RULE cases above).
                idn.Parameters.LEARNING_RATE = .05;
                idn.Parameters.DISCOUNT      = .95;
                John.ACS.Parameters.SELECTION_TEMPERATURE = .09;
                idn.Parameters.POSITIVE_MATCH_THRESHOLD   = 1;
                RefineableActionRule.GlobalParameters.GENERALIZATION_THRESHOLD_1 = 2;
                RefineableActionRule.GlobalParameters.SPECIALIZATION_THRESHOLD_1 = 1.2;
                RefineableActionRule.GlobalParameters.POSITIVE_MATCH_THRESHOLD   = 1;
                threshold_4 = .2;
                break;
            }

            RefineableActionRule.GlobalParameters.INFORMATION_GAIN_OPTION = RefineableActionRule.IGOptions.PERFECT;

            John.Commit(idn);
        }
Пример #23
0
        /// <summary>
        /// Runs the test phase of a weapon-identification task: presents the
        /// participant with prime (white/black face) + target (gun/tool) pairs
        /// and tallies misclassification errors per group <paramref name="g"/>
        /// and iteration <paramref name="a"/> into the bgErr/btErr/wgErr/wtErr
        /// counters, split by the prime's skin color.
        /// </summary>
        /// <param name="g">Experimental group whose error counters to update.</param>
        /// <param name="a">Index of the current run within the group's error lists.</param>
        public static void Test(Groups g, int a)
        {
            Console.Write("Performing Task...");
            // Each (prime, target) combination may be presented at most twice.
            int [,] shuffler = new int[8, 8];
            bgErr[g].Add(0);
            btErr[g].Add(0);
            wgErr[g].Add(0);
            wtErr[g].Add(0);
            List <DeclarativeChunk> primes = new List <DeclarativeChunk>();

            primes.AddRange(white_faces);
            primes.AddRange(black_faces);

            List <DeclarativeChunk> targets = new List <DeclarativeChunk>();

            targets.AddRange(guns);
            targets.AddRange(tools);

            for (int i = 0; i < numTestTrials; i++)
            {
                // Draw a random prime/target pair that has not been used twice.
                // NOTE(review): resamples until a free cell is found; spins
                // forever if numTestTrials > 128 (8*8*2) — confirm the configured
                // trial count stays below that bound.
                int p = r.Next(8);
                int t = r.Next(8);

                while (shuffler[p, t] == 2)
                {
                    p = r.Next(8);
                    t = r.Next(8);
                }

                shuffler[p, t]++;

                SensoryInformation si = World.NewSensoryInformation(Participant);
                si.AddRange(primes[p], 1);
                si.Add(targets[t], 1);

                // Constant stimulus to the honor drive on every trial.
                si[Drive.MetaInfoReservations.STIMULUS, typeof(HonorDrive).Name] = (double)1 / (double)5;

                Participant.Perceive(si);
                ExternalActionChunk chosen = Participant.GetChosenExternalAction(si);

                if ((chosen.LabelAsIComparable.Equals("Tool") && !tools.Contains(targets[t])))
                {
                    //The participant made an inaccurate judgment on a gun trial

                    if (si.Contains(World.GetDimensionValuePair("SkinColor", "Black")))                                 //The error was on a black trial
                    {
                        bgErr[g][a]++;
                    }
                    else
                    {
                        wgErr[g][a]++;
                    }
                }
                else if ((chosen.LabelAsIComparable.Equals("Gun") && !guns.Contains(targets[t])))
                {
                    //The participant made an inaccurate judgment on a tool trial

                    if (si.Contains(World.GetDimensionValuePair("SkinColor", "Black")))                                 //The error was on a black trial
                    {
                        btErr[g][a]++;
                    }
                    else
                    {
                        wtErr[g][a]++;
                    }
                }
            }
            Console.WriteLine("Finished");
        }
Пример #24
0
        /// <summary>
        /// Runs the XOR task in blocks: each trial presents John with a random
        /// pair of boolean inputs, rewards a "True" answer when the inputs
        /// differ and a "False" answer when they match, and repeats blocks
        /// until ReportResults signals completion. John and the counters are
        /// fields initialized elsewhere in this class.
        /// </summary>
        public static void Run()
        {
            Random             rand = new Random();
            SensoryInformation si;
            int    block_count = 0;
            double r;

            do
            {
                CorrectCounter = 0;
                ++block_count;
                //Run the task for the specified number of trials.
                for (int i = 0; i < NumberTrials; i++)
                {
                    Console.CursorLeft = 0;
                    Console.Out.Write("Running Trial #" + (i + 1) + " of Block #" + block_count);
                    r  = rand.NextDouble();
                    si = World.NewSensoryInformation(John);

                    //Randomly choose an input to perceive.
                    // Each boolean is encoded one-hot: its true and false values
                    // receive complementary activations.
                    if (r < .25)
                    {
                        //True:True
                        si.Add(World.GetDimensionValuePair("Boolean 1", true), 1);
                        si.Add(World.GetDimensionValuePair("Boolean 1", false), 0);
                        si.Add(World.GetDimensionValuePair("Boolean 2", true), 1);
                        si.Add(World.GetDimensionValuePair("Boolean 2", false), 0);
                    }
                    else if (r < .5)
                    {
                        //True:False
                        si.Add(World.GetDimensionValuePair("Boolean 1", true), 1);
                        si.Add(World.GetDimensionValuePair("Boolean 1", false), 0);
                        si.Add(World.GetDimensionValuePair("Boolean 2", true), 0);
                        si.Add(World.GetDimensionValuePair("Boolean 2", false), 1);
                    }
                    else if (r < .75)
                    {
                        //False:True
                        si.Add(World.GetDimensionValuePair("Boolean 1", true), 0);
                        si.Add(World.GetDimensionValuePair("Boolean 1", false), 1);
                        si.Add(World.GetDimensionValuePair("Boolean 2", true), 1);
                        si.Add(World.GetDimensionValuePair("Boolean 2", false), 0);
                    }
                    else
                    {
                        //False:False
                        si.Add(World.GetDimensionValuePair("Boolean 1", true), 0);
                        si.Add(World.GetDimensionValuePair("Boolean 1", false), 1);
                        si.Add(World.GetDimensionValuePair("Boolean 2", true), 0);
                        si.Add(World.GetDimensionValuePair("Boolean 2", false), 1);
                    }

                    John.Perceive(si);

                    ExternalActionChunk chosen = John.GetChosenExternalAction(si);

                    if ((bool)chosen.LabelAsIComparable)
                    {
                        //The agent said "True".
                        // Correct iff exactly one input is active (XOR).
                        if ((si["Boolean 1", true] == John.Parameters.MAX_ACTIVATION &&
                             si["Boolean 2", false] == John.Parameters.MAX_ACTIVATION) ||
                            (si["Boolean 1", false] == John.Parameters.MAX_ACTIVATION &&
                             si["Boolean 2", true] == John.Parameters.MAX_ACTIVATION))
                        {
                            //The agent responded correctly
                            Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was correct");
                            //Record the agent's success.
                            CorrectCounter++;
                            //Give positive feedback.
                            John.ReceiveFeedback(si, 1.0);
                        }
                        else
                        {
                            //The agent responded incorrectly
                            Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was incorrect");
                            //Give negative feedback.
                            John.ReceiveFeedback(si, 0.0);
                        }
                    }
                    else
                    {
                        //The agent said "False".
                        // Correct iff both inputs match (XOR is false).
                        if ((si["Boolean 1", true] == John.Parameters.MAX_ACTIVATION &&
                             si["Boolean 2", true] == John.Parameters.MAX_ACTIVATION) ||
                            (si["Boolean 1", false] == John.Parameters.MAX_ACTIVATION &&
                             si["Boolean 2", false] == John.Parameters.MAX_ACTIVATION))
                        {
                            //The agent responded correctly
                            Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was correct");
                            //Record the agent's success.
                            CorrectCounter++;
                            //Give positive feedback.
                            John.ReceiveFeedback(si, 1.0);
                        }
                        else
                        {
                            //The agent responded incorrectly
                            Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was incorrect");
                            //Give negative feedback.
                            John.ReceiveFeedback(si, 0.0);
                        }
                    }
                }
            }while (!ReportResults(block_count));

            sw.Close();

            Console.SetOut(orig);
            Console.WriteLine("John has completed the task");
            Console.WriteLine("Killing John");
            John.Die();
            Console.WriteLine("John is Dead");
            Console.WriteLine("XOR Task Completed. See XOR.txt for Results");
            Console.Write("Press any key to exit");
            Console.ReadKey(true);
        }
Пример #25
0
        /// <summary>
        /// Simple Hello World task: a Clarion agent with a single implicit
        /// decision network learns, over 10000 feedback trials, to answer
        /// "Hello" when it perceives the Hello salutation and "Goodbye" when it
        /// perceives the Goodbye salutation. Trial logs go to
        /// HelloWorldSimple.txt; accuracy and learned rules are reported at the
        /// end.
        /// </summary>
        static void Main(string[] args)
        {
            //Initialize the task
            Console.WriteLine("Initializing the Simple Hello World Task");

            int CorrectCounter = 0;
            int NumberTrials   = 10000;
            int progress       = 0;

            World.LoggingLevel = TraceLevel.Off;

            // Trial-by-trial trace goes to the file; progress goes to the console.
            TextWriter   orig = Console.Out;
            StreamWriter sw   = File.CreateText("HelloWorldSimple.txt");

            // FIX: labels were "Hellow"/"Goodbyew" — typos inconsistent with the
            // action chunk labels below and with the Full Hello World task.
            DimensionValuePair hi  = World.NewDimensionValuePair("Salutation", "Hello");
            DimensionValuePair bye = World.NewDimensionValuePair("Salutation", "Goodbye");

            ExternalActionChunk sayHi  = World.NewExternalActionChunk("Hello");
            ExternalActionChunk sayBye = World.NewExternalActionChunk("Goodbye");

            //Initialize the Agent
            Agent John = World.NewAgent("John");

            SimplifiedQBPNetwork net = AgentInitializer.InitializeImplicitDecisionNetwork(John, SimplifiedQBPNetwork.Factory);

            net.Input.Add(hi);
            net.Input.Add(bye);

            net.Output.Add(sayHi);
            net.Output.Add(sayBye);

            John.Commit(net);

            net.Parameters.LEARNING_RATE = 1;
            // Pure bottom-level learning: no rule extraction in the simple task.
            John.ACS.Parameters.PERFORM_RER_REFINEMENT = false;

            //Run the task
            Console.WriteLine("Running the Simple Hello World Task");
            Console.SetOut(sw);

            Random             rand = new Random();
            SensoryInformation si;

            ExternalActionChunk chosen;

            for (int i = 0; i < NumberTrials; i++)
            {
                si = World.NewSensoryInformation(John);

                //Randomly choose an input to perceive.
                if (rand.NextDouble() < .5)
                {
                    //Say "Hello"
                    si.Add(hi, John.Parameters.MAX_ACTIVATION);
                    si.Add(bye, John.Parameters.MIN_ACTIVATION);
                }
                else
                {
                    //Say "Goodbye"
                    si.Add(hi, John.Parameters.MIN_ACTIVATION);
                    si.Add(bye, John.Parameters.MAX_ACTIVATION);
                }

                //Perceive the sensory information
                John.Perceive(si);

                //Choose an action
                chosen = John.GetChosenExternalAction(si);

                //Deliver appropriate feedback to the agent
                if (chosen == sayHi)
                {
                    //The agent said "Hello".
                    if (si[hi] == John.Parameters.MAX_ACTIVATION)
                    {
                        //The agent responded correctly
                        Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was correct");
                        //Record the agent's success.
                        CorrectCounter++;
                        //Give positive feedback.
                        John.ReceiveFeedback(si, 1.0);
                    }
                    else
                    {
                        //The agent responded incorrectly
                        Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was incorrect");
                        //Give negative feedback.
                        John.ReceiveFeedback(si, 0.0);
                    }
                }
                else
                {
                    //The agent said "Goodbye".
                    if (si[bye] == John.Parameters.MAX_ACTIVATION)
                    {
                        //The agent responded correctly
                        Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was correct");
                        //Record the agent's success.
                        CorrectCounter++;
                        //Give positive feedback.
                        John.ReceiveFeedback(si, 1.0);
                    }
                    else
                    {
                        //The agent responded incorrectly
                        Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was incorrect");
                        //Give negative feedback.
                        John.ReceiveFeedback(si, 0.0);
                    }
                }

                // Temporarily restore the console writer to paint the progress bar.
                Console.SetOut(orig);
                progress           = (int)(((double)(i + 1) / (double)NumberTrials) * 100);
                Console.CursorLeft = 0;
                Console.Write(progress + "% Complete..");
                Console.SetOut(sw);
            }

            //Report Results

            Console.WriteLine("Reporting Results for the Simple Hello World Task");
            Console.WriteLine("John got " + CorrectCounter + " correct out of " + NumberTrials + " trials (" +
                              (int)Math.Round(((double)CorrectCounter / (double)NumberTrials) * 100) + "%)");

            Console.WriteLine("At the end of the task, John had learned the following rules:");
            foreach (var i in John.GetInternals(Agent.InternalContainers.ACTION_RULES))
            {
                Console.WriteLine(i);
            }

            sw.Close();
            Console.SetOut(orig);
            Console.CursorLeft = 0;
            Console.WriteLine("100% Complete..");
            //Kill the agent to end the task
            Console.WriteLine("Killing John to end the program");
            John.Die();
            Console.WriteLine("John is Dead");

            Console.WriteLine("The Simple Hello World Task has finished");
            Console.WriteLine("The results have been saved to \"HelloWorldSimple.txt\"");
            Console.Write("Press any key to exit");
            Console.ReadKey(true);
        }
Пример #26
0
        private void Run()
        {
            // Drives the peg-association task: for each block, optionally re-shuffle
            // the starting-peg -> target-peg mapping, then run the trials, give
            // feedback, and report how well John has learned the associations.
            SensoryInformation trialInput;

            bool shuffle = true;

            for (int block = 0; block < numBlocks; block++)
            {
                Console.Write("Starting block # " + (block + 1));
                if (shuffle)
                {
                    Console.WriteLine("... Shuffling pegs.");
                    for (int i = 0; i < 5; i++)
                    {
                        // Re-draw until the target peg differs from the starting peg.
                        int target = rand.Next(5);
                        while (target == i)
                        {
                            target = rand.Next(5);
                        }
                        corelations[i] = target;

                        Console.WriteLine("Starting Peg " + (i + 1) + " --> Target Peg " + (corelations[i] + 1));
                    }
                }
                else
                {
                    Console.WriteLine();
                }

                int progress = 0;

                for (int trial = 0; trial < numTrials; trial++)
                {
                    trialInput = World.NewSensoryInformation(John);

                    // Activate exactly one starting-peg input; the other four stay at 0.
                    var pegInputs = new[] { p1, p2, p3, p4, p5 };
                    int chosenIndex = rand.Next(5);
                    for (int k = 0; k < pegInputs.Length; k++)
                    {
                        trialInput.Add(pegInputs[k], k == chosenIndex ? 1 : 0);
                    }
                    int peg = chosenIndex + 1;

                    Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "For block " + (block + 1) + ", trial # " + (trial + 1) + ": The starting peg is " +
                                      peg + ", the target peg is " +
                                      (corelations[peg - 1] + 1));

                    John.Perceive(trialInput);

                    ExternalActionChunk chosenAction = John.GetChosenExternalAction(trialInput);

                    // Positive feedback when the chosen target peg matches the mapping.
                    bool wasCorrect = corelations[peg - 1] + 1 == (int)chosenAction.First().Value.AsIComparable;
                    if (wasCorrect)
                    {
                        Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was correct");
                        John.ReceiveFeedback(trialInput, 1);
                        numCorrect++;
                    }
                    else
                    {
                        Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was incorrect");
                        John.ReceiveFeedback(trialInput, 0);
                    }

                    progress           = (int)(((double)(trial + 1) / (double)numTrials) * 100);
                    Console.CursorLeft = 0;
                    Console.Write(progress + "% Complete..");
                }
                Console.WriteLine();

                Console.WriteLine("Block " + (block + 1) + " is finished. Let's see how John did...");
                Console.WriteLine("John's performance on block " + (block + 1) + ": " +
                                  Math.Round(((double)numCorrect / (double)numTrials) * 100) + "%");
                Console.WriteLine("Rules John learned:");
                foreach (var rule in John.GetInternals(Agent.InternalContainers.ACTION_RULES))
                {
                    Console.WriteLine(rule);
                }
                Console.Write("For the next block, would you like to shuffle the pegs to see if John can adjust (y = yes, n = no, x = exit)?");
                char answer = Console.ReadKey().KeyChar;
                Console.WriteLine();

                // 'x' exits immediately; the per-block tally is only reset when the
                // loop continues. Any other key leaves the shuffle setting unchanged.
                if (answer == 'x')
                {
                    break;
                }
                if (answer == 'y')
                {
                    shuffle = true;
                }
                else if (answer == 'n')
                {
                    shuffle = false;
                }
                numCorrect = 0;
            }

            // Kill the agent to end the task.
            Console.WriteLine("Killing John to end the program");
            John.Die();
            Console.WriteLine("John is Dead");

            Console.WriteLine("The Simple Tower of Hanoi Task has finished");
            Console.Write("Press any key to exit");
            Console.ReadKey(true);
        }
Пример #27
0
        public void Initialize()
        {
            // Sets up the two-agent iterated game: the shared world objects
            // (inputs, actions, WM chunks), the payoff matrix, and two identically
            // configured agents (Alice and Bob) that play against each other.

            // Dimension-value pair presented when an agent must choose a move.
            sayWhat = World.NewDimensionValuePair("YourAction", "What do you want to do?");

            // External action chunks: the two possible moves.
            sayCooperate = World.NewExternalActionChunk("Cooperate");
            sayDefect    = World.NewExternalActionChunk("Defect");

            // WM update actions that record the opponent's last move as a
            // declarative chunk in working memory.
            wmuacC = World.NewWorkingMemoryUpdateActionChunk("Remember my opponent cooperated");
            wmuacD = World.NewWorkingMemoryUpdateActionChunk("Remember my opponent defected");

            DeclarativeChunk dcoc = World.NewDeclarativeChunk("My opponent cooperated");
            DeclarativeChunk dcod = World.NewDeclarativeChunk("My opponent defected");

            wmuacC.Add(WorkingMemory.RecognizedActions.SET_RESET, dcoc);
            wmuacD.Add(WorkingMemory.RecognizedActions.SET_RESET, dcod);

            // Two agents with the same setup, playing against each other.
            // The simulating environment determines inputs to each agent based on
            // what each agent does; feedback is determined by the payoff matrix.
            Alice = World.NewAgent("Alice");
            Bob   = World.NewAgent("Bob");

            payoff = new int [2, 2, 2];

            // Explicit table for clarity. The payoff matrix here is called
            // "Friend or Foe", about the simplest case.
            // Indices mean: FOR-WHICH-AGENT, WHAT-ALICE-DOES, WHAT-BOB-DOES.
            payoff[_ALICE, _COOPERATE, _COOPERATE] = 1;
            payoff[_ALICE, _COOPERATE, _DEFECT]    = 0;
            payoff[_ALICE, _DEFECT, _COOPERATE]    = 2;
            payoff[_ALICE, _DEFECT, _DEFECT]       = 0;
            payoff[_BOB, _COOPERATE, _COOPERATE]   = 1;
            payoff[_BOB, _COOPERATE, _DEFECT]      = 2;
            payoff[_BOB, _DEFECT, _COOPERATE]      = 0;
            payoff[_BOB, _DEFECT, _DEFECT]         = 0;

            maxpay = 2;

            results = new int[_TRIALS, 2, 2];

            // Both agents get an identical decision setup; the configuration is
            // factored into a single helper instead of being written out twice.
            ConfigureAgent(Alice);
            ConfigureAgent(Bob);

            // Initially using the same RER parameters as Full Hello World.
            RefineableActionRule.GlobalParameters.SPECIALIZATION_THRESHOLD_1 = -.6;
            RefineableActionRule.GlobalParameters.GENERALIZATION_THRESHOLD_1 = -.1;
            RefineableActionRule.GlobalParameters.INFORMATION_GAIN_OPTION    = RefineableActionRule.IGOptions.PERFECT;

            /*
             * Note -- when you pass in "Your opponent…", the agent should return
             * the "Do Nothing" external action (since it performed an internal WM
             * action).. However, you can just ignore this either way..
             */
        }

        // Builds one player's decision machinery:
        //  - a Q-learning net (eligibility per QNetEC; inputs are the "What do you
        //    want to do?" query plus the two move chunks, outputs are the two
        //    moves), with RER refinement on and combined/fixed level selection;
        //  - two fixed rules (support per FRSC) that store the opponent's last
        //    move in working memory via wmuacC / wmuacD.
        private void ConfigureAgent(Agent agent)
        {
            QBPNetwork net = AgentInitializer.InitializeImplicitDecisionNetwork(agent, QBPNetwork.Factory, QNetEC);

            net.Input.Add(sayWhat);
            net.Input.Add(sayCooperate);
            net.Input.Add(sayDefect);
            net.Output.Add(sayCooperate);
            net.Output.Add(sayDefect);

            agent.Commit(net);

            net.Parameters.LEARNING_RATE = 1;
            agent.ACS.Parameters.PERFORM_RER_REFINEMENT            = true; // it's true by default anyway
            agent.ACS.Parameters.LEVEL_SELECTION_METHOD            = ActionCenteredSubsystem.LevelSelectionMethods.COMBINED;
            agent.ACS.Parameters.LEVEL_SELECTION_OPTION            = ActionCenteredSubsystem.LevelSelectionOptions.FIXED;
            agent.ACS.Parameters.FIXED_FR_LEVEL_SELECTION_MEASURE  = 1;
            agent.ACS.Parameters.FIXED_BL_LEVEL_SELECTION_MEASURE  = 1;
            agent.ACS.Parameters.FIXED_RER_LEVEL_SELECTION_MEASURE = 1;
            agent.ACS.Parameters.WM_UPDATE_ACTION_PROBABILITY      = 1;

            // Rule 1: opponent cooperated -> set "My opponent cooperated" in WM.
            // Rule 2: opponent defected   -> set "My opponent defected" in WM.
            FixedRule rule1 = AgentInitializer.InitializeActionRule(agent, FixedRule.Factory, wmuacC, FRSC);
            FixedRule rule2 = AgentInitializer.InitializeActionRule(agent, FixedRule.Factory, wmuacD, FRSC);

            agent.Commit(rule1);
            agent.Commit(rule2);
        }
Пример #28
0
        public void RunTaskStep(int tick, ref int choicesMade, ref int correctChoicesMade)
        {
            // Replays one time step of a recorded game: for every (non-consumable,
            // purchasable) item the human player bought at this tick, present the
            // current game state to the Clarion agent, let it pick a purchase, and
            // score it against the human's actual choice.
            // choicesMade / correctChoicesMade are running tallies updated in place.

            // Check that we have a valid enumerator, and that the time of the
            // hero's next state update matches the current tick — otherwise there
            // is nothing to do until the simulation advances further.
            if (MyHero.IsEnumeratorValid() == false || MyHero.GetCurrentStep().Tick > tick)
            {
                return;
            }

            // Pull out any purchases made by the player at this time step.
            List <Item> purchasedItems = new List <Item>();
            foreach (StateChange diff in MyHero.GetCurrentStep().Diffs)
            {
                if (diff.Type == UpdateType.ItemPurchase)
                {
                    // Cast once instead of re-casting for each access.
                    Item newItem = ((ItemPurchase)diff).NewItem;
                    if (!newItem.IsConsumable() && newItem.IsPurchasable())
                    {
                        purchasedItems.Add(newItem);
                    }
                }
            }

            // Note: the log may record more than one purchase for a single time
            // step (an artifact of missing components being auto-purchased during
            // an item upgrade). To handle that, we "peek ahead": after each
            // decision we increment the purchased item's inventory count so later
            // decisions at the same tick see it, then undo the increments at the
            // end (the normal state update adds them back).
            //
            // (An earlier experiment created a temporary imitation FixedRule here —
            // suggesting the human's action to the agent — and retracted it after
            // the choice; that dead code has been removed.)
            foreach (Item item in purchasedItems)
            {
                // The action chunk corresponding to the purchase the human made.
                PlayerChoice = PurchaseActions[(int)(item.Id)];

                // TODO(perf): consider reusing one SensoryInformation object and
                // only changing updated percepts instead of allocating a fresh one
                // per purchase — confirm whether this is a real performance hit.
                SensoryInformation sensoryInput = World.NewSensoryInformation(MyAgent);

                // Perceive the hero's current level as a one-hot over 25 inputs.
                // NOTE(review): assumes Level is in [1, 25]; a level above 25 would
                // leave every level input at MIN — confirm against the replay data.
                for (int i = 0; i < 25; i++)
                {
                    var levelActivation = (i == MyHero.Level - 1)
                        ? MyAgent.Parameters.MAX_ACTIVATION
                        : MyAgent.Parameters.MIN_ACTIVATION;
                    sensoryInput.Add(LevelInputs[i], levelActivation);
                }

                // Perceive the inventory: for each item, a one-hot over counts 0-3.
                // Counts above 3 are clamped to 3; previously a count > 3 matched
                // no bucket, so the item's count was silently invisible to the
                // agent (owning 4+ copies is rare but possible).
                for (int i = 0; i < Items.Length; i++)
                {
                    int count = Math.Min(Items[i], 3);
                    for (int j = 0; j < 4; j++)
                    {
                        var countActivation = (j == count)
                            ? MyAgent.Parameters.MAX_ACTIVATION
                            : MyAgent.Parameters.MIN_ACTIVATION;
                        sensoryInput.Add(InventoryInputs[i, j], countActivation);
                    }
                }

                // Let the agent perceive the state and choose a purchase.
                MyAgent.Perceive(sensoryInput);
                ExternalActionChunk myChoice = MyAgent.GetChosenExternalAction(sensoryInput);

                choicesMade++;

                // Feedback: 1.0 when the agent matched the human's purchase.
                if (myChoice == PlayerChoice)                   // agent was right
                {
                    MyAgent.ReceiveFeedback(sensoryInput, 1.0);
                    correctChoicesMade++;
                }
                else
                {
                    MyAgent.ReceiveFeedback(sensoryInput, 0.0);                         // agent was wrong
                }

                // Peek-ahead: count this purchase so subsequent decisions at the
                // same tick see it in the inventory (undone below).
                Items[(int)(item.Id)]++;
            }

            // Undo the peek-ahead increments; the regular inventory state update
            // will re-apply them.
            foreach (Item item in purchasedItems)
            {
                Items[(int)(item.Id)]--;
            }
        }
Пример #29
0
        private void Run()
        {
            // Plays _TRIALS x _ROUNDS rounds of the two-player game. Each round:
            // ask both agents for a move, pay each according to the payoff matrix,
            // then let each agent perceive the opponent's move (which is expected
            // to yield a DO_NOTHING external action plus an internal WM update).
            SensoryInformation siA;
            SensoryInformation siB;

            ExternalActionChunk chosenA = ExternalActionChunk.DO_NOTHING;
            ExternalActionChunk chosenB = ExternalActionChunk.DO_NOTHING;

            double payA, payB;

            for (int i = 0; i < _TRIALS; i++)
            {
                for (int j = 0; j < _ROUNDS; j++)
                {
                    PrintToConsole("Trial #" + i + ", Round #" + j + "     ");

                    // Invariant: perceiving the opponent's move at the end of the
                    // previous round must have left both agents at DO_NOTHING.
                    if (chosenA != ExternalActionChunk.DO_NOTHING || chosenB != ExternalActionChunk.DO_NOTHING)
                    {
                        PrintToLog("OOPS");
                        // This is an invariant violation, not an arithmetic error —
                        // InvalidOperationException (was DivideByZeroException).
                        throw new InvalidOperationException("OOPS");
                    }

                    siA = World.NewSensoryInformation(Alice);
                    siB = World.NewSensoryInformation(Bob);

                    // Ask each agent what it wants to do.
                    siA.Add(sayWhat, 1);
                    siB.Add(sayWhat, 1);

                    // Perceive the sensory information and choose a move.
                    Alice.Perceive(siA);
                    Bob.Perceive(siB);

                    chosenA = Alice.GetChosenExternalAction(siA);
                    chosenB = Bob.GetChosenExternalAction(siB);

                    // Score the round and record the outcome.
                    payA = ComputePayoff(_ALICE, chosenA, chosenB);
                    payB = ComputePayoff(_BOB, chosenA, chosenB);
                    TallyResults(i, chosenA, chosenB);

                    PrintToLog("Alice gets " + payA + "; Bob gets " + payB);

                    Alice.ReceiveFeedback(siA, payA);
                    Bob.ReceiveFeedback(siB, payB);

                    siA = World.NewSensoryInformation(Alice);
                    siB = World.NewSensoryInformation(Bob);

                    // Perceive the other player's chosen action.
                    siA.Add(chosenB, 1);
                    siB.Add(chosenA, 1);

                    Alice.Perceive(siA);
                    Bob.Perceive(siB);

                    // Choose an action (Note: DO_NOTHING is expected here; the
                    // invariant check at the top of the next round verifies it).
                    chosenA = Alice.GetChosenExternalAction(siA);
                    chosenB = Bob.GetChosenExternalAction(siB);
                }
            }
        }