Code Example #1
        public static void Initialize()
        {
            World.LoggingLevel = TraceLevel.Off;

            John = World.NewAgent("John");

            SimplifiedQBPNetwork net = AgentInitializer.InitializeImplicitDecisionNetwork(John, SimplifiedQBPNetwork.Factory);

            net.Input.Add(World.NewDimensionValuePair("Boolean 1", true));
            net.Input.Add(World.NewDimensionValuePair("Boolean 1", false));
            net.Input.Add(World.NewDimensionValuePair("Boolean 2", true));
            net.Input.Add(World.NewDimensionValuePair("Boolean 2", false));

            net.Output.Add(World.NewExternalActionChunk(true));
            net.Output.Add(World.NewExternalActionChunk(false));

            John.ACS.Parameters.PERFORM_RER_REFINEMENT = false;
            John.ACS.Parameters.SELECTION_TEMPERATURE  = .01;

            John.ACS.Parameters.LEVEL_SELECTION_OPTION = ActionCenteredSubsystem.LevelSelectionOptions.FIXED;

            John.ACS.Parameters.FIXED_IRL_LEVEL_SELECTION_MEASURE = 0;
            John.ACS.Parameters.FIXED_FR_LEVEL_SELECTION_MEASURE  = 0;

            //Tweak these parameters to see the impact each has on accuracy and learning
            net.Parameters.LEARNING_RATE = 1;
            net.Parameters.MOMENTUM      = .02;
            John.ACS.Parameters.FIXED_BL_LEVEL_SELECTION_MEASURE  = .5;
            John.ACS.Parameters.FIXED_RER_LEVEL_SELECTION_MEASURE = .5;

            John.Commit(net);
        }
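The Initialize() method above only builds and commits the network; training it follows the same perceive/act/feedback cycle used in the Hello World examples further down this page. Below is a minimal sketch of one trial, assuming the caller keeps references to the chunks created above and using an illustrative feedback rule (reward the agent when its chosen true/false action matches the presented "Boolean 1" value) that is not part of the original code:

        // Sketch only: one perceive-act-feedback trial for the network committed in
        // Initialize(). The DimensionValuePair and ExternalActionChunk arguments are
        // assumed to be the same objects created above; the feedback rule is illustrative.
        public static void RunOneTrial(DimensionValuePair b1True, DimensionValuePair b1False,
                                       ExternalActionChunk actTrue, ExternalActionChunk actFalse,
                                       bool input)
        {
            SensoryInformation si = World.NewSensoryInformation(John);

            //Activate the "Boolean 1" pair that matches the input ("Boolean 2" would be set the same way)
            si.Add(b1True,  input ? John.Parameters.MAX_ACTIVATION : John.Parameters.MIN_ACTIVATION);
            si.Add(b1False, input ? John.Parameters.MIN_ACTIVATION : John.Parameters.MAX_ACTIVATION);

            John.Perceive(si);
            ExternalActionChunk chosen = John.GetChosenExternalAction(si);

            //Reward when the chosen action matches the presented value (illustrative rule)
            John.ReceiveFeedback(si, ((chosen == actTrue) == input) ? 1.0 : 0.0);
        }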
Code Example #2
        public Clarion_AGC_Model(Agent MAKaey)
        {
            World.LoggingLevel = System.Diagnostics.TraceLevel.Off;

            this.MAKaey   = MAKaey;
            NeuralNetwork = AgentInitializer.InitializeImplicitDecisionNetwork(MAKaey, SimplifiedQBPNetwork.Factory);
        }
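The constructor above only attaches the implicit decision network to the agent; inputs, outputs and the commit step are left to the caller. A minimal sketch of that follow-up wiring, modeled on the Input/Output/Commit pattern of the other examples on this page (the "EnemyVisible" dimension and the "Attack"/"Retreat" actions are hypothetical names, not taken from the original project):

        // Sketch only: hypothetical follow-up configuration for the network created in
        // the constructor. The dimension and action names are placeholders.
        public void ConfigureNetwork()
        {
            NeuralNetwork.Input.Add(World.NewDimensionValuePair("EnemyVisible", true));
            NeuralNetwork.Input.Add(World.NewDimensionValuePair("EnemyVisible", false));

            NeuralNetwork.Output.Add(World.NewExternalActionChunk("Attack"));
            NeuralNetwork.Output.Add(World.NewExternalActionChunk("Retreat"));

            NeuralNetwork.Parameters.LEARNING_RATE = 1;

            MAKaey.Commit(NeuralNetwork);
        }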
Code Example #3
File: Program.cs Project: papeldeorigami/ia941
        private void Initialize()
        {
            World.LoggingLevel = TraceLevel.Off;

            p1 = World.NewDimensionValuePair("Peg", 1);
            p2 = World.NewDimensionValuePair("Peg", 2);
            p3 = World.NewDimensionValuePair("Peg", 3);
            p4 = World.NewDimensionValuePair("Peg", 4);
            p5 = World.NewDimensionValuePair("Peg", 5);

            mp1 = World.NewExternalActionChunk();
            mp2 = World.NewExternalActionChunk();
            mp3 = World.NewExternalActionChunk();
            mp4 = World.NewExternalActionChunk();
            mp5 = World.NewExternalActionChunk();

            mp1 += p1;
            mp2 += p2;
            mp3 += p3;
            mp4 += p4;
            mp5 += p5;

            John = World.NewAgent();

            net = AgentInitializer.InitializeImplicitDecisionNetwork(John, SimplifiedQBPNetwork.Factory);

            net.Input.Add(p1);
            net.Input.Add(p2);
            net.Input.Add(p3);
            net.Input.Add(p4);
            net.Input.Add(p5);

            net.Output.Add(mp1);
            net.Output.Add(mp2);
            net.Output.Add(mp3);
            net.Output.Add(mp4);
            net.Output.Add(mp5);

            net.Parameters.LEARNING_RATE = 1;
            net.Parameters.MOMENTUM      = .01;

            John.Commit(net);

            RefineableActionRule.GlobalParameters.GENERALIZATION_THRESHOLD_1 = -.01;
            RefineableActionRule.GlobalParameters.SPECIALIZATION_THRESHOLD_1 = -.4;
        }
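Driving this network follows the same pattern as the Hello World examples: activate one of the "Peg" pairs, let the agent pick a move chunk, and score the choice. Below is a minimal sketch of a single step using the fields defined above; which peg counts as correct is assumed to come from the surrounding task logic, and the scoring rule shown here is illustrative:

        // Sketch only: one perceive-act-feedback step with the peg inputs and move chunks
        // from Initialize(). The notion of the "correct" peg is supplied by the caller.
        private void Step(bool pegOneIsCorrect)
        {
            SensoryInformation si = World.NewSensoryInformation(John);

            si.Add(p1, pegOneIsCorrect ? John.Parameters.MAX_ACTIVATION : John.Parameters.MIN_ACTIVATION);
            si.Add(p2, John.Parameters.MIN_ACTIVATION);
            //p3, p4 and p5 would be set the same way as p2

            John.Perceive(si);
            ExternalActionChunk chosen = John.GetChosenExternalAction(si);

            //Illustrative scoring: reward choosing move mp1 exactly when peg 1 is the correct one
            John.ReceiveFeedback(si, ((chosen == mp1) == pegOneIsCorrect) ? 1.0 : 0.0);
        }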
Code Example #4
        static void Main(string[] args)
        {
            //Initialize the task
            Console.WriteLine("Initializing the Simple Hello World Task");

            int CorrectCounter = 0;
            int NumberTrials   = 10000;
            int progress       = 0;

            World.LoggingLevel = TraceLevel.Off;

            TextWriter   orig = Console.Out;
            StreamWriter sw   = File.CreateText("HelloWorldSimple.txt");

            DimensionValuePair hi  = World.NewDimensionValuePair("Salutation", "Hello");
            DimensionValuePair bye = World.NewDimensionValuePair("Salutation", "Goodbye");

            ExternalActionChunk sayHi  = World.NewExternalActionChunk("Hello");
            ExternalActionChunk sayBye = World.NewExternalActionChunk("Goodbye");

            //Initialize the Agent
            Agent John = World.NewAgent("John");

            SimplifiedQBPNetwork net = AgentInitializer.InitializeImplicitDecisionNetwork(John, SimplifiedQBPNetwork.Factory);

            net.Input.Add(hi);
            net.Input.Add(bye);

            net.Output.Add(sayHi);
            net.Output.Add(sayBye);

            John.Commit(net);

            net.Parameters.LEARNING_RATE = 1;
            John.ACS.Parameters.PERFORM_RER_REFINEMENT = false;

            //Run the task
            Console.WriteLine("Running the Simple Hello World Task");
            Console.SetOut(sw);

            Random             rand = new Random();
            SensoryInformation si;

            ExternalActionChunk chosen;

            for (int i = 0; i < NumberTrials; i++)
            {
                si = World.NewSensoryInformation(John);

                //Randomly choose an input to perceive.
                if (rand.NextDouble() < .5)
                {
                    //Say "Hello"
                    si.Add(hi, John.Parameters.MAX_ACTIVATION);
                    si.Add(bye, John.Parameters.MIN_ACTIVATION);
                }
                else
                {
                    //Say "Goodbye"
                    si.Add(hi, John.Parameters.MIN_ACTIVATION);
                    si.Add(bye, John.Parameters.MAX_ACTIVATION);
                }

                //Perceive the sensory information
                John.Perceive(si);

                //Choose an action
                chosen = John.GetChosenExternalAction(si);

                //Deliver appropriate feedback to the agent
                if (chosen == sayHi)
                {
                    //The agent said "Hello".
                    if (si[hi] == John.Parameters.MAX_ACTIVATION)
                    {
                        //The agent responded correctly
                        Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was correct");
                        //Record the agent's success.
                        CorrectCounter++;
                        //Give positive feedback.
                        John.ReceiveFeedback(si, 1.0);
                    }
                    else
                    {
                        //The agent responded incorrectly
                        Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was incorrect");
                        //Give negative feedback.
                        John.ReceiveFeedback(si, 0.0);
                    }
                }
                else
                {
                    //The agent said "Goodbye".
                    if (si[bye] == John.Parameters.MAX_ACTIVATION)
                    {
                        //The agent responded correctly
                        Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was correct");
                        //Record the agent's success.
                        CorrectCounter++;
                        //Give positive feedback.
                        John.ReceiveFeedback(si, 1.0);
                    }
                    else
                    {
                        //The agent responded incorrectly
                        Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was incorrect");
                        //Give negative feedback.
                        John.ReceiveFeedback(si, 0.0);
                    }
                }

                Console.SetOut(orig);
                progress           = (int)(((double)(i + 1) / (double)NumberTrials) * 100);
                Console.CursorLeft = 0;
                Console.Write(progress + "% Complete..");
                Console.SetOut(sw);
            }

            //Report Results

            Console.WriteLine("Reporting Results for the Simple Hello World Task");
            Console.WriteLine("John got " + CorrectCounter + " correct out of " + NumberTrials + " trials (" +
                              (int)Math.Round(((double)CorrectCounter / (double)NumberTrials) * 100) + "%)");

            Console.WriteLine("At the end of the task, John had learned the following rules:");
            foreach (var i in John.GetInternals(Agent.InternalContainers.ACTION_RULES))
            {
                Console.WriteLine(i);
            }

            sw.Close();
            Console.SetOut(orig);
            Console.CursorLeft = 0;
            Console.WriteLine("100% Complete..");
            //Kill the agent to end the task
            Console.WriteLine("Killing John to end the program");
            John.Die();
            Console.WriteLine("John is Dead");

            Console.WriteLine("The Simple Hello World Task has finished");
            Console.WriteLine("The results have been saved to \"HelloWorldSimple.txt\"");
            Console.Write("Press any key to exit");
            Console.ReadKey(true);
        }
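The two feedback branches inside the trial loop above are symmetric, so the same logic can be written once. A minimal sketch of such a helper (the method name is hypothetical; every call it makes already appears in Main):

        // Sketch only: the correct/incorrect branching from the trial loop, factored into
        // one helper. Returns true on a correct response so the caller can update CorrectCounter.
        static bool DeliverFeedback(Agent john, SensoryInformation si, ExternalActionChunk chosen,
                                    ExternalActionChunk sayHi, DimensionValuePair hi, DimensionValuePair bye)
        {
            //If the agent said "Hello", the "Hello" input should have been the active one (and vice versa)
            DimensionValuePair expected = (chosen == sayHi) ? hi : bye;
            bool correct = si[expected] == john.Parameters.MAX_ACTIVATION;

            Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, correct ? "John was correct" : "John was incorrect");
            john.ReceiveFeedback(si, correct ? 1.0 : 0.0);

            return correct;
        }

Inside the loop, the whole if/else block then reduces to: if (DeliverFeedback(John, si, chosen, sayHi, hi, bye)) CorrectCounter++;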
Code Example #5
        public void Init()
        {
            MyAgent = World.NewAgent(MyHero.Name);

            // set up the network and learning parameters for the agent
            SimplifiedQBPNetwork net = AgentInitializer.InitializeImplicitDecisionNetwork(MyAgent, SimplifiedQBPNetwork.Factory);

            for (int i = 0; i < HealthInputs.GetLength(0); i++)
            {
//				net.Input.Add(HealthInputs[i]);
//				net.Input.Add(ManaInputs[i]);
            }
            for (int i = 0; i < InventoryInputs.GetLength(0); i++)
            {
                for (int j = 0; j < InventoryInputs.GetLength(1); j++)
                {
                    net.Input.Add(InventoryInputs[i, j]);
                }

                net.Output.Add(PurchaseActions[i]);
            }

            FixedRule fr;

            for (int i = 0; i < PurchaseActions.Length; i++)
            {
                fr = AgentInitializer.InitializeActionRule(MyAgent, FixedRule.Factory, PurchaseActions[i], ImitativeSupportDelegate);

//				fr.GeneralizedCondition.Add(Inputs[i], true);

                MyAgent.Commit(fr);
            }

//			AddRules();

            //net.Parameters.LEARNING_RATE = 0.5;
            net.Parameters.LEARNING_RATE = 2.0;
//			net.Parameters.MOMENTUM = 0.02;

            MyAgent.Commit(net);


            MyAgent.ACS.Parameters.SELECTION_TEMPERATURE = 0.05;
            MyAgent.ACS.Parameters.DELETION_FREQUENCY    = 100;

            MyAgent.ACS.Parameters.LEVEL_SELECTION_METHOD = ActionCenteredSubsystem.LevelSelectionMethods.COMBINED;
            MyAgent.ACS.Parameters.LEVEL_SELECTION_OPTION = ActionCenteredSubsystem.LevelSelectionOptions.FIXED;


            // set up the probabilities used to select which system will be chosen to select an action
            // at each step (should total 1.0):
            //	BL - bottom layer (reinforcement learning neural net)
            //	RER - rule extraction and refinement - extracts rules from the bottom layer
            //	IRL - independent rule learning - does not use the bottom layer for learning rules
            //  FR - fixed rules - Clarion cannot change these (though they can be added/removed externally)
            // We are currently using fixed rules when we want the agent to imitate the human player and train
            //	the bottom layer
            MyAgent.ACS.Parameters.FIXED_BL_LEVEL_SELECTION_MEASURE  = 0.33;
            MyAgent.ACS.Parameters.FIXED_RER_LEVEL_SELECTION_MEASURE = 0.33;
            MyAgent.ACS.Parameters.FIXED_IRL_LEVEL_SELECTION_MEASURE = 0;
            MyAgent.ACS.Parameters.FIXED_FR_LEVEL_SELECTION_MEASURE  = 0.33;

            /*
             * MyAgent.ACS.Parameters.FIXED_BL_LEVEL_SELECTION_MEASURE = 0.75;
             * MyAgent.ACS.Parameters.FIXED_RER_LEVEL_SELECTION_MEASURE = 0.25;
             * MyAgent.ACS.Parameters.FIXED_IRL_LEVEL_SELECTION_MEASURE = 0;
             * MyAgent.ACS.Parameters.FIXED_FR_LEVEL_SELECTION_MEASURE = 0;
             */


            //MyAgent.ACS.Parameters.VARIABLE_BL_BETA = 0.5;
            //MyAgent.ACS.Parameters.VARIABLE_RER_BETA = 0.5;
            //MyAgent.ACS.Parameters.VARIABLE_IRL_BETA = 0;
            //MyAgent.ACS.Parameters.VARIABLE_FR_BETA = 0;
        }
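The comment block in Init() explains that the four fixed measures split action selection between the bottom-level network (BL), rules extracted from it (RER), independently learned rules (IRL) and the fixed imitation rules (FR). Below is a minimal sketch of how that split might be shifted once the bottom level has been trained and imitation is no longer wanted; the method and the particular weights are assumptions, and only parameters already set in Init() are touched:

        // Sketch only: hypothetical switch from imitation-heavy to autonomous selection.
        // The weights are illustrative; as in Init(), they should total 1.0.
        public void StopImitating()
        {
            MyAgent.ACS.Parameters.FIXED_BL_LEVEL_SELECTION_MEASURE  = 0.6;  //rely mostly on the trained bottom level
            MyAgent.ACS.Parameters.FIXED_RER_LEVEL_SELECTION_MEASURE = 0.4;  //plus the rules extracted from it
            MyAgent.ACS.Parameters.FIXED_IRL_LEVEL_SELECTION_MEASURE = 0;
            MyAgent.ACS.Parameters.FIXED_FR_LEVEL_SELECTION_MEASURE  = 0;    //stop consulting the fixed (imitation) rules
        }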
Code Example #6
        /// <summary>
        /// Setup the ACS subsystem
        /// </summary>
        private void SetupACS()
        {
            // Create Rule to avoid collision with wall
            SupportCalculator avoidCollisionWallSupportCalculator = FixedRuleToAvoidCollisionWall;
            FixedRule         ruleAvoidCollisionWall = AgentInitializer.InitializeActionRule(CurrentAgent, FixedRule.Factory, outputRotateClockwise, avoidCollisionWallSupportCalculator);

            // Commit this rule to Agent (in the ACS)
            CurrentAgent.Commit(ruleAvoidCollisionWall);

            // Create Rule To Go Ahead
            SupportCalculator goAheadSupportCalculator = FixedRuleToGoAhead;
            FixedRule         ruleGoAhead = AgentInitializer.InitializeActionRule(CurrentAgent, FixedRule.Factory, outputGoAhead, goAheadSupportCalculator);

            // Commit this rule to Agent (in the ACS) - FMT commenting to avoid conflict
            //CurrentAgent.Commit(ruleGoAhead);

            // FMT 29/04/2017
            // FMT Create Rule to Eat
            SupportCalculator eatSupportCalculator = FixedRuleToEat;
            FixedRule         ruleEat = AgentInitializer.InitializeActionRule(CurrentAgent, FixedRule.Factory, outputEat, eatSupportCalculator);

            CurrentAgent.ACS.Parameters.PERFORM_RER_REFINEMENT            = true;
            CurrentAgent.ACS.Parameters.LEVEL_SELECTION_METHOD            = ActionCenteredSubsystem.LevelSelectionMethods.COMBINED;
            CurrentAgent.ACS.Parameters.LEVEL_SELECTION_OPTION            = ActionCenteredSubsystem.LevelSelectionOptions.FIXED;
            CurrentAgent.ACS.Parameters.FIXED_FR_LEVEL_SELECTION_MEASURE  = 1;
            CurrentAgent.ACS.Parameters.FIXED_BL_LEVEL_SELECTION_MEASURE  = 1;
            CurrentAgent.ACS.Parameters.FIXED_RER_LEVEL_SELECTION_MEASURE = 1;
            CurrentAgent.ACS.Parameters.WM_UPDATE_ACTION_PROBABILITY      = 1;

            // Commit this rule to Agent (in the ACS)
            CurrentAgent.Commit(ruleEat);

            // FMT Create Rule to Get
            SupportCalculator getSupportCalculator = FixedRuleToGet;
            FixedRule         ruleGet = AgentInitializer.InitializeActionRule(CurrentAgent, FixedRule.Factory, outputGet, getSupportCalculator);

            // Commit this rule to Agent (in the ACS)
            CurrentAgent.Commit(ruleGet);

            // FMT Create Rule to Hide
            SupportCalculator hideSupportCalculator = FixedRuleToHide;
            FixedRule         ruleHide = AgentInitializer.InitializeActionRule(CurrentAgent, FixedRule.Factory, outputHide, hideSupportCalculator);

            // Commit this rule to Agent (in the ACS)
            CurrentAgent.Commit(ruleHide);

            // FMT Create Rule to Go To
            SupportCalculator gotoSupportCalculator = FixedRuleToGoTo;
            FixedRule         ruleGoto = AgentInitializer.InitializeActionRule(CurrentAgent, FixedRule.Factory, outputGoTo, gotoSupportCalculator);

            // Commit this rule to Agent (in the ACS)
            CurrentAgent.Commit(ruleGoto);

            // FMT Create Rule to Stop
            SupportCalculator stopSupportCalculator = FixedRuleToStop;
            FixedRule         ruleStop = AgentInitializer.InitializeActionRule(CurrentAgent, FixedRule.Factory, outputStop, stopSupportCalculator);

            // Commit this rule to Agent (in the ACS)
            CurrentAgent.Commit(ruleStop);

            // Disable Rule Refinement
            CurrentAgent.ACS.Parameters.PERFORM_RER_REFINEMENT = false;

            // The selection type will be probabilistic
            CurrentAgent.ACS.Parameters.LEVEL_SELECTION_METHOD = ActionCenteredSubsystem.LevelSelectionMethods.STOCHASTIC;

            // The action selection will be fixed (not variable) i.e. only the statement defined above.
            CurrentAgent.ACS.Parameters.LEVEL_SELECTION_OPTION = ActionCenteredSubsystem.LevelSelectionOptions.FIXED;

            // Define Probabilistic values
            CurrentAgent.ACS.Parameters.FIXED_FR_LEVEL_SELECTION_MEASURE  = 1;
            CurrentAgent.ACS.Parameters.FIXED_IRL_LEVEL_SELECTION_MEASURE = 0;
            CurrentAgent.ACS.Parameters.FIXED_BL_LEVEL_SELECTION_MEASURE  = 0;
            CurrentAgent.ACS.Parameters.FIXED_RER_LEVEL_SELECTION_MEASURE = 0;

            // FMT 13/05/2017 additional setting for network
            SimplifiedQBPNetwork net = AgentInitializer.InitializeImplicitDecisionNetwork(CurrentAgent, SimplifiedQBPNetwork.Factory);

            net.Parameters.LEARNING_RATE = 1;
            CurrentAgent.ACS.Parameters.PERFORM_RER_REFINEMENT = false;
        }
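Each FixedRule in SetupACS() is paired with a SupportCalculator such as FixedRuleToAvoidCollisionWall, which reports how strongly the current input supports that rule's action. Below is a minimal sketch of what one of these delegates might look like; the exact signature, the Contains call and the inputWallAhead pair are assumptions based on the surrounding examples, so check them against the SupportCalculator definition in your version of the Clarion Library:

        // Sketch only: a support calculator in the style of FixedRuleToAvoidCollisionWall.
        // The delegate signature and the inputWallAhead DimensionValuePair are assumptions.
        private double FixedRuleToAvoidCollisionWall(ActivationCollection currentInput, Rule target)
        {
            //Full support for the rotate-clockwise action when the wall-ahead input is at
            //maximum activation, no support otherwise
            return currentInput.Contains(inputWallAhead, CurrentAgent.Parameters.MAX_ACTIVATION) ? 1.0 : 0.0;
        }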
Code Example #7
        static void Main(string[] args)
        {
            //Initialize the task
            Console.WriteLine("Initializing the Full Hello World Task");

            int CorrectCounter = 0;
            int NumberTrials   = 20000;

            Random rand = new Random();

            World.LoggingLevel = TraceLevel.Off;

            int progress = 0;

            TextWriter   orig = Console.Out;
            StreamWriter sw   = File.CreateText("HelloWorldFull.txt");

            DimensionValuePair hi  = World.NewDimensionValuePair("Salutation", "Hello");
            DimensionValuePair bye = World.NewDimensionValuePair("Salutation", "Goodbye");

            ExternalActionChunk sayHi  = World.NewExternalActionChunk("Hello");
            ExternalActionChunk sayBye = World.NewExternalActionChunk("Goodbye");

            GoalChunk salute      = World.NewGoalChunk("Salute");
            GoalChunk bidFarewell = World.NewGoalChunk("Bid Farewell");

            //Initialize the Agent
            Agent John = World.NewAgent("John");

            SimplifiedQBPNetwork net = AgentInitializer.InitializeImplicitDecisionNetwork(John, SimplifiedQBPNetwork.Factory);

            net.Input.Add(salute, "goals");
            net.Input.Add(bidFarewell, "goals");

            net.Input.Add(hi);
            net.Input.Add(bye);

            net.Output.Add(sayHi);
            net.Output.Add(sayBye);

            net.Parameters.LEARNING_RATE = 1;

            John.Commit(net);

            John.ACS.Parameters.VARIABLE_BL_BETA  = .5;
            John.ACS.Parameters.VARIABLE_RER_BETA = .5;
            John.ACS.Parameters.VARIABLE_IRL_BETA = 0;
            John.ACS.Parameters.VARIABLE_FR_BETA  = 0;

            RefineableActionRule.GlobalParameters.SPECIALIZATION_THRESHOLD_1 = -.6;
            RefineableActionRule.GlobalParameters.GENERALIZATION_THRESHOLD_1 = -.1;
            RefineableActionRule.GlobalParameters.INFORMATION_GAIN_OPTION    = RefineableActionRule.IGOptions.PERFECT;

            AffiliationBelongingnessDrive ab = AgentInitializer.InitializeDrive(John, AffiliationBelongingnessDrive.Factory, rand.NextDouble(), (DeficitChangeProcessor)HelloWorldFull_DeficitChange);

            DriveEquation abd = AgentInitializer.InitializeDriveComponent(ab, DriveEquation.Factory);

            ab.Commit(abd);

            John.Commit(ab);

            AutonomyDrive aut = AgentInitializer.InitializeDrive(John, AutonomyDrive.Factory, rand.NextDouble(), (DeficitChangeProcessor)HelloWorldFull_DeficitChange);

            DriveEquation autd =
                AgentInitializer.InitializeDriveComponent(aut, DriveEquation.Factory);

            aut.Commit(autd);

            John.Commit(aut);

            GoalSelectionModule gsm =
                AgentInitializer.InitializeMetaCognitiveModule(John, GoalSelectionModule.Factory);

            GoalSelectionEquation gse =
                AgentInitializer.InitializeMetaCognitiveDecisionNetwork(gsm, GoalSelectionEquation.Factory);

            gse.Input.Add(ab.GetDriveStrength());
            gse.Input.Add(aut.GetDriveStrength());

            GoalStructureUpdateActionChunk su = World.NewGoalStructureUpdateActionChunk();
            GoalStructureUpdateActionChunk bu = World.NewGoalStructureUpdateActionChunk();

            su.Add(GoalStructure.RecognizedActions.SET_RESET, salute);
            bu.Add(GoalStructure.RecognizedActions.SET_RESET, bidFarewell);

            gse.Output.Add(su);
            gse.Output.Add(bu);

            gsm.SetRelevance(su, ab, 1);
            gsm.SetRelevance(bu, aut, 1);

            gsm.Commit(gse);

            John.Commit(gsm);

            John.MS.Parameters.CURRENT_GOAL_ACTIVATION_OPTION =
                MotivationalSubsystem.CurrentGoalActivationOptions.FULL;

            //Run the task
            Console.WriteLine("Running the Full Hello World Task");
            Console.SetOut(sw);

            SensoryInformation si;

            ExternalActionChunk chosen;

            for (int i = 0; i < NumberTrials; i++)
            {
                si = World.NewSensoryInformation(John);

                si[AffiliationBelongingnessDrive.MetaInfoReservations.STIMULUS, typeof(AffiliationBelongingnessDrive).Name] = 1;
                si[AutonomyDrive.MetaInfoReservations.STIMULUS, typeof(AutonomyDrive).Name] = 1;

                //Randomly choose an input to perceive.
                if (rand.NextDouble() < .5)
                {
                    //Say "Hello"
                    si.Add(hi, John.Parameters.MAX_ACTIVATION);
                    si.Add(bye, John.Parameters.MIN_ACTIVATION);
                }
                else
                {
                    //Say "Goodbye"
                    si.Add(hi, John.Parameters.MIN_ACTIVATION);
                    si.Add(bye, John.Parameters.MAX_ACTIVATION);
                }

                //Perceive the sensory information
                John.Perceive(si);

                //Choose an action
                chosen = John.GetChosenExternalAction(si);

                //Deliver appropriate feedback to the agent
                if (chosen == sayHi)
                {
                    //The agent said "Hello".
                    if (si[hi] == John.Parameters.MAX_ACTIVATION)
                    {
                        //The agent responded correctly
                        Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was correct");
                        //Record the agent's success.
                        CorrectCounter++;
                        //Give positive feedback.
                        John.ReceiveFeedback(si, 1.0);
                    }
                    else
                    {
                        //The agent responded incorrectly
                        Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was incorrect");
                        //Give negative feedback.
                        John.ReceiveFeedback(si, 0.0);
                    }
                }
                else
                {
                    //The agent said "Goodbye".
                    if (si[bye] == John.Parameters.MAX_ACTIVATION)
                    {
                        //The agent responded correctly
                        Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was correct");
                        //Record the agent's success.
                        CorrectCounter++;
                        //Give positive feedback.
                        John.ReceiveFeedback(si, 1.0);
                    }
                    else
                    {
                        //The agent responded incorrectly
                        Trace.WriteLineIf(World.LoggingSwitch.TraceWarning, "John was incorrect");
                        //Give negative feedback.
                        John.ReceiveFeedback(si, 0.0);
                    }
                }

                Console.SetOut(orig);
                progress           = (int)(((double)(i + 1) / (double)NumberTrials) * 100);
                Console.CursorLeft = 0;
                Console.Write(progress + "% Complete..");
                Console.SetOut(sw);
            }

            //Report Results

            Console.WriteLine("Reporting Results for the Full Hello World Task");
            Console.WriteLine("John got " + CorrectCounter + " correct out of " + NumberTrials + " trials (" +
                              (int)Math.Round(((double)CorrectCounter / (double)NumberTrials) * 100) + "%)");

            Console.WriteLine("At the end of the task, John had learned the following rules:");
            foreach (var i in John.GetInternals(Agent.InternalContainers.ACTION_RULES))
            {
                Console.WriteLine(i);
            }

            sw.Close();
            Console.SetOut(orig);
            Console.CursorLeft = 0;
            Console.WriteLine("100% Complete..");
            //Kill the agent to end the task
            Console.WriteLine("Killing John to end the program");
            John.Die();
            Console.WriteLine("John is Dead");

            Console.WriteLine("The Full Hello World Task has finished");
            Console.WriteLine("The results have been saved to \"HelloWorldFull.txt\"");
            Console.Write("Press any key to exit");
            Console.ReadKey(true);
        }