Example no. 1
0
        /// <summary>
        /// Records the individual's Heading at each tick of the simulation.
        /// </summary>
        void IBehaviorCharacterization.update(SimulatorExperiment exp, instance_pack ip)
        {
            // If this is the first update, initialize the sensor value accumulator
            if (!Initialized)
            {
                HeadingValues = new List <double>(exp.evaluationTime + 1);
                Initialized   = true;
            }

            if (ip.robots[0].Stopped)
            {
                // If this is the first update after the robot has Stopped,
                // set the endpoint to be the current simulation tick
                if (endTick == 0)
                {
                    HeadingValues.Add(ip.robots[0].Heading); // JUSTIN: Sample on the last tick (first tick of being Stopped) to fix an array overflow error
                    endTick = Convert.ToInt32(ip.timeSteps * exp.timestep);
                }
            }
            else
            {
                // Sample the robot's Heading on this tick.
                HeadingValues.Add(ip.robots[0].Heading);
            }
        }
        /// <summary>
        /// Constructs the behavior vector at the end of an individual evaluation.
        /// </summary>
        List <double> IBehaviorCharacterization.calculate(SimulatorExperiment exp, instance_pack ip)
        {
            // initialize the BC vector
            BehaviorVector = new List <double>();

            // If the robot never Stopped, set the max evaluation time as the end tick
            if (endTick == 0)
            {
                endTick = exp.evaluationTime;
            }

            // Calculate when to perform an update
            int numBehaviorChunks = VectorLength / 2;

            chunkSize = Convert.ToInt32(Math.Floor((double)endTick / (double)numBehaviorChunks)) * 2;

            float x, y;

            for (int chunkNum = 1; chunkNum < numBehaviorChunks + 1; chunkNum++)
            {
                // Take bc samples from the internal Trajectory store
                x = Trajectory[chunkNum * chunkSize - 2];
                x = (x - ip.env.AOIRectangle.Left) / ip.env.AOIRectangle.Width;
                BehaviorVector.Add(x);

                y = Trajectory[chunkNum * chunkSize - 1];
                y = (y - ip.env.AOIRectangle.Top) / ip.env.AOIRectangle.Height;
                BehaviorVector.Add(y);
            }

            return(BehaviorVector);
        }
        /// <summary>
        /// Constructs the behavior vector at the end of an individual evaluation.
        /// </summary>
        List<double> IBehaviorCharacterization.calculate(SimulatorExperiment exp, instance_pack ip)
        {
            // initialize the BC vector
            BehaviorVector = new List<double>();

            // If the robot never Stopped, set the max evaluation time as the end tick
            if (endTick == 0)
                endTick = exp.evaluationTime;

            // Calculate when to perform an update
            int numBehaviorChunks = VectorLength / 2;
            chunkSize = Convert.ToInt32(Math.Floor((double)endTick / (double)numBehaviorChunks)) * 2;

            float x, y;

            for (int chunkNum = 1; chunkNum < numBehaviorChunks + 1; chunkNum++)
            {
                // Take bc samples from the internal Trajectory store
                x = Trajectory[chunkNum * chunkSize - 2];
                x = (x - ip.env.AOIRectangle.Left) / ip.env.AOIRectangle.Width;
                BehaviorVector.Add(x);

                y = Trajectory[chunkNum * chunkSize - 1];
                y = (y - ip.env.AOIRectangle.Top) / ip.env.AOIRectangle.Height;
                BehaviorVector.Add(y);
            }

            return BehaviorVector;
        }
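A note on the normalization used in calculate above: each sampled Trajectory coordinate is mapped into [0, 1] relative to the environment's AOIRectangle, so behavior vectors stay comparable across differently sized environments. A minimal worked sketch, using hypothetical rectangle and sample values:

            // Hypothetical values for illustration only.
            float left = 100f, top = 50f, width = 400f, height = 300f; // stand-ins for ip.env.AOIRectangle
            float rawX = 300f, rawY = 200f;                            // one sampled Trajectory point
            float normX = (rawX - left) / width;                       // (300 - 100) / 400 = 0.5
            float normY = (rawY - top) / height;                       // (200 - 50) / 300 = 0.5
            // BehaviorVector would receive 0.5 followed by 0.5 for this sample.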
        /// <summary>
        /// Initializes the collision manager.
        /// </summary>
        /// <param name="env">Environment for simulator experiments</param>
        /// <param name="exp">Simulator experiment</param>
        /// <param name="robots">Set of robots that should be tested for collision</param>
        public override void initialize(Environment domain, SimulatorExperiment exp, List<Robot> robots)
        {
            Robots = robots;
            Exp = exp;
            if (exp is MultiAgentExperiment)
            {
                AgentCollide = ((MultiAgentExperiment)Exp).agentsCollide;
                AgentVisible = ((MultiAgentExperiment)Exp).agentsVisible;
            }
            Domain = domain;
        }
 /// <summary>
 /// Initializes the collision manager.
 /// </summary>
 /// <param name="env">Environment for simulator experiments</param>
 /// <param name="exp">Simulator experiment</param>
 /// <param name="robots">Set of robots that should be tested for collision</param>
 public override void initialize(Environment domain, SimulatorExperiment exp, List <Robot> robots)
 {
     Robots = robots;
     Exp    = exp;
     if (exp is MultiAgentExperiment)
     {
         AgentCollide = ((MultiAgentExperiment)Exp).agentsCollide;
         AgentVisible = ((MultiAgentExperiment)Exp).agentsVisible;
     }
     Domain = domain;
 }
Example no. 6
0
        /// <summary>
        /// Constructs the behavior vector at the end of an individual evaluation.
        /// </summary>
        List <double> IBehaviorCharacterization.calculate(SimulatorExperiment exp, instance_pack ip)
        {
            List <double> bc = new List <double>();

            for (int agentIndex = 0; agentIndex < ip.robots.Count; agentIndex++)
            {
                double x;
                double y;

                x = ip.robots[agentIndex].Location.X;
                y = ip.robots[agentIndex].Location.Y;
                x = (x - ip.env.AOIRectangle.Left) / ip.env.AOIRectangle.Width;
                y = (y - ip.env.AOIRectangle.Top) / ip.env.AOIRectangle.Height;
                bc.Insert(agentIndex * 2, x);
                bc.Insert(agentIndex * 2 + 1, y);
            }
            return(bc);
        }
Example no. 7
0
        /// <summary>
        /// Constructs the behavior vector at the end of an individual evaluation.
        /// </summary>
        List<double> IBehaviorCharacterization.calculate(SimulatorExperiment exp, instance_pack ip)
        {
            List<double> bc = new List<double>();

            for (int agentIndex = 0; agentIndex < ip.robots.Count; agentIndex++)
            {
                double x;
                double y;

                x = ip.robots[agentIndex].Location.X;
                y = ip.robots[agentIndex].Location.Y;
                x = (x - ip.env.AOIRectangle.Left) / ip.env.AOIRectangle.Width;
                y = (y - ip.env.AOIRectangle.Top) / ip.env.AOIRectangle.Height;
                bc.Insert(agentIndex * 2, x);
                bc.Insert(agentIndex * 2 + 1, y);

            }
            return bc;
        }
        /// <summary>
        /// Loads the environments specified in the experiment file.
        /// </summary>
        public static void loadEnvironments(SimulatorExperiment experiment)
        {
            experiment.environmentList.Clear();
            Environment scene = Environment.loadEnvironment(experiment.environmentName);

            experiment.environmentList.Add(scene);

            experiment.environment = scene;

            Console.Write("Looking for additional environments [" + scene.name + "] ... ");
            String filenamePrefix = scene.name.Substring(0, scene.name.Length - 4);
            int    num            = 1;
            String filename2      = filenamePrefix + num + ".xml";

            while (File.Exists(filename2))
            {
                Console.WriteLine("Found CurrentEnvironment: " + filename2 + "\n");
                experiment.environmentList.Add(Environment.loadEnvironment(filename2));
                num++;
                filename2 = filenamePrefix + num + ".xml";
            }
            Console.WriteLine("Done");
            Console.WriteLine(experiment.environmentList.Count.ToString() + " CurrentEnvironment(s) found.");
        }
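The while loop above discovers additional environments purely by naming convention: it strips the trailing ".xml" from the base scene name and probes numbered siblings until one is missing. A minimal sketch of that convention, assuming a hypothetical base file "maze.xml":

            // Hypothetical file name for illustration only.
            string baseName = "maze.xml";
            string prefix = baseName.Substring(0, baseName.Length - 4); // "maze"
            // The loader then tests "maze1.xml", "maze2.xml", ... in order,
            // adding each existing file and stopping at the first missing index.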
        /// <summary>
        /// Records the individual's location at each tick of the simulation.
        /// </summary>
        void IBehaviorCharacterization.update(SimulatorExperiment exp, instance_pack ip)
        {
            if (ip.robots[0].Stopped)
            {
                // If this is the first update after the robot has Stopped,
                // set the endpoint to the current simulation tick
                if (endTick == 0)
                {
                    endTick = Convert.ToInt32(ip.timeSteps * exp.timestep);
                }
            }

            // initialize the Trajectory list
            if (!Initialized)
            {
                // initialize the sensor value sampling/storage components
                Trajectory  = new List <int>(exp.evaluationTime * 2);
                Initialized = true;
            }

            // update the Trajectory at every tick
            Trajectory.Add(Convert.ToInt16(ip.robots[0].Location.X));
            Trajectory.Add(Convert.ToInt16(ip.robots[0].Location.Y));
        }
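The Trajectory list built above interleaves X and Y samples (X at even indices, Y at odd indices), which is what the earlier calculate method relies on when it reads chunkNum * chunkSize - 2 and - 1. A minimal sketch of the layout with hypothetical coordinates:

            // Hypothetical contents for illustration only.
            List<int> trajectory = new List<int> { 10, 20, 12, 21 }; // two ticks: (10, 20), (12, 21)
            int xAtTick1 = trajectory[2 * 1];                        // 12
            int yAtTick1 = trajectory[2 * 1 + 1];                    // 21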
        private void newExperimentMenuItem_Click(object sender, EventArgs e)
        {
            NewExperimentForm f = new NewExperimentForm();
            f.ShowDialog();

            if (f.simExp != null)
            {
                this.Text = "MultiAgent-HyperSharpNEAT Simulator - Unnamed";
                experiment = (SimulatorExperiment)f.simExp;
                experiment.substrateDescriptionFilename = "substrate.xml";
                experiment.environmentName = "default_environment.xml";
                experiment.setFitnessFunction("CoverageFitness");
                experiment.initialize();

                Invalidate();
            }
        }
 private void startToolStripMenuItem_Click(object sender, EventArgs e)
 {
     if (!bEvolve)
     {
         evolutionExperiment = new MultiAgentExperiment((MultiAgentExperiment)experiment);
         evolutionExperiment.initialize();
         bEvolve = true;
         evolveThread = new Thread(doEvolution);
         evolveThread.Start();
     }
     else // if it was already on, turn it off
     {
         bEvolve = false;
         evolveThread.Join();
     }
     startToolStripMenuItem.Checked = bEvolve;
 }
        private void loadExperimentToolStripMenuItem_Click(object sender, EventArgs e)
        {
            fileOpenDialog.Title = "Open Experiment";
            DialogResult res = fileOpenDialog.ShowDialog(this);
            if (res == DialogResult.OK || res == DialogResult.Yes)
            {
                string filename = fileOpenDialog.FileName;
                try
                {
                    ExperimentWrapper wr = ExperimentWrapper.load(filename);
                    this.Text = "MultiAgent-HyperSharpNEAT Simulator - " + filename;
                    experiment = wr.experiment;
                    experiment.initialize();

                    Invalidate();
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.ToString());
                }
            }
        }
Example no. 13
0
        /// <summary>
        /// Constructs the behavior vector at the end of an individual evaluation.
        /// </summary>
        List<double> IBehaviorCharacterization.calculate(SimulatorExperiment exp, instance_pack ip)
        {
            // Initialize the BC vector
            BehaviorVector = new List<double>();
            // If the robot never Stopped, set the max evaluation time as the end tick
            if (endTick == 0)
                endTick = exp.evaluationTime - 1;
            // Calculate when to perform an update
            chunkSize = Convert.ToInt32(Math.Floor((double)endTick / (double)NumIntervals));

            int[] headingBins;
            for (int chunkNum = 0; chunkNum < NumIntervals; chunkNum++)
            {
                // Reset the accumulators / Heading bins for this time slice.
                headingBins = new int[] { 0, 0, 0, 0 };
                double temp;
                // Fill the Heading bins for this time slice.
                for (int j = 0; j < chunkSize; j++)
                {
                    if ((chunkNum * chunkSize + j) >= HeadingValues.Count)
                    {
                        continue;
                    }
                    temp = HeadingValues[chunkNum * chunkSize + j];
                    temp *= 57.297f; // convert radians to degrees

                    // Normalize the Heading to the range 0-360
                    while (temp > 360)
                        temp -= 360;
                    while (temp < 0)
                        temp += 360;

                    if ((temp < 45 || temp >= 315))
                    {
                        headingBins[0] += 1;
                    }
                    else if (temp >= 45 && temp < 135)
                    {
                        headingBins[1] += 1;
                    }
                    else if (temp >= 135 && temp < 225)
                    {
                        headingBins[2] += 1;
                    }
                    else if (temp >= 225 && temp < 315)
                    {
                        headingBins[3] += 1;
                    }
                    else
                    {
                        Console.WriteLine("ERROR: Unrecognized Heading! Something wrong (DirectionBC). What happen.");
                    }
                }

                // Now figure out which bin had the majority and assign the corresponding discrete value to the BC
                int majorityIndex = 0;
                int majorityCount = -1;
                for (int i = 0; i < headingBins.Length; i++)
                {
                    if (headingBins[i] > majorityCount)
                    {
                        majorityIndex = i;
                        majorityCount = headingBins[i];
                    }
                }

                BehaviorVector.Add(0.125 + (majorityIndex * 0.250));
            }

            return BehaviorVector;
        }
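The binning above counts the Heading samples in each time slice by quadrant and encodes the majority quadrant as a value centered in a quarter of [0, 1]. A minimal sketch of the mapping (bin boundaries restated from the code, example values hypothetical):

            // Bin 0: heading in [315, 360) or [0, 45) degrees -> BC value 0.125
            // Bin 1: heading in [45, 135)                     -> BC value 0.375
            // Bin 2: heading in [135, 225)                    -> BC value 0.625
            // Bin 3: heading in [225, 315)                    -> BC value 0.875
            int exampleMajorityIndex = 2;                                  // hypothetical winning bin for one chunk
            double exampleValue = 0.125 + (exampleMajorityIndex * 0.250);  // 0.625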
Example no. 14
0
 /// <summary>
 /// Creates a new Experiment object by copying an existing SimulatorExperiment.
 /// </summary>
 public Experiment(SimulatorExperiment experiment)
 {
     simExp             = experiment;
     _inputNeuronCount  = experiment.getNumCPPNInputs();
     _outputNeuronCount = experiment.getNumCPPNOutputs();
 }
		public abstract void initialize(Environment environment, SimulatorExperiment experiment, List<Robot> robots);
Example no. 16
0
 /// <summary>
 /// Creates a new Experiment object by copying an existing SimulatorExperiment.
 /// </summary>
 public Experiment(SimulatorExperiment experiment)
 {
     simExp = experiment;
     _inputNeuronCount = experiment.getNumCPPNInputs();
     _outputNeuronCount = experiment.getNumCPPNOutputs();
 }
        /// <summary>
        /// Records the individual's location at each tick of the simulation. 
        /// </summary>
        void IBehaviorCharacterization.update(SimulatorExperiment exp, instance_pack ip)
        {
            if (ip.robots[0].Stopped)
            {
                // If this is the first update after the robot has Stopped, 
                // set the endpoint to the current simulation tick
                if (endTick == 0)
                    endTick = Convert.ToInt32(ip.timeSteps * exp.timestep);
            }

            // initialize the Trajectory list
            if (!Initialized)
            {
                // initialize the sensor value sampling/storage components
                Trajectory = new List<int>(exp.evaluationTime * 2);
                Initialized = true;
            }

            // update the Trajectory at every tick
            Trajectory.Add(Convert.ToInt16(ip.robots[0].Location.X));
            Trajectory.Add(Convert.ToInt16(ip.robots[0].Location.Y));
        }
        /// <summary>
        /// Called on every simulation tick. If this is the first tick, initialize some instance variables. On every other tick, check to see if the robot is within stopping Distance of the goal.
        /// </summary>
        void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
        {
            // Initialization routines
            if (first)
            {
                first = false;

                if (DEBUG)
                {
                    // If debugging: Add POI at thirds along the border of the AOI rectangle, so we can arrange the maze properly
                    // This functionality helps align the maze with the MAP-Elites grid.
                    environment.POIPosition.Clear();
                    if (!DEBUG_CLEARONLY)
                    {
                        double dwidth = environment.AOIRectangle.Width / NumBinsPerDim;
                        double dheight = environment.AOIRectangle.Height / NumBinsPerDim;
                        double cornerx = environment.AOIRectangle.Left;
                        double cornery = environment.AOIRectangle.Top;
                        for (int x = 0; x <= NumBinsPerDim; x++)
                        {
                            for (int y = 0; y <= NumBinsPerDim; y++)
                            {
                                environment.POIPosition.Add(new Point((int)(cornerx + dwidth * x), (int)(cornery + dheight * y)));
                            }
                        }
                    }
                }

                // Compute the max possible Distance a robot can achieve from the goal point while staying in the AOI bounds
                double maxSoFar = 0;

                // Top left
                Point2D cornerWeCareAboutRightNow = new Point2D(environment.AOIRectangle.Left, environment.AOIRectangle.Top);
                double tempDist = environment.goal_point.distance(cornerWeCareAboutRightNow);
                if (tempDist > maxSoFar) maxSoFar = tempDist;

                // Top right
                cornerWeCareAboutRightNow = new Point2D(environment.AOIRectangle.Right, environment.AOIRectangle.Top);
                tempDist = environment.goal_point.distance(cornerWeCareAboutRightNow);
                if (tempDist > maxSoFar) maxSoFar = tempDist;

                // Bottom right
                cornerWeCareAboutRightNow = new Point2D(environment.AOIRectangle.Right, environment.AOIRectangle.Bottom);
                tempDist = environment.goal_point.distance(cornerWeCareAboutRightNow);
                if (tempDist > maxSoFar) maxSoFar = tempDist;

                // Bottom left
                cornerWeCareAboutRightNow = new Point2D(environment.AOIRectangle.Left, environment.AOIRectangle.Bottom);
                tempDist = environment.goal_point.distance(cornerWeCareAboutRightNow);
                if (tempDist > maxSoFar) maxSoFar = tempDist;

                // Define the Distance used to normalize fitness values (add a small buffer of 10 for breathing room)
                MaxDistanceToGoal = maxSoFar + 10;
            }

            if (ip.robots[0].Location.squaredDistance(environment.goal_point) < StoppingRange)
            {
                ip.robots[0].Stopped = true;
            }
            return;
        }
 /// <summary>
 /// Calculates the fitness of an individual based on Distance to the goal. Not compatible with multiagent teams.
 /// </summary>
 double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
 {
     objectives = null;
     return (MaxDistanceToGoal - ip.robots[0].Location.distance(environment.goal_point)) / MaxDistanceToGoal * 1000;
 }
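The fitness above scales linearly from 1000 (final position exactly on the goal) down toward 0 as the final Distance approaches MaxDistanceToGoal; the +10 buffer added during initialization keeps the score strictly positive anywhere inside the AOI. A worked sketch with hypothetical numbers:

     // Hypothetical values for illustration only.
     double maxDistanceToGoal = 510; // farthest AOI corner at distance 500, plus the +10 buffer
     double finalDistance = 255;     // robot stopped halfway to the worst case
     double fitness = (maxDistanceToGoal - finalDistance) / maxDistanceToGoal * 1000; // 500.0
     // A robot ending exactly on the goal (finalDistance == 0) would score 1000.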
Example no. 20
0
        /// <summary>
        /// Constructs the behavior vector at the end of an individual evaluation.
        /// </summary>
        List <double> IBehaviorCharacterization.calculate(SimulatorExperiment exp, instance_pack ip)
        {
            // Initialize the BC vector
            BehaviorVector = new List <double>();
            // If the robot never Stopped, set the max evaluation time as the end tick
            if (endTick == 0)
            {
                endTick = exp.evaluationTime - 1;
            }
            // Calculate when to perform an update
            chunkSize = Convert.ToInt32(Math.Floor((double)endTick / (double)NumIntervals));

            int[] headingBins;
            for (int chunkNum = 0; chunkNum < NumIntervals; chunkNum++)
            {
                // Reset the accumulators / Heading bins for this time slice.
                headingBins = new int[] { 0, 0, 0, 0 };
                double temp;
                // Fill the Heading bins for this time slice.
                for (int j = 0; j < chunkSize; j++)
                {
                    if ((chunkNum * chunkSize + j) >= HeadingValues.Count)
                    {
                        continue;
                    }
                    temp  = HeadingValues[chunkNum * chunkSize + j];
                    temp *= 57.297f; // convert radians to degrees

                    // Normalize the Heading to the range 0-360
                    while (temp > 360)
                    {
                        temp -= 360;
                    }
                    while (temp < 0)
                    {
                        temp += 360;
                    }

                    if ((temp < 45 || temp >= 315))
                    {
                        headingBins[0] += 1;
                    }
                    else if (temp >= 45 && temp < 135)
                    {
                        headingBins[1] += 1;
                    }
                    else if (temp >= 135 && temp < 225)
                    {
                        headingBins[2] += 1;
                    }
                    else if (temp >= 225 && temp < 315)
                    {
                        headingBins[3] += 1;
                    }
                    else
                    {
                        Console.WriteLine("ERROR: Unrecognized Heading! Something wrong (DirectionBC). What happen.");
                    }
                }

                // Now figure out which bin had the majority and assign the corresponding discrete value to the BC
                int majorityIndex = 0;
                int majorityCount = -1;
                for (int i = 0; i < headingBins.Length; i++)
                {
                    if (headingBins[i] > majorityCount)
                    {
                        majorityIndex = i;
                        majorityCount = headingBins[i];
                    }
                }

                BehaviorVector.Add(0.125 + (majorityIndex * 0.250));
            }

            return(BehaviorVector);
        }
        public SimulatorVisualizer(string experimentName, string genome)
        {
            frame = new CoordinateFrame(0.0f, 0.0f, 5.0f, 0.0f);

            ExperimentWrapper wr = ExperimentWrapper.load(experimentName);

            experiment = wr.experiment;
            if (genome != null)
            {
                experiment.loadGenome(genome);
            }

            experiment.initialize();

            frame.syncFromEnvironment(experiment.environment);

            selected_POI = -1;
            bDrawFOV = false;
            bDrawLabel = true;
            displayAOIRectangle = false;
            displayEvolution = true;

            bEvolve = false;                    //by default we are not evolving, just displaying the CurrentEnvironment
            drawMode = drawModes.selectMode;    //default mode is selecting and moving walls
            selectMode = selectModes.dragMode;

            display_tempwall = false;           //no temporary wall exists at creation
            display_snap = false;               //we have nothing to snap to at creation

            InitializeComponent();

            this.Text = "MultiAgent-HyperSharpNEAT Simulator - " + experimentName;
            //set up double buffering
            this.SetStyle(
              ControlStyles.AllPaintingInWmPaint |
              ControlStyles.UserPaint |
              ControlStyles.DoubleBuffer, true);
        }
 /// <summary>
 /// Constructs a new HyperNEATEvolver object from an existing SimulatorExperiment domain object.
 /// </summary>
 /// <param name="simExp"></param>
 public HyperNEATEvolver(SimulatorExperiment simExperiment)
 {
     SimExperiment            = simExperiment;
     experiment               = new Experiment(SimExperiment);
     NumTrajectoriesPerFolder = SimExperiment.populationSize;
 }
        //Display the best genome
        private void toggleDisplayToolStripMenuItem_Click(object sender, EventArgs e)
        {
            evolveMutex.WaitOne();

            if (evolveGenome != null)
            {
                NeatGenome genome;
                genome = new NeatGenome(evolveGenome, 0);
                experiment = new MultiAgentExperiment((MultiAgentExperiment)evolutionExperiment);
                experiment.bestGenomeSoFar = genome;
                experiment.genome = genome;
                experiment.initialize();
                //step_size = 0.01f;
                experiment.running = true;
                setRunningState();
            }

            evolveMutex.ReleaseMutex();
        }
Example no. 24
0
 void IBehaviorCharacterization.update(SimulatorExperiment exp, instance_pack ip)
 {
 }
Example no. 25
0
 void IBehaviorCharacterization.update(SimulatorExperiment exp, instance_pack ip) { }
Example no. 26
0
 public abstract void initialize(Environment environment, SimulatorExperiment experiment, List <Robot> robots);
Example no. 27
0
 /// <summary>
 /// Creates a new NetworkEvaluator object from the specified parameters.
 /// </summary>
 /// <param name="experiment">An experimental domain in which networks will be evaluated</param>
 public NetworkEvaluator(SimulatorExperiment experiment)
 {
     Experiment = experiment;
 }
 /// <summary>
 /// Creates a new NetworkEvaluator object from the specified parameters.
 /// </summary>
 /// <param name="experiment">An experimental domain in which networks will be evaluated</param>
 public NetworkEvaluator(SimulatorExperiment experiment)
 {
     Experiment = experiment;
 }
        /// <summary>
        /// Loads the environments specified in the experiment file.
        /// </summary>
        public static void loadEnvironments(SimulatorExperiment experiment)
        {
            experiment.environmentList.Clear();
            Environment scene = Environment.loadEnvironment(experiment.environmentName);
            experiment.environmentList.Add(scene);

            experiment.environment = scene;

            Console.Write("Looking for additional environments [" + scene.name + "] ... ");
            String filenamePrefix = scene.name.Substring(0, scene.name.Length - 4);
            int num = 1;
            String filename2 = filenamePrefix + num + ".xml";
            while (File.Exists(filename2))
            {
                Console.WriteLine("Found CurrentEnvironment: " + filename2 + "\n");
                experiment.environmentList.Add(Environment.loadEnvironment(filename2));
                num++;
                filename2 = filenamePrefix + num + ".xml";
            }
            Console.WriteLine("Done");
            Console.WriteLine(experiment.environmentList.Count.ToString() + " CurrentEnvironment(s) found.");
        }
 /// <summary>
 /// Calculates the fitness of an individual based on Distance to the goal. Not compatible with multiagent teams.
 /// </summary>
 double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
 {
     objectives = null;
     return((MaxDistanceToGoal - ip.robots[0].Location.distance(environment.goal_point)) / MaxDistanceToGoal * 1000);
 }
        /// <summary>
        /// Called on every simulation tick. If this is the first tick, initialize some instance variables. On every other tick, check to see if the robot is within stopping Distance of the goal.
        /// </summary>
        void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
        {
            // Initialization routines
            if (first)
            {
                first = false;

                if (DEBUG)
                {
                    // If debugging: Add POI at thirds along the border of the AOI rectangle, so we can arrange the maze properly
                    // This functionality helps align the maze with the MAP-Elites grid.
                    environment.POIPosition.Clear();
                    if (!DEBUG_CLEARONLY)
                    {
                        double dwidth  = environment.AOIRectangle.Width / NumBinsPerDim;
                        double dheight = environment.AOIRectangle.Height / NumBinsPerDim;
                        double cornerx = environment.AOIRectangle.Left;
                        double cornery = environment.AOIRectangle.Top;
                        for (int x = 0; x <= NumBinsPerDim; x++)
                        {
                            for (int y = 0; y <= NumBinsPerDim; y++)
                            {
                                environment.POIPosition.Add(new Point((int)(cornerx + dwidth * x), (int)(cornery + dheight * y)));
                            }
                        }
                    }
                }

                // Compute the max possible Distance a robot can achieve from the goal point while staying in the AOI bounds
                double maxSoFar = 0;

                // Top left
                Point2D cornerWeCareAboutRightNow = new Point2D(environment.AOIRectangle.Left, environment.AOIRectangle.Top);
                double  tempDist = environment.goal_point.distance(cornerWeCareAboutRightNow);
                if (tempDist > maxSoFar)
                {
                    maxSoFar = tempDist;
                }

                // Top right
                cornerWeCareAboutRightNow = new Point2D(environment.AOIRectangle.Right, environment.AOIRectangle.Top);
                tempDist = environment.goal_point.distance(cornerWeCareAboutRightNow);
                if (tempDist > maxSoFar)
                {
                    maxSoFar = tempDist;
                }

                // Bottom right
                cornerWeCareAboutRightNow = new Point2D(environment.AOIRectangle.Right, environment.AOIRectangle.Bottom);
                tempDist = environment.goal_point.distance(cornerWeCareAboutRightNow);
                if (tempDist > maxSoFar)
                {
                    maxSoFar = tempDist;
                }

                // Bottom left
                cornerWeCareAboutRightNow = new Point2D(environment.AOIRectangle.Left, environment.AOIRectangle.Bottom);
                tempDist = environment.goal_point.distance(cornerWeCareAboutRightNow);
                if (tempDist > maxSoFar)
                {
                    maxSoFar = tempDist;
                }

                // Define the Distance used to normalize fitness values (add a small buffer of 10 for breathing room)
                MaxDistanceToGoal = maxSoFar + 10;
            }

            if (ip.robots[0].Location.squaredDistance(environment.goal_point) < StoppingRange)
            {
                ip.robots[0].Stopped = true;
            }
            return;
        }
Example no. 32
0
        /// <summary>
        /// Records the individual's Heading at each tick of the simulation. 
        /// </summary>
        void IBehaviorCharacterization.update(SimulatorExperiment exp, instance_pack ip)
        {
            // If this is the first update, initialize the sensor value accumulator
            if (!Initialized)
            {
                HeadingValues = new List<double>(exp.evaluationTime + 1);
                Initialized = true;
            }

            if (ip.robots[0].Stopped)
            {
                // If this is the first update after the robot has Stopped, 
                // set the endpoint to be the current simulation tick
                if (endTick == 0)
                {
                    HeadingValues.Add(ip.robots[0].Heading); // JUSTIN: Sample on the last tick (first tick of being Stopped) to fix an array overflow error
                    endTick = Convert.ToInt32(ip.timeSteps * exp.timestep);
                }
            }
            else
            {
                // Sample the robot's Heading on this tick.
                HeadingValues.Add(ip.robots[0].Heading);
            }
        }
 /// <summary>
 /// Constructs a new HyperNEATEvolver object from an existing SimulatorExperiment domain object.
 /// </summary>
 /// <param name="simExp"></param>
 public HyperNEATEvolver(SimulatorExperiment simExperiment)
 {
     SimExperiment = simExperiment;
     experiment = new Experiment(SimExperiment);
     NumTrajectoriesPerFolder = SimExperiment.populationSize;
 }
Example no. 34
0
        public static void Main(string[] args)
        {
            #region Instance variables

            string folder = "";
            int    generations = 1001, populationSize = 0;
            int    numTrajectoriesPerFolder = 0;
            string experimentName           = null;
            bool   doMapElites              = false;
            bool   doMEnoveltyPressure      = false;
            bool   doNSLC                   = false;
            bool   doNS                   = false;
            bool   recordTrajectory       = false; // tracks Trajectory by dumping all trajectories to file
            bool   recordTrajectorySimple = false; // Used to track Trajectory on genomes WITHOUT dumping every single Trajectory to file (but it will dump to the mapelites grid if tracked, or just to champion genomes that are printed out)
            bool   recordEndpoints        = false;
            bool   doMapelitesStyleGrid   = false; // used to tell algorithms other than ME to keep track of a grid (in the style of mapelites) and report it at the end

            #endregion

            if (args.Length != 0 && args[0] == "-help")
            {
                showHelp();
                return;
            }

            if (args.Length != 0 && args[0] == "evolve")
            {
                for (int j = 1; j < args.Length; j++)
                {
                    if (j <= args.Length - 2)
                    {
                        switch (args[j])
                        {
                        case "-ns":     // Novelty Search 1.0 (this is novelty search with a large non-breeding archive kept on the side)
                            doNS = true;
                            break;

                        case "-nslc":     // Steady-State NSLC
                            doNSLC = true;
                            break;

                        case "-menovelty":
                            doMEnoveltyPressure = true;
                            break;

                        case "-mapelites":
                            doMapElites = true;
                            break;

                        case "-trackmegrid":
                            doMapelitesStyleGrid = true;
                            break;

                        case "-experiment":
                            experimentName = args[++j];
                            break;

                        case "-generations": if (!int.TryParse(args[++j], out generations))
                            {
                                Console.WriteLine("Invalid number of generations specified.");
                            }
                            break;

                        case "-folder": folder = args[++j];
                            Console.WriteLine("Attempting to output to folder " + folder);
                            break;

                        case "-recordTrajectory":
                            recordTrajectory = true;
                            break;

                        case "-recordTrajectorySimple":
                            recordTrajectory       = true;
                            recordTrajectorySimple = true;
                            break;

                        case "-NumTrajectoriesPerFolder":
                            numTrajectoriesPerFolder = Convert.ToInt32(args[++j]);
                            break;

                        case "-recordEndpoints":
                            recordEndpoints = true;
                            break;
                        }
                    }
                }

                if (experimentName == null)
                {
                    Console.WriteLine("Missing [experimentName].");
                    Console.WriteLine("See help \"-help\"");
                    return;
                }

                ExperimentWrapper   wr         = ExperimentWrapper.load(experimentName);
                SimulatorExperiment experiment = wr.experiment;

                if (populationSize != 0)
                {
                    experiment.populationSize = populationSize;
                }
                else
                {
                    populationSize = experiment.populationSize;
                }

                experiment.initialize();
                HyperNEATEvolver evolve = new HyperNEATEvolver(experiment);

                if (doNS)
                {
                    evolve.experiment.DefaultNeatParameters.NS1            = true;
                    evolve.experiment.DefaultNeatParameters.NS2            = true;
                    evolve.experiment.DefaultNeatParameters.NS2_archiveCap = evolve.experiment.DefaultNeatParameters.NS1_popsize;
                }

                if (doMapElites)
                {
                    evolve.experiment.DefaultNeatParameters.mapelites = true;
                    if (doMEnoveltyPressure)
                    {
                        evolve.experiment.DefaultNeatParameters.me_noveltyPressure = true;
                    }
                }

                if (doNSLC)
                {
                    evolve.experiment.DefaultNeatParameters.NS2_archiveCap = evolve.experiment.DefaultNeatParameters.NS1_popsize;
                    evolve.experiment.DefaultNeatParameters.NSLC           = true;
                }

                if (recordEndpoints)
                {
                    if (experiment is SimulatorExperiment)
                    {
                        ((SimulatorExperiment)experiment).recordEndpoints = true;
                    }
                    evolve.FinalPositionLogging = true;
                }

                if (recordTrajectory)
                {
                    if (experiment is SimulatorExperiment)
                    {
                        ((SimulatorExperiment)experiment).recordTrajectories = true;
                    }
                    evolve.TrajectoryLogging = true;

                    if (numTrajectoriesPerFolder != 0)
                    {
                        evolve.setNumTrajectoriesPerFolder(numTrajectoriesPerFolder);
                    }
                }
                if (recordTrajectorySimple)
                {
                    evolve.TrajectoryLogging = false;
                }

                if (doMapelitesStyleGrid)
                {
                    evolve.experiment.DefaultNeatParameters.track_me_grid = true;
                }

                evolve.setOutputFolder(folder);
                evolve.NEATBrain = experiment.neatBrain;
                evolve.initializeEvolution(populationSize);
                evolve.evolve(generations);
            }
            else // Run the GUI version.
            {
                experimentName = "QDExperiment.xml";
                SimulatorVisualizer vis = new SimulatorVisualizer(experimentName, null);
                vis.Refresh();
                Application.Run(vis);
                vis.Refresh();
            }
        }
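Based on the flags parsed above, a plausible headless run might be invoked as follows (the executable name is a placeholder; QDExperiment.xml is the same experiment file the GUI branch loads by default):

    Simulator.exe evolve -experiment QDExperiment.xml -mapelites -generations 1001 -folder results -recordEndpoints

Running the program with no arguments falls through to the GUI branch instead.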