Example #1
 public Experiment(SimulatorExperiment exp)
 {
     simExp           = exp;
     inputs           = exp.getNumCPPNInputs();
     outputs          = exp.getNumCPPNOutputs();
     outputsPerPolicy = exp.getNumCPPNOutputsPerModule();
 }
Example #2
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
        {
            double fitness = 0.0f;
            objectives = null;

            if (environment.name.Equals("ENV_dual_task.xml")) //HACK navigation
            {
                fitness = (1.0f - ip.robots[0].location.distance(new Point2D(environment.POIPosition[4].X, environment.POIPosition[4].Y)) / 650.0f);

                if (fitness < 0) fitness = 0.00001f;
                if (reachedGoal) fitness = 1;

                //fitness = 1;
            }
            else //food gathering
            {
                float distFood = (float)(1.0f - (engine.robots[0].location.distance(environment.goal_point) / environment.maxDistance));
                fitness = (collectedFood + distFood) / 4.0f;

                if (fitness < 0) fitness = 0.00001f;
                if (collectedFood >= 4) fitness = 1.0f;

                //fitness = 1;
            }

            return fitness;
        }
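The shaping above (repeated with a broader environment check in Example #12) is a normalized inverse distance: 1.0 at the target, 0 at the normalization scale, clamped to a small positive floor so weak genomes remain distinguishable. A minimal standalone sketch of the pattern, assuming the hard-coded 650.0f plays the role of the maximum relevant distance:

        // Hedged sketch of the shaping pattern used above; not part of the original source.
        // maxDistance stands in for the hard-coded 650.0f scale.
        static double ShapedDistanceFitness(double distToTarget, double maxDistance)
        {
            double f = 1.0 - distToTarget / maxDistance; // 1 at the target, 0 at max range
            return f < 0 ? 0.00001 : f;                  // clamp to a small positive floor
        }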
Example #3
 public Experiment(SimulatorExperiment exp)
 {
     simExp = exp;
     inputs = exp.getNumCPPNInputs();
     outputs = exp.getNumCPPNOutputs();
     outputsPerPolicy = exp.getNumCPPNOutputsPerModule();
 }
Example #4
        //characterizing behavior...
        List<double> IBehaviorCharacterization.calculate(SimulatorExperiment exp)
        {
            bool disabled = false;
            for (int i = 0; i < exp.robots.Count; i++)
            {
                if (exp.robots[i].disabled) disabled = true;
            }

            for (int i = 0; i < exp.robots.Count; i++)
            {
                double minx = 1000, miny = 1000, maxx = -1000, maxy = -1000;
                for (int j = 0; j < samples; j++)
                {
                    if (xc[i, j] < minx) minx = xc[i, j];
                    if (xc[i, j] > maxx) maxx = xc[i, j];
                    if (yc[i, j] < miny) miny = yc[i, j];
                    if (yc[i, j] > maxy) maxy = yc[i, j];
                }
                disabled = false; //disable for now...
                if (disabled)
                {
                    minx *= -0.1;
                    maxx *= -0.1;
                    miny *= -0.1;
                    maxy *= -0.1;
                }
                //bc.Add(minx);
                bc.Add(miny);
                //bc.Add(maxx);
                bc.Add(maxy);
            }

            //Console.WriteLine(bc.Count.ToString());
            return new List<double>(bc);
        }
Example #5
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
        {
            objectives    = new double[6];
            objectives[0] = (accum + 2.5 * stopaccum) / 1000.0;
            //objectives[0]=accum;
            //objectives[1]=stopaccum;

            double travel = 0.0;
            double delta  = 0.0;
            double sight  = 0.0;

            foreach (Robot r in ip.robots)
            {
                delta  += ((Khepera3RobotModel)r).totDelta;        //r.collisions; //sensorList[i];//engine.robots[i].collisions;
                sight  += ((Khepera3RobotModel)r).totSight;
                travel += r.dist_trav;
            }
            objectives[1] = -delta;            // + -sight; //-Math.Log(delta+1.0)/10.0;
            //objectives[2]= -Math.Log(sight+1.0)/10.0;
            //objectives[1]= -coll_count;
            //objectives[1]=travel;
            //Console.WriteLine(ip.robots.Count);
            double fitness = (accum + 3.5 * stopaccum) * (3.0 * (ip.robots.Count - 3)) - delta * 20 + 0.0001;

            return(fitness);
        }
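Here objectives[0] blends swept coverage (accum) with stopped coverage (stopaccum), and objectives[1] penalizes the robots' summed totDelta, a Khepera3RobotModel-specific disturbance measure. The scalar fitness weights the same coverage terms by 3.0 * (ip.robots.Count - 3), subtracts 20 * delta, and adds a 0.0001 offset so the result stays nonzero.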
Example #6
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
        {
            double fitness = 0.0f;

            //fitness += bonus;
            for (int i = 0; i < reachedPOI.Length; i++)
            {
                if (reachedPOI[i])
                {
                    fitness += 1.0f;
                }
                else
                {
                    fitness += (1.0f - ip.robots[0].location.distance(new Point2D(environment.POIPosition[i].X, environment.POIPosition[i].Y)) / 650.0f);
                    break;
                }
            }
            if (reachedGoal)
            {
                fitness = 10.0f;
            }

            objectives = null;
            return fitness;
        }
Example #7
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
        {
            double fitness = 0.0f;

            objectives = null;
            // Only acknowledge actual eating of food, not just proximity
            fitness = collectedFood / 4.0f;

            if (collectedFood >= 4)
            {
                fitness = 1.0f;
            }

            // Schrum
            // HACK: Both environments are the same, but this hack allows one to treat the food as poison
            bool poison = !environment.name.Equals("ENV_dual_task1.xml");

            // Extra aspect of the HACK: The first task loaded excludes its path from the name, but this is not
            // the case for the second task loaded. This is why the negation is used instead of looking up the second
            // task directly, which is named ENV_dual_task11.xml (convention of simulator)
            if (poison)
            {
                fitness *= -0.9;
            }

            // Schrum: For troubleshooting
            //Console.WriteLine(environment.name + " " + fitness + " " + poison);

            return(fitness);
        }
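Because the foraging score is capped at 1.0, the -0.9 factor turns any food collected in the poison environment into a penalty slightly smaller in magnitude than the equivalent reward, so ignoring the poison entirely (fitness 0) dominates eating it.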
Example #8
        public void update(SimulatorExperiment engine, Environment environment)
        {
            if (first)
            {
                distances = new double[environment.POIPosition.Count, engine.robots.Count];
                for (int v = 0; v < environment.POIPosition.Count; v++)
                {
                    for (int w = 0; w < engine.robots.Count; w++)
                    {
                        distances[v, w] = radius;
                    }
                }
                first = false;
                return;
            }

            double dist = 0;

            for (int k = 0; k < engine.robots.Count; k++)
            {
                Robot r = engine.robots[k];
                for (int j = 0; j < environment.POIPosition.Count; j++)
                {
                    dist = EngineUtilities.euclideanDistance(r.location, new Point2D(environment.POIPosition[j].X, environment.POIPosition[j].Y));
                    if (dist < distances[j, k])
                    {
                        distances[j, k] = dist;
                    }
                }
            }
        }
        void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
        {
            if (!(Experiment.timeSteps % (int)(1 / Experiment.timestep) == 0))
            {
                //grid.decay_viewed(0);
                return;
            }

            foreach (Robot r in ip.robots)
            {
                if (!r.autopilot)
                {
                    foreach (ISensor s in r.sensors)
                    {
                        if (s is SignalSensor)
                        {
                            SignalSensor ss = (SignalSensor)s;
                            double val = ss.get_value();
                            val += 0.05;
                            if (val > 1.0) val = 1.0;
                            ss.setSignal(val);
                        }
                    }
                }
            }

            double x1 = (double)environment.AOIRectangle.Left;
            double y1 = (double)environment.AOIRectangle.Top;
            double x2 = (double)environment.AOIRectangle.Right;
            double y2 = (double)environment.AOIRectangle.Bottom;
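            // The four segments below sample along the AOI boundary. Note that the
            // third and fourth calls retrace the right and top edges in reverse
            // rather than covering the bottom and left edges.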
            int steps = 10;
            accum += test_interpolation(ip, x1, y1, x2, y1, steps);
            accum += test_interpolation(ip, x2, y1, x2, y2, steps);
            accum += test_interpolation(ip, x2, y2, x2, y1, steps);
            accum += test_interpolation(ip, x2, y1, x1, y1, steps);
        }
        /*
         * //Loads a environment from an XML file and initializes it
         * public SimulatorExperiment load(string name)
         * {
         *  System.Xml.Serialization.XmlSerializer x = new System.Xml.Serialization.XmlSerializer(typeof(SimulatorExperiment));
         *  TextReader infile = new StreamReader(name);
         *  SimulatorExperiment e = (SimulatorExperiment)x.Deserialize(infile);
         *  infile.Close();
         *
         *  e.substrateDescription = new SubstrateDescription(e.substrateDescriptionFilename);
         *
         *  e.fitnessFunction = FitnessFunctionFactory.getFitnessFunction(e.fitnessFunctionName);
         *
         *  loadEnvironments(e);
         *
         *  return e;
         * }*/

        public static void loadEnvironments(SimulatorExperiment e)
        {
            //Only reload environment if it isn't loaded yet, otherwise changes to the environment are lost
            //TODO make sure
            if (e.environmentList.Count == 0)
            {
                //e.environmentList.Clear();
                Engine.Environment scene = Engine.Environment.loadEnvironment(e.environmentName);
                e.environmentList.Add(scene);

                e.environment = scene;

                Console.Write("Looking for additional environments [" + scene.name + "] ... ");
                String filenamePrefix = scene.name.Substring(0, scene.name.Length - 4);
                int    num            = 1;
                String filename2      = filenamePrefix + num + ".xml";
                while (File.Exists(filename2))
                {
                    Console.WriteLine("Found environment: " + filename2 + "\n");
                    e.environmentList.Add(Engine.Environment.loadEnvironment(filename2));
                    num++;
                    filename2 = filenamePrefix + num + ".xml";
                }
                Console.WriteLine("Done");
                Console.WriteLine(e.environmentList.Count.ToString() + " environment(s) found.");
            }
        }
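The loop above implies the naming convention for multi-environment experiments: the primary file's ".xml" suffix is stripped and numeric suffixes are appended, so "ENV_dual_task.xml" is followed by "ENV_dual_task1.xml", "ENV_dual_task2.xml", and so on until a file in the sequence is missing. This is the convention the poison hack in Example #7 relies on.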
Example #11
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
        {
            double fitness = 0.0f;

            //fitness += bonus;
            for (int i = 0; i < reachedPOI.Length; i++)
            {
                if (reachedPOI[i])
                {
                    fitness += 1.0f;
                }
                else
                {
                    fitness += (1.0f - ip.robots[0].location.distance(new Point2D(environment.POIPosition[i].X, environment.POIPosition[i].Y)) / 650.0f);
                    break;
                }
            }
            if (reachedGoal)
            {
                fitness = 10.0f;
            }

            objectives = null;
            return(fitness);
        }
Example #12
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
        {
            double fitness = 0.0f;
            objectives = null;

            // ENV_dual_task.xml is the hallway navigation environment of the dual task.
            // FourTasks-ENV2.xml is a direct copy of ENV_dual_task.xml, but the copy is
            // required to satisfy the way that the simulator runs multiple environments.
            if (environment.name.EndsWith("ENV_dual_task.xml") || environment.name.EndsWith("FourTasks-ENV2.xml")) //HACK navigation
            {
                fitness = (1.0f - ip.robots[0].location.distance(new Point2D(environment.POIPosition[4].X, environment.POIPosition[4].Y)) / 650.0f);

                if (fitness < 0) fitness = 0.00001f;
                if (reachedGoal) fitness = 1;

                //fitness = 1;
            }
            else //food gathering
            {
                float distFood = (float)(1.0f - (engine.robots[0].location.distance(environment.goal_point) / environment.maxDistance));
                fitness = (collectedFood + distFood) / 4.0f;

                if (fitness < 0) fitness = 0.00001f;
                if (collectedFood >= 4) fitness = 1.0f;

                //fitness = 1;
            }

            return fitness;
        }
Example #13
        public POIFIT_MO()
        {
            first = false;
            const int maxsamples = 100;
            const int maxrobots  = 10;

            reachGoal  = new bool[maxrobots];
            travelList = new List <int> [maxrobots];
            for (int i = 0; i < maxrobots; i++)
            {
                travelList[i] = new List <int>();
            }
            currentLocation = new int[maxrobots];
            endList         = new double[maxrobots];
            gotoList        = new double[maxrobots];
            origDist        = new double[maxrobots];
            origHeadings    = new double[maxrobots];

            turned       = new bool[maxrobots];
            reachList    = new bool[maxrobots];
            takenList    = new int[maxrobots];
            sensorList   = new double[maxrobots];
            theengine    = null;
            allClose     = false;
            allCorrected = false;
            this.reset0();
        }
Example #14
        void IFitnessFunction.update(SimulatorExperiment simExp, Environment environment, instance_pack ip)
        {
            // For brain switching by multitask.
            // Schrum: If not using preference neurons, and the current brain does not match the environment
            if (simExp.multibrain && !simExp.preferenceNeurons && ip.agentBrain.getBrainCounter() != currentEnvironment)
            {
                // Schrum: get the appropriate brain for this environment
                ip.agentBrain.switchBrains(currentEnvironment);
            }


            if (currentEnvironment == TASK_TEAM_PATROL)
            { // Team patrol
                teamPatrol.update(simExp, environment, ip);
            }
            else if (currentEnvironment == TASK_LONE_PATROL)
            { // Lone patrol
                lonePatrol.update(simExp, environment, ip);
            }
            else if (currentEnvironment == TASK_DUAL_TASK_HALLWAY || currentEnvironment == TASK_DUAL_TASK_FORAGE)
            { // Dual task
                dualTask.update(simExp, environment, ip);
            }
            else if (currentEnvironment == TASK_TWO_ROOMS)
            { // Two rooms
                twoRooms.update(simExp, environment, ip);
            }
            else
            {
                Console.WriteLine("Error! Unknown environment! " + environment.name + ":" + currentEnvironment);
                System.Environment.Exit(1);
            }
        }
Example #15
        //characterizing behavior...
        List <double> IBehaviorCharacterization.calculate(SimulatorExperiment exp)
        {
            double mult     = 1.0;
            bool   disabled = false;

            for (int i = 0; i < exp.robots.Count; i++)
            {
                if (exp.robots[i].disabled)
                {
                    disabled = true;
                }
            }
            if (disabled)
            {
                mult = -0.1;
            }

            for (int x = 0; x < dim; x++)
            {
                for (int y = 0; y < dim; y++)
                {
                    bc.Add((grid.grid[x, y].avg_idle + 1) / 10000.0 * mult);
                }
            }
            //	Console.WriteLine(bc.Count.ToString());
            return(new List <double>(bc));
        }
Example #16
        public void update(SimulatorExperiment engine, Environment environment)
        {
            if (first)
            {
                distances = new double[environment.POIPosition.Count, engine.robots.Count];
                for (int v = 0; v < environment.POIPosition.Count; v++)
                    for (int w = 0; w < engine.robots.Count; w++)
                        distances[v, w] = radius;
                first = false;
                return;
            }

            double dist = 0;
            for (int k = 0; k < engine.robots.Count; k++)
            {
                Robot r = engine.robots[k];
                for (int j = 0; j < environment.POIPosition.Count; j++)
                {
                    dist = EngineUtilities.euclideanDistance(r.location, new Point2D(environment.POIPosition[j].X, environment.POIPosition[j].Y));
                    if (dist < distances[j, k])
                        distances[j, k] = dist;
                }
            }
        }
Example #17
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
        {
            objectives = new double[6];
            double trackingFitness = 0.0f;

            if (avgLoc == null) return 1;
            /*
            for (int i = 0; i < reachedPOI.Length; i++)
            {
                if (reachedPOI[i])
                    trackingFitness += 1.0f;
                else
                {
                    double dist = avgLoc.distance(new Point2D((int)environment.POIPosition[i].X, (int)environment.POIPosition[i].Y));
                    trackingFitness += ((1.0f - (dist / environment.maxDistance)) * 0.5);
                    break;
                }
            }*/
            if (reachedGoal)
            {
                trackingFitness = 10.0f;
            }
            else
            {
                double dist = avgLoc.distance(environment.goal_point);
                trackingFitness += ((1.0f - (dist / environment.maxDistance)) * 0.5);
            }
            objectives[0] = trackingFitness;
            objectives[1] = inFormation;
            if (formHeight == 0.0) formHeight = 0.00001;

            return trackingFitness * 2 + inFormation * .35 + (10.0 / formHeight);
        }
Example #18
        public static double MAX_LONE_PATROL = 6000; // Observed in GECCO 2016 work

        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
        {
            if (currentEnvironment == TASK_TEAM_PATROL)
            { // Team patrol
                // Must normalize score
                return(teamPatrol.calculate(engine, environment, ip, out objectives) / MAX_TEAM_PATROL);
            }
            else if (currentEnvironment == TASK_LONE_PATROL)
            { // Lone patrol
                // Must normalize score
                return(lonePatrol.calculate(engine, environment, ip, out objectives) / MAX_LONE_PATROL);
            }
            else if (currentEnvironment == TASK_DUAL_TASK_HALLWAY || currentEnvironment == TASK_DUAL_TASK_FORAGE)
            { // Dual task
                // Both individual dual task fitness scores are already normalized
                return(dualTask.calculate(engine, environment, ip, out objectives));
            }
            else if (currentEnvironment == TASK_TWO_ROOMS)
            { // Two rooms
                // Score is already normalized to [0,1]
                return(twoRooms.calculate(engine, environment, ip, out objectives));
            }
            else
            {
                Console.WriteLine("Error! Unknown environment! " + environment.name + ":" + currentEnvironment);
                objectives = new double[0];
                System.Environment.Exit(1);
                return(-1000);
            }
        }
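Each patrol branch divides its raw score by a task-specific maximum (MAX_LONE_PATROL above; MAX_TEAM_PATROL is presumably defined alongside it) so that all four tasks contribute fitness on a comparable scale; the dual-task and two-rooms scores are already normalized to [0,1].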
Example #19
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] obj)
        {
            obj = new double[6];
            fitness = 0.000001;
            double go_sum = 1.0;
            double ret_sum = 1.0;
            double collide_count = 0;
            bool moved = true;

            //Checks to see if the robots have moved or turned since the signal has fired, meaning that they reacted to the signal
            for (int j = 0; j < ip.robots.Count; j++)
                if (turned[j] || origDist[j] - ip.robots[j].location.manhattenDistance(environment.goal_point) >= 5)
                {
                    continue;
                }
                else
                {
                    moved = false;
                    break;
                }

            if (!penalizeGettingToPoints)
                allClose = true;

            bool solve = true;
            double temp;
            if (!allClose || !moved) solve = false;
            for (int i = 0; i < ip.robots.Count; i++)
            {
                if (!reachGoal[i]) solve = false;
                if ((allClose && moved) || !penalizeSignalResponse)
                    fitness += gotoList[i];
                else
                    fitness += gotoList[i] / 10.0;
                temp = endList[i];
                //if ((penalizeCorrection && !allCorrected) || (penalizeGettingToPoints && !allClose) || (penalizeSignalResponse && !moved))
                if (penalizeCorrection && (!allCorrected || !allClose))
                    temp /= 100;
                else if (penalizeGettingToPoints && !allClose)
                    temp /= 100;
                //if(penalizeGettingToPoints && !allClose)
                //    temp/=100;
                //if(penalizeSignalResponse && !moved)
                //    temp/=10;
                fitness += temp;

                //Console.WriteLine(gotoList[i] + " " + endList[i]);
                go_sum *= (gotoList[i] + 0.01);
                ret_sum *= (endList[i] + 0.01);
                obj[i * 2] = 0;     //gotoList[i];
                obj[i * 2 + 1] = 0; //endList[i];
                collide_count += ip.robots[i].collisions; //sensorList[i];//engine.robots[i].collisions;
            }

            obj[0] = go_sum;
            obj[1] = ret_sum;
            obj[2] = -collide_count;
            if (solve) fitness += 100.0;
            return fitness;
        }
Example #20
        public double calculate(SimulatorExperiment engine, Environment environment, out double[] obj)
        {
            obj = null;
            if (first)
                return 0;

            double fit = 0;
            double one = radius, two = radius;
            for (int j = 0; j < environment.POIPosition.Count; j++)
            {
                one = radius;
                two = radius;
                for (int k = 0; k < engine.robots.Count; k++)
                {
                    if (distances[j, k] < radius)
                    {
                        if (distances[j, k] < one)
                        {
                            two = one;
                            one = distances[j, k];
                        }
                        else if (distances[j, k] < two)
                            two = distances[j, k];
                    }
                }
                if (one != radius && two != radius)
                    fit += radius - ((one + two) / 2.0);
                else if (one != radius)
                    fit += (radius - one) / 10.0;
            }
            return Math.Max(fit,0.001);
        }
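For each POI, one and two track the two smallest approach distances recorded in distances (filled in by the paired update method). Full credit, radius minus the average of the two, requires two distinct robots to have come within radius of the POI; a single approach earns only a tenth of the remaining margin.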
Example #21
 void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
 {
     foreach (Robot r in ip.robots)
     {
         double dx = r.location.x - r.old_location.x;
         latDist += dx;
     }
 }
Example #22
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
        {
            objectives = null;

            if (latDist < 0) return 0;

            return latDist;
        }
Example #23
        void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
        {
            // Initialize variables on first time step
            if (ip.timeSteps == 1)
            {
                livingCost = 0;
                rewards    = 0;
                // Fitness values must be positive. Therefore, a high positive fitness is assigned in advance,
                // and the cost of living subtracts from it.
                // time / steps is the number of actual time steps, and the cost is multiplied by number of enemies
                fitness = (Experiment.evaluationTime / Experiment.timestep) * Experiment.numEnemies;
            }

            // Find closest active prey
            bool  allCaptured = true; // becomes false if an active prey is found
            Robot evolved     = ip.robots[0];

            for (int i = 1; i < ip.robots.Count; i++)
            {
                // Assumes all but first robot are EnemyRobot instances
                EnemyRobot er = (EnemyRobot)ip.robots[i];
                if (!er.disabled) // Not captured yet
                {
                    //Console.WriteLine("Robot "+i+" not disabled");

                    allCaptured = false;
                    // The collisionWithEvolved bool should always be the primary means of detecting these
                    // collisions, but the other call is here as a precaution. This check is needed because
                    // when the evolved bot normally collides with the enemy, the "undo" method is called,
                    // which prevents the collision from being detected using normal means in this method.
                    // Fortunately, the collisionWithEvolved member is used to remember when this collision
                    // occurs.
                    if (er.collisionWithEvolved || EngineUtilities.collide(evolved, er))
                    { // Reward evolved bot for colliding with prey, and disable prey
                        er.disabled = true;
                        er.stopped  = true;
                        rewards    += PREY_REWARD;
                        fitness    += PREY_REWARD; // This is the value that matters
                        //Console.WriteLine("\treward:" + rewards + " from " + PREY_REWARD);
                    }
                    else
                    { // Each active prey punishes bot for not being captured yet
                        double distance = evolved.location.distance(er.location);
                        double cost     = distance / environment.maxDistance;
                        livingCost += cost;
                        fitness    -= cost; // This is the value that matters
                        //Console.WriteLine("\tCost: " + (distance / 1000.0) + " to be " + livingCost + " raw distance: " + distance);
                    }
                }
            }

            // End evaluation and stop accruing negative fitness if all prey are captured
            if (allCaptured)
            {                                                   // Disabling prevents further action
                ip.elapsed         = Experiment.evaluationTime; // force end time: only affects non-visual evaluation
                Experiment.running = false;                     // Ends visual evaluation
            }
        }
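A worked example of the fitness budget set up at ip.timeSteps == 1, using illustrative values not taken from the source (evaluationTime = 100, timestep = 0.1, numEnemies = 4): fitness starts at (100 / 0.1) * 4 = 4000. Each step, every uncaught prey subtracts its distance normalized by environment.maxDistance (at most 1.0), so over 1000 steps four uncaught prey can erase the whole budget, while each capture adds PREY_REWARD.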
Example #24
        //characterizing behavior...
        List<double> IBehaviorCharacterization.calculate(SimulatorExperiment exp, instance_pack i)
        {
            //Console.WriteLine(bc.Count.ToString());
            double[] obj;
            //TODO: fix eventually;
            //bc[0]=exp.fitnessFunction.calculate(exp,exp.environment,out obj);
            bc[0] = 0.0;
            return new List<double>(bc);
        }
Example #25
 //characterizing behavior...
 List <double> IBehaviorCharacterization.calculate(SimulatorExperiment exp, instance_pack i)
 {
     //	Console.WriteLine(bc.Count.ToString());
     double[] obj;
     //TODO: fix eventually;
     //bc[0]=exp.fitnessFunction.calculate(exp,exp.environment,out obj);
     bc[0] = 0.0;
     return(new List <double>(bc));
 }
Example #26
        void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
        {
            // Initialize variables on first time step
            if (ip.timeSteps == 1)
            {
                livingCost = 0;
                rewards = 0;
                // Fitness values must be positive. Therefore, a high positive fitness is assigned in advance,
                // and the cost of living subtracts from it.
                // time / steps is the number of actual time steps, and the cost is multiplied by number of enemies
                fitness = (Experiment.evaluationTime / Experiment.timestep) * Experiment.numEnemies;
            }

            // Find closest active prey
            bool allCaptured = true; // becomes false if an active prey is found
            Robot evolved = ip.robots[0];
            for (int i = 1; i < ip.robots.Count; i++)
            {
                // Assumes all but first robot are EnemyRobot instances
                EnemyRobot er = (EnemyRobot)ip.robots[i];
                if (!er.disabled) // Not captured yet
                {
                    //Console.WriteLine("Robot "+i+" not disabled");

                    allCaptured = false;
                    // The collisionWithEvolved bool should always be the primary means of detecting these
                    // collisions, but the other call is here as a precaution. This check is needed because
                    // when the evolved bot normally collides with the enemy, the "undo" method is called,
                    // which prevents the collision from being detected using normal means in this method.
                    // Fortunately, the collisionWithEvolved member is used to remember when this collision
                    // occurs.
                    if (er.collisionWithEvolved || EngineUtilities.collide(evolved, er))
                    { // Reward evolved bot for colliding with prey, and disable prey
                        er.disabled = true;
                        er.stopped = true;
                        rewards += PREY_REWARD;
                        fitness += PREY_REWARD; // This is the value that matters
                        //Console.WriteLine("\treward:" + rewards + " from " + PREY_REWARD);
                    }
                    else
                    { // Each active prey punishes bot for not being captured yet
                        double distance = evolved.location.distance(er.location);
                        double cost = distance / environment.maxDistance;
                        livingCost += cost;
                        fitness -= cost; // This is the value that matters
                        //Console.WriteLine("\tCost: " + (distance / 1000.0) + " to be " + livingCost + " raw distance: " + distance);
                    }
                }
            }

            // End evaluation and stop accruing negative fitness if all prey are captured
            if (allCaptured)
            { // Disabling prevents further action
                ip.elapsed = Experiment.evaluationTime; // force end time: only affects non-visual evaluation
                Experiment.running = false; // Ends visual evaluation
            }
        }
Example #27
 public override void Initialize(Environment e, SimulatorExperiment _exp, List <Robot> _rbts)
 {
     rbts = _rbts;
     exp  = _exp;
     if (_exp is MultiAgentExperiment)
     {
         agentCollide = ((MultiAgentExperiment)exp).agentsCollide;
         agentVisible = ((MultiAgentExperiment)exp).agentsVisible;
     }
     env = e;
 }
Example #28
        public void onTimeStep(SimulatorExperiment experiment)
        {
            //Disabled for now
            //TODO include again

            //tsteps++;
            //if(!all_in && eval.autopilot_count==0)
            //{
            //    eval.sim_engine.find_wall_by_name("door").visible=true;
            //    all_in=true;
            //}
        }
Example #29
        void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
        {
            if (!(ip.timeSteps % (int)(1 / ip.timestep) == 0))
            {
                //grid.decay_viewed(0);
                return;
            }
            bool   all_in   = true;
            double a_accum  = 0.00000001;
            double a_saccum = 0.00000001;

            foreach (Robot r in ip.robots)
            {
                if (!r.autopilot)
                {
                    foreach (ISensor s in r.sensors)
                    {
                        if (s is SignalSensor)
                        {
                            SignalSensor ss  = (SignalSensor)s;
                            double       val = ss.get_value();
                            val += 0.05;
                            if (val > 1.0)
                            {
                                val = 1.0;
                            }
                            ss.setSignal(val);
                        }
                    }
                }

                if ((environment.AOIRectangle.Contains((int)r.location.x, (int)r.location.y)))
                {
                    a_accum += 1.0 / (nearest(ip, r, environment));
                    if (r.corrected)
                    {
                        a_saccum += nearest(ip, r, environment);
                    }
                    //else
                    //	a_saccum+=1.0;
                }
                else
                {
                    all_in = false;
                }
            }

            if (all_in)
            {
                accum     += ((double)ip.robots.Count) / (a_accum);
                stopaccum += a_saccum / ((double)ip.robots.Count);
            }
        }
Example #30
        public void onTimeStep(SimulatorExperiment experiment)
        {
            //Disabled for now
            //TODO include again

            //tsteps++;
            //if(!all_in && eval.autopilot_count==0)
            //{
            //    eval.sim_engine.find_wall_by_name("door").visible=true;
            //    all_in=true;
            //}
        }
Example #31
        List<double> IBehaviorCharacterization.calculate(SimulatorExperiment engine)
        {
            Environment environment = engine.environment;

            //bc.Clear();
            for (int i = 0; i < engine.robots.Count; i++)
            {
                //bc.Add(gotoList[i]);
                //bc.Add(endList[i]);
            }

            return new List<double>(bc);
        }
Example #32
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, out double[] objectives)
        {
            objectives = null;
            double[] pmax = new double[3];
            double[] prog = new double[3];
            double fitness = 1.0;
            pmax[0] = 0;
            pmax[1] = 0;
            pmax[2] = 0;
            for (int i = 0; i < engine.robots.Count; i++)
            {
                int up_progress = -1;
                int up_type = 0;
                int down_progress = 1000;
                double progress = 0;

                bool debug = false;
                if (travelList[i].Count == 0) continue;
                int up_ind = 0;
                for (int z = 0; z < travelList[i].Count; z++)
                {
                    int d = travelList[i][z];
                    if (debug) Console.Write(d + " ");
                    if ((d % 3) > up_progress)
                    {
                        up_progress = d % 3;
                        up_ind = z;
                        up_type = d / 3;
                    }
                }
                if (debug) Console.WriteLine();
                for (int z = up_ind; z < travelList[i].Count; z++)
                {
                    int d = travelList[i][z];

                    if ((d % 3) < down_progress && (up_type == d / 3))
                    {
                        down_progress = d % 3;
                    }
                }
                progress = (up_progress + 1) + (up_progress - down_progress);
                //if(finishList[i]==1)
                progress += finishList[i] * 10.0;
                prog[i] = progress;
                if (progress > pmax[up_type])
                    pmax[up_type] = progress;
            }
            double limit = Math.Min(Math.Min(prog[0], prog[1]), prog[2]);
            double differential = Math.Max(Math.Max(prog[0] - limit, prog[1] - limit), prog[2] - limit);
            for (int i = 0; i < 3; i++)
            {
                fitness += pmax[i] * 5;
                fitness += prog[i];
                fitness -= differential * 2;
            }
            return Math.Max(0.00001, fitness);
        }
Example #33
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
        {
            objectives = new double[6];
            objectives[0] = accum;
            double travel = 0.0;
            foreach (Robot r in ip.robots)
            {
                travel += r.dist_trav;
                coll_count += r.collisions;
            }
            objectives[1] = stop_accum; //-collisions;
            return accum + stop_accum * 2.0;
        }
Example #34
        public override void Initialize(Environment e, SimulatorExperiment _exp, List<Robot> _rbts)
        {
            rbts = _rbts;
            exp = _exp;
            if (_exp is MultiAgentExperiment)
            {
                agentCollide = ((MultiAgentExperiment)exp).agentsCollide;
                agentVisible = ((MultiAgentExperiment)exp).agentsVisible;
            }
            env = e;
        }
Example #35
        //characterizing behavior...
        List<double> IBehaviorCharacterization.calculate(SimulatorExperiment exp)
        {
            bool disabled = false;
            for (int i = 0; i < exp.robots.Count; i++)
            {
                if (exp.robots[i].disabled) disabled = true;
            }
            int count = samples;
            for (int i = 0; i < exp.robots.Count; i++)
            {
                double sumx = 0.0, sumsqx = 0.0, sumy = 0.0, sumsqy = 0.0;
                for (int j = 0; j < samples; j++)
                {
                    double x = xc[i, j];
                    double y = yc[i, j];
                    sumx += x;
                    sumsqx += (x * x);
                    sumy += y;
                    sumsqy += (y * y);
                }
                double meanx = sumx / count;
                double meansqx = sumsqx / count;
                double meany = sumy / count;
                double meansqy = sumsqy / count;
                double varx = meansqx - (meanx * meanx);
                double vary = meansqy - (meany * meany);
                if (varx < 0) varx = 0;
                if (vary < 0) vary = 0;
                double stdx = Math.Sqrt(varx);
                double stdy = Math.Sqrt(vary);

                double disable_mult = 1.0;
                if (disabled)
                {
                    disable_mult = -0.1;
                }

                double[] list = { meanx * disable_mult, meany * disable_mult, stdx * disable_mult, stdy * disable_mult };

                int index = 0;
                for (index = 0; index < bc.Count; index += 4)
                {
                    if (!disabled && list[0] < bc[index])
                        break;
                    else if (disabled && list[0] > bc[index])
                        break;
                }
                bc.InsertRange(index, list);
            }

            /*
            for(int i=0;i<bc.Count;i++)
            Console.Write(bc[i].ToString()+ " ");
            Console.WriteLine();
            */
            return new List<double>(bc);
        }
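The characterization per robot is the mean and standard deviation of its sampled positions, computed as E[x^2] - E[x]^2 with negative variances from floating-point error clamped to zero. Each robot's 4-element block is insertion-sorted by mean x before entering bc, making the final vector independent of robot ordering; disabled robots contribute negated, scaled-down blocks sorted in the reverse direction.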
Example #36
        void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
        {
            // HACK: Both environments are the same, but this hack allows one to treat the food as poison
            bool poison = !environment.name.Equals("ENV_dual_task1.xml");

            if (Experiment.multibrain && !Experiment.preferenceNeurons && Experiment.numBrains == 2)
            {
                if (!poison) //forage
                {
                    ip.agentBrain.switchBrains(0);
                }
                else   //poison
                {
                    ip.agentBrain.switchBrains(1);
                }
            }

            //For food gathering
            if (ip.timeSteps == 1)
            {
                environment.goal_point.x = environment.POIPosition[0].X;
                environment.goal_point.y = environment.POIPosition[0].Y;

                collectedFood = 0;
                POINr         = 0;
            }

            // Schrum: Last sensor is for detecting when food/poison is eaten
            Robot        r = ip.robots[0]; // There should be only one robot in this task
            SignalSensor s = (SignalSensor)r.sensors[r.sensors.Count - 1];

            float d = (float)ip.robots[0].location.distance(environment.goal_point);

            if (d < 20.0f)
            {
                // Need to detect when food or poison is eaten
                s.setSignal(poison ? -1.0 : 1.0);

                collectedFood++;
                POINr++;
                if (POINr > 3)
                {
                    POINr = 0;
                }
                environment.goal_point.x = environment.POIPosition[POINr].X;
                environment.goal_point.y = environment.POIPosition[POINr].Y;
            }
            else
            {// Nothing eaten, so food/poison sensor is 0
                s.setSignal(0.0);
            }
        }
Example #37
        void IBehaviorCharacterization.update(SimulatorExperiment exp)
        {
            grid = ((GridCollision)((MultiAgentExperiment)exp).collisionManager).grid;
            dim = grid.coarseness;
            if (exp.timeSteps <= 1)
            {
                for (int x = 0; x < dim; x++)
                {
                    for (int y = 0; y < dim; y++)
                    {
                        int gx = (int)((double)x * grid.gridx) + (int)(grid.gridx / 2.0);
                        int gy = (int)((double)y * grid.gridy) + (int)(grid.gridy / 2.0);
                        grid.grid[x, y].viewed = 0.0f;
                        grid.grid[x, y].idleness = 0.0f;
                        grid.grid[x, y].avg_idle = 0.0f;
                    }
                }
            }

            //if(!(Experiment.timeSteps % 5 ==0))
            //{
            //    grid.decay_viewed(0.3);
            //    return;
            //}

            for (int x = 0; x < dim; x++)
            {
                for (int y = 0; y < dim; y++)
                {
                    int gx = (int)((double)x * grid.gridx) + (int)(grid.gridx / 2.0);
                    int gy = (int)((double)y * grid.gridy) + (int)(grid.gridy / 2.0);
                    if ((exp.environment.AOIRectangle.Contains(gx, gy)))
                    {
                        if (grid.grid[x, y].viewed >= 0.95f)
                        {
                            grid.grid[x, y].idleness = 0.0f;
                        }
                        else
                        {
                            if (grid.grid[x, y].idleness < 255)
                                grid.grid[x, y].idleness += 1.0f;

                            grid.grid[x, y].avg_idle += grid.grid[x, y].idleness;
                        }
                        //accum +=
                    }
                }
            }
        }
Example #38
        List <double> IBehaviorCharacterization.calculate(SimulatorExperiment engine)
        {
            Environment environment = engine.environment;

            //bc.Clear();
            for (int i = 0; i < engine.robots.Count; i++)
            {
                //bc.Add(gotoList[i]);
                //bc.Add(endList[i]);
            }

            return(new List <double>(bc));
        }
Example #39
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, out double[] obj)
        {
            obj = null;
            fitness = 0.000001;
            for (int i = 0; i < engine.robots.Count; i++)
            {
                fitness += gotoList[i];
                fitness += endList[i];
            }

            return fitness;
            //return Math.Max(0.00001,fitness);
        }
Example #40
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, out double[] obj)
        {
            obj     = null;
            fitness = 0.000001;
            for (int i = 0; i < engine.robots.Count; i++)
            {
                fitness += gotoList[i];
                fitness += endList[i];
            }

            return(fitness);
            //return Math.Max(0.00001,fitness);
        }
Example #41
        void IBehaviorCharacterization.update(SimulatorExperiment exp)
        {
            grid = ((GridCollision)((MultiAgentExperiment)exp).collisionManager).grid;
            dim  = grid.coarseness;
            if (exp.timeSteps <= 1)
            {
                for (int x = 0; x < dim; x++)
                {
                    for (int y = 0; y < dim; y++)
                    {
                        int gx = (int)((double)x * grid.gridx) + (int)(grid.gridx / 2.0);
                        int gy = (int)((double)y * grid.gridy) + (int)(grid.gridy / 2.0);
                        grid.grid[x, y].viewed   = 0.0f;
                        grid.grid[x, y].idleness = 0.0f;
                        grid.grid[x, y].avg_idle = 0.0f;
                    }
                }
            }

            //if(!(Experiment.timeSteps % 5 ==0))
            //{
            //    grid.decay_viewed(0.3);
            //    return;
            //}

            for (int x = 0; x < dim; x++)
            {
                for (int y = 0; y < dim; y++)
                {
                    int gx = (int)((double)x * grid.gridx) + (int)(grid.gridx / 2.0);
                    int gy = (int)((double)y * grid.gridy) + (int)(grid.gridy / 2.0);
                    if ((exp.environment.AOIRectangle.Contains(gx, gy)))
                    {
                        if (grid.grid[x, y].viewed >= 0.95f)
                        {
                            grid.grid[x, y].idleness = 0.0f;
                        }
                        else
                        {
                            if (grid.grid[x, y].idleness < 255)
                            {
                                grid.grid[x, y].idleness += 1.0f;
                            }

                            grid.grid[x, y].avg_idle += grid.grid[x, y].idleness;
                        }
                        //accum +=
                    }
                }
            }
        }
Example #42
        //characterizing behavior...
        List <double> IBehaviorCharacterization.calculate(SimulatorExperiment exp)
        {
            bool disabled = false;

            for (int i = 0; i < exp.robots.Count; i++)
            {
                if (exp.robots[i].disabled)
                {
                    disabled = true;
                }
            }

            for (int i = 0; i < exp.robots.Count; i++)
            {
                double minx = 1000, miny = 1000, maxx = -1000, maxy = -1000;
                for (int j = 0; j < samples; j++)
                {
                    if (xc[i, j] < minx)
                    {
                        minx = xc[i, j];
                    }
                    if (xc[i, j] > maxx)
                    {
                        maxx = xc[i, j];
                    }
                    if (yc[i, j] < miny)
                    {
                        miny = yc[i, j];
                    }
                    if (yc[i, j] > maxy)
                    {
                        maxy = yc[i, j];
                    }
                }
                disabled = false;       //disable for now...
                if (disabled)
                {
                    minx *= -0.1;
                    maxx *= -0.1;
                    miny *= -0.1;
                    maxy *= -0.1;
                }
                //bc.Add(minx);
                bc.Add(miny);
                //bc.Add(maxx);
                bc.Add(maxy);
            }

            //	Console.WriteLine(bc.Count.ToString());
            return(new List <double>(bc));
        }
Example #43
        void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
        {
            //Console.WriteLine(Experiment.multibrain + " && " + !Experiment.preferenceNeurons + " && " + (Experiment.numBrains == 2));
            // Schrum: Set which brain to use if the number is an experiment parameter
            // Schrum: Because the explicit brain switching only occurs when numBrains == 2, it will not register in FourTasks experiments using 5 brains
            if (Experiment.multibrain && !Experiment.preferenceNeurons && Experiment.numBrains == 2)
            {
                if (environment.name.EndsWith("ENV_dual_task.xml") || environment.name.EndsWith("FourTasks-ENV2.xml")) //HACK navigation
                {
                    ip.agentBrain.switchBrains(0);
                }
                else   //food gathering
                {
                    ip.agentBrain.switchBrains(1);
                }
            }

            //For navigation
            if (ip.robots[0].location.distance(new Point2D((int)environment.POIPosition[4].X, (int)environment.POIPosition[4].Y)) < 20.0f)
            {
                reachedGoal = true;
            }


            //For food gathering
            if (ip.timeSteps == 1)
            {
                environment.goal_point.x = environment.POIPosition[0].X;
                environment.goal_point.y = environment.POIPosition[0].Y;

                collectedFood = 0;
                POINr         = 0;
            }

            float d = (float)ip.robots[0].location.distance(environment.goal_point);

            if (d < 20.0f)
            {
                collectedFood++;
                POINr++;
                if (POINr > 3)
                {
                    POINr = 0;
                }
                environment.goal_point.x = environment.POIPosition[POINr].X;
                environment.goal_point.y = environment.POIPosition[POINr].Y;
            }

            //Console.WriteLine("reachedGoal = " + reachedGoal + ", d = " + d + ", goal = " + environment.goal_point);
        }
Example #44
        void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
        {
            // HACK: Both environments are the same, but this hack allows one to treat the food as poison
            bool poison = !environment.name.Equals("ENV_dual_task1.xml");

            if (Experiment.multibrain && !Experiment.preferenceNeurons && Experiment.numBrains == 2)
            {
                if (!poison) //forage
                {
                    ip.agentBrain.switchBrains(0);
                }
                else   //poison 
                {
                    ip.agentBrain.switchBrains(1);
                }
            }

            //For food gathering
            if (ip.timeSteps == 1)
            {
                environment.goal_point.x = environment.POIPosition[0].X;
                environment.goal_point.y = environment.POIPosition[0].Y;

                collectedFood = 0;
                POINr = 0;
            }

            // Schrum: Last sensor is for detecting when food/poison is eaten
            Robot r = ip.robots[0]; // There should be only one robot in this task
            SignalSensor s = (SignalSensor)r.sensors[r.sensors.Count - 1];

            float d = (float)ip.robots[0].location.distance(environment.goal_point);
            if (d < 20.0f)
            {
                // Need to detect when food or poison is eaten
                s.setSignal(poison ? -1.0 : 1.0);

                collectedFood++;
                POINr++;
                if (POINr > 3) POINr = 0;
                environment.goal_point.x = environment.POIPosition[POINr].X;
                environment.goal_point.y = environment.POIPosition[POINr].Y;

            }
            else
            {// Nothing eaten, so food/poison sensor is 0
                s.setSignal(0.0);
            }

        }
Example #45
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
        {
            objectives    = new double[6];
            objectives[0] = accum;
            double travel = 0.0;

            foreach (Robot r in ip.robots)
            {
                travel     += r.dist_trav;
                coll_count += r.collisions;
            }
            objectives[1] = stop_accum;            //-collisions;
            return(accum + stop_accum * 2.0);
        }
Example #46
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
        {
            objectives = new double[6];
            objectives[0] = 100000.0 / (accum + 1.0);
            objectives[0] *= 1000.0;
            double travel = 0.0;
            foreach (Robot r in ip.robots)
            {
                coll_count += r.collisions; //sensorList[i];//engine.robots[i].collisions;
                travel += r.dist_trav;
            }
            objectives[1] = -coll_count;
            //objectives[1]=travel;
            return objectives[0];
        }
Example #47
        void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
        {
            grid = ((GridCollision)(ip.collisionManager)).grid;

            if (!(ip.timeSteps % (int)(1 / Experiment.timestep) == 0))
            {
                return;
            }

            int dim = grid.coarseness;

            for (int x = 0; x < dim; x++)
            {
                for (int y = 0; y < dim; y++)
                {
                    int gx = (int)((double)x * grid.gridx) + (int)(grid.gridx / 2.0);
                    int gy = (int)((double)y * grid.gridy) + (int)(grid.gridy / 2.0);
                    if ((environment.AOIRectangle.Contains(gx, gy)))
                    {
                        accum      += grid.grid[x, y].viewed;
                        stop_accum += grid.grid[x, y].viewed2;
                    }
                }
            }

            foreach (Robot r in ip.robots)
            {
                if (!r.autopilot)
                {
                    foreach (ISensor s in r.sensors)
                    {
                        if (s is SignalSensor)
                        {
                            SignalSensor ss  = (SignalSensor)s;
                            double       val = ss.get_value();
                            val += 0.05;
                            if (val > 1.0)
                            {
                                val = 1.0;
                            }
                            ss.setSignal(val);
                        }
                    }
                }
            }


            grid.decay_viewed(0);
            //grid.decay_viewed(.95);
        }
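Assuming Experiment.timestep is the fraction of a second simulated per tick, the modulus guard runs the body once per simulated second. Each pass sums the viewed/viewed2 grid counters inside the AOI into accum and stop_accum, ramps every SignalSensor by 0.05 per pass (capped at 1.0), and then calls grid.decay_viewed(0), presumably clearing the per-interval viewed flags.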
Example #48
        List<double> IBehaviorCharacterization.calculate(SimulatorExperiment engine, instance_pack ip)
        {
            Environment environment = engine.environment;
            bc.Clear();
            double gotosum = 1.0;
            double endsum = 1.0;
            for (int i = 0; i < ip.robots.Count; i++)
            {
                bc.Add(gotoList[i]);
                bc.Add(endList[i]);
            }
            bc.Sort();
            return new List<double>(bc);
        }
Example #49
        //characterizing behavior...
        List<double> IBehaviorCharacterization.calculate(SimulatorExperiment exp)
        {
            double mult = 1.0;
            bool disabled = false;
            for (int i = 0; i < exp.robots.Count; i++)
            {
                if (exp.robots[i].disabled) disabled = true;
            }
            if (disabled) mult = -0.1;

            for (int x = 0; x < dim; x++)
                for (int y = 0; y < dim; y++)
                    bc.Add((grid.grid[x, y].avg_idle + 1) / 10000.0 * mult);
            //Console.WriteLine(bc.Count.ToString());
            return new List<double>(bc);
        }
        //int i = 0;
        void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
        {
            // Schrum: Debug: For comparing non-visual eval with visual
            // Prints out locations visited by all robots

            /*
             * for (int i = 0; i < ip.robots.Count; i++)
             * {
             *  Console.Write(ip.robots[i].location.x + "\t" + ip.robots[i].location.y + "\t");
             *  if (ip.robots[i] is EnemyRobot)
             *  {
             *      Console.Write(((EnemyRobot)ip.robots[i]).wallResponse + "\t" + ((EnemyRobot)ip.robots[i]).chaseResponse + "\t" + ip.robots[i].heading + "\t" + ((EnemyRobot)ip.robots[i]).angle + "\t" + ip.robots[i].collisions + "\t");
             *  }
             * }
             * Console.WriteLine();
             */
            /*
             * if (ip.robots[0].location.x != ((EnemyRobot)ip.robots[1]).getEvolved().location.x || ip.robots[0].location.y != ((EnemyRobot)ip.robots[1]).getEvolved().location.y)
             * {
             *  Console.WriteLine("Different locations:");
             *  Console.WriteLine("Robot 0: " + ip.robots[0].location);
             *  Console.WriteLine("Enemy's reference to evolved: " + ((EnemyRobot)ip.robots[1]).getEvolved().location);
             *  if (i++ > 5)
             *  {
             *      System.Windows.Forms.Application.Exit();
             *      System.Environment.Exit(1);
             *  }
             * }
             */
            if (ip.timeSteps == 1)
            {
                collided     = false;
                portionAlive = 0;
            }

            // Schrum2: Added to detect robot collisions and end the evaluation when they happen
            if (ip.robots[0].collisions > 0)
            { // Disabling prevents further action
                //Console.WriteLine("Collision");
                collided           = true;
                ip.elapsed         = Experiment.evaluationTime; // force end time: only affects non-visual evaluation
                Experiment.running = false;                     // Ends visual evaluation
            }
            else
            {
                portionAlive++;
            }
        }
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
        {
            objectives = new double[6];
            // First objective: shrinks hyperbolically as accum grows.
            objectives[0] = 100000.0 / (accum + 1.0) * 1000.0;

            double travel = 0.0;
            foreach (Robot r in ip.robots)
            {
                coll_count += r.collisions;
                travel += r.dist_trav;
            }
            // Second objective: penalize collisions (travel is tracked but unused).
            objectives[1] = -coll_count;
            return objectives[0];
        }
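
Since this fitness function also emits an objectives vector, a natural consumer is a Pareto-dominance comparison. A minimal sketch under the standard maximization convention (an assumption for illustration, not code from this repository):

    // Hypothetical: a dominates b when it is no worse in every objective
    // and strictly better in at least one (maximization assumed).
    static bool Dominates(double[] a, double[] b)
    {
        bool strictlyBetter = false;
        for (int i = 0; i < a.Length; i++)
        {
            if (a[i] < b[i]) return false;
            if (a[i] > b[i]) strictlyBetter = true;
        }
        return strictlyBetter;
    }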
Exemplo n.º 52
0
        void IBehaviorCharacterization.update(SimulatorExperiment exp, instance_pack ip)
        {
            // Record each robot's position once every sample_rate ticks.
            if (count % sample_rate == 0)
            {
                int rc = 0;
                foreach (Robot r in ip.robots)
                {
                    xc[rc, samples] = r.location.x;
                    yc[rc, samples] = r.location.y;
                    rc++;
                }
                samples++;
            }
            count++;
        }
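
Because samples advances once every sample_rate ticks, the xc/yc buffers must be sized to hold every sample the evaluation can produce. A sizing sketch with assumed values (totalSteps and robotCount are illustrative, not taken from this code):

    int totalSteps = 1000;   // assumed evaluation length in ticks
    int sample_rate = 5;     // record every 5th tick
    int robotCount = 4;      // assumed team size
    int maxSamples = totalSteps / sample_rate + 1;
    double[,] xc = new double[robotCount, maxSamples];
    double[,] yc = new double[robotCount, maxSamples];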
Exemplo n.º 53
0
        List <double> IBehaviorCharacterization.calculate(SimulatorExperiment engine, instance_pack ip)
        {
            Environment environment = engine.environment;

            bc.Clear();

            // Build the behavior vector from each robot's goto/end scores, then
            // sort so the characterization is order-invariant across robots.
            for (int i = 0; i < ip.robots.Count; i++)
            {
                bc.Add(gotoList[i]);
                bc.Add(endList[i]);
            }
            bc.Sort();
            return(new List <double>(bc));
        }
Exemplo n.º 54
0
        public override void Initialize(Environment e, SimulatorExperiment _exp, List<Robot> _rbts)
        {
            rbts = _rbts;
            env = e;
            exp = _exp;
            if (_exp is MultiAgentExperiment)
            {
                agentCollide = ((MultiAgentExperiment)_exp).agentsCollide;
                agentVisible = ((MultiAgentExperiment)_exp).agentsVisible;
            }
            grid = new collision_grid(e, coarseness);
            foreach (Wall w in env.walls)
            {
                grid.insert_into_grid(w);
            }
        }
Exemplo n.º 55
0
        public override void Initialize(Environment e, SimulatorExperiment _exp, List<Robot> _rbts)
        {
            rbts = _rbts;
            env = e;
            exp = _exp;
            if (_exp is MultiAgentExperiment)
            {
                agentCollide = ((MultiAgentExperiment)_exp).agentsCollide;
                agentVisible = ((MultiAgentExperiment)_exp).agentsVisible;
            }
            // Build the collision grid and register every wall in it.
            grid = new collision_grid(e, coarseness);
            foreach (Wall w in env.walls)
            {
                grid.insert_into_grid(w);
            }
        }
Exemplo n.º 56
0
        void IBehaviorCharacterization.update(SimulatorExperiment exp, instance_pack ip)
        {
            if (count % sample_rate == 0)
            {
                int rc = 0;
                foreach (Robot r in ip.robots)
                {
                    xc[rc, samples] = r.location.x;
                    yc[rc, samples] = r.location.y;
                    rc++;
                }
                samples++;
            }
            count++;
        }
Exemplo n.º 57
0
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
        {
            objectives = new double[6];
            double caughtFitness = preyCaught * 100.0;
            objectives[0] = caughtFitness;

            // Time bonus applies only if the task finished before time ran out.
            double timeFitness = 0;
            if (finished)
            {
                timeFitness = (double)engine.evaluationTime - finishTime;
            }
            if (timeFitness < 0)
            {
                timeFitness = 0.0;
            }
            objectives[1] = timeFitness;

            //double travelFitness = traveled * .0002;

            return caughtFitness + timeFitness * 2;  // +travelFitness;
        }
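
For intuition about the weighting, a quick numeric check (all values illustrative):

    // With two prey caught and an evaluation that finished at tick 800 of 1000:
    double preyCaught = 2, evaluationTime = 1000, finishTime = 800;
    double caughtFitness = preyCaught * 100.0;        // 200
    double timeFitness = evaluationTime - finishTime; // 200
    double fitness = caughtFitness + timeFitness * 2; // 600

Time saved is rewarded at twice the rate of elapsed ticks, while each prey caught is worth a flat 100.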
        //int i = 0;
        void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
        {
            // Schrum: Debug: For comparing non-visual eval with visual
            // Prints out locations visited by all robots
            /*
            for (int i = 0; i < ip.robots.Count; i++)
            {
                Console.Write(ip.robots[i].location.x + "\t" + ip.robots[i].location.y + "\t");
                if (ip.robots[i] is EnemyRobot)
                {
                    Console.Write(((EnemyRobot)ip.robots[i]).wallResponse + "\t" + ((EnemyRobot)ip.robots[i]).chaseResponse + "\t" + ip.robots[i].heading + "\t" + ((EnemyRobot)ip.robots[i]).angle + "\t" + ip.robots[i].collisions + "\t");
                }
            }
            Console.WriteLine();
            */
            /*
            if (ip.robots[0].location.x != ((EnemyRobot)ip.robots[1]).getEvolved().location.x || ip.robots[0].location.y != ((EnemyRobot)ip.robots[1]).getEvolved().location.y)
            {
                Console.WriteLine("Different locations:");
                Console.WriteLine("Robot 0: " + ip.robots[0].location);
                Console.WriteLine("Enemy's reference refr   to evolved: " + ((EnemyRobot)ip.robots[1]).getEvolved().location);
                if (i++ > 5)
                {
                    System.Windows.Forms.Application.Exit();
                    System.Environment.Exit(1);
                }
            }
            */
            if (ip.timeSteps == 1)
            {
                collided = false;
                portionAlive = 0;
            }

            // Schrum2: Added to detect robot collisions and end the evaluation when they happen
            if (ip.robots[0].collisions > 0)
            { // Disabling prevents further action
                //Console.WriteLine("Collision");
                collided = true;
                ip.elapsed = Experiment.evaluationTime; // force end time: only affects non-visual evaluation
                Experiment.running = false; // Ends visual evaluation
            }
            else
            {
                portionAlive++;
            }
        }
Exemplo n.º 59
0
        void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
        {
            incrementFitness(environment, ip);

            bool all = true;

            for (int i = 0; i < reachedPOI.Length; i++)
            {
                all = all && reachedPOI[i];
            }
            if (all)
            {
                return;      // Schrum: Done if all goals were reached
            }
            for (int i = 0; i < environment.POIPosition.Count; i++)
            {
                if (reachedPOI[i])
                {
                    continue; // Schrum: Once one POI has been reached, move to the next
                }
                else if (ip.robots[0].location.distance(new Point2D((int)environment.POIPosition[i].X, (int)environment.POIPosition[i].Y)) < 10.0f)
                {
                    reachedPOI[i] = true;
                    // Schrum: Only manually change brains if preference neurons are not used
                    // Schrum: Don't switch brains here if there are 5 brains, since this corresponds to the FourTasks experiments.
                    if (Experiment.multibrain && !Experiment.preferenceNeurons && Experiment.numBrains != 5)
                    {
                        if (ip.agentBrain.numBrains == 3)             // Schrum: Playing with special cases. Still need something more general.
                        {
                            int[] mapping = new int[] { 1, 2, 1, 0 }; // Mapping to the next module to use. Module 1 is repeated since it is for straight corridors.
                            ip.agentBrain.switchBrains(mapping[i]);
                        }
                        else
                        {                                      // Schrum: I'm not sure this option is actually used anywhere
                            ip.agentBrain.switchBrains(i + 1); // Schrum: Switch to next brain (one for each step of task)
                        }
                    }
                }
                break; // Schrum: Can't reach two at once, and must reach in order. Only "continue" can get past this
            }

            // Schrum: Once all POIs have been checked, the goal (returning) can be checked. Goal treated like extra POI
            if (reachedPOI[2] && ip.robots[0].location.distance(environment.goal_point) < 10.0f)
            {
                reachedPOI[3] = true;
            }
        }
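
To make the special-case mapping above concrete, this is how the module switches play out in the assumed three-brain corridor task (derived by reading the mapping array and its comment, not additional code from the repository):

    // mapping[i] is the module activated when POI i is reached:
    // POI 0 -> module 1, POI 1 -> module 2,
    // POI 2 -> module 1 (straight corridors reuse module 1),
    // POI 3 -> module 0.
    int[] mapping = new int[] { 1, 2, 1, 0 };
    for (int poi = 0; poi < mapping.Length; poi++)
    {
        Console.WriteLine("POI " + poi + " -> module " + mapping[poi]);
    }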
        void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment)
        {
            // Only sample once every (1 / timestep) ticks, i.e. roughly once
            // per simulated second.
            if (Experiment.timeSteps % (int)(1 / Experiment.timestep) != 0)
            {
                //grid.decay_viewed(0);
                return;
            }

            if (!allLeft)
            {
                // allOut is true only when every robot is inside the area of
                // interest; once that happens, allLeft latches on permanently.
                bool allOut = true;
                for (int j = 0; j < Experiment.robots.Count; j++)
                {
                    if (!environment.AOIRectangle.Contains((int)Experiment.robots[j].location.x, (int)Experiment.robots[j].location.y))
                    {
                        allOut = false;
                        break;
                    }
                }
                if (allOut)
                {
                    allLeft = true;
                }
            }

            grid = ((GridCollision)((MultiAgentExperiment)Experiment).collisionManager).grid;

            int dim = grid.coarseness;

            // Sum the "viewed" counter of every grid cell whose center point
            // falls inside the area of interest.
            for (int x = 0; x < dim; x++)
            {
                for (int y = 0; y < dim; y++)
                {
                    int gx = (int)((double)x * grid.gridx) + (int)(grid.gridx / 2.0);
                    int gy = (int)((double)y * grid.gridy) + (int)(grid.gridy / 2.0);
                    if (environment.AOIRectangle.Contains(gx, gy))
                    {
                        accum += grid.grid[x, y].viewed;
                    }
                }
            }

            //grid.decay_viewed(.9);
        }
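
The cell-center arithmetic above can be sanity-checked in isolation. A sketch assuming a 100x100 world split into 10 cells per axis, so grid.gridx = grid.gridy = 10 (values illustrative):

    double gridx = 10.0;  // assumed cell width
    for (int x = 0; x < 3; x++)
    {
        // Centers land at 5, 15, 25: the midpoint of each cell.
        int gx = (int)(x * gridx) + (int)(gridx / 2.0);
        Console.WriteLine("cell " + x + " center x = " + gx);
    }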