Code Example #1
File: poiMO.cs Project: jtglaze/IndependentWork2013
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] obj)
        {
            obj = new double[6];
            fitness = 0.000001;
            double go_sum = 1.0;
            double ret_sum = 1.0;
            double collide_count = 0;
            bool moved = true;

            //Checks to see if the robots have moved or turned since the signal has fired, meaning that they reacted to the signal
            for (int j = 0; j < ip.robots.Count; j++)
            {
                if (turned[j] || origDist[j] - ip.robots[j].location.manhattenDistance(environment.goal_point) >= 5)
                {
                    continue;
                }
                else
                {
                    moved = false;
                    break;
                }
            }

            if (!penalizeGettingToPoints)
                allClose = true;

            bool solve = true;
            double temp;
            if (!allClose || !moved) solve = false;
            for (int i = 0; i < ip.robots.Count; i++)
            {
                if(!reachGoal[i]) solve=false;
                if ((allClose && moved) || !penalizeSignalResponse)
                    fitness += gotoList[i];
                else
                    fitness += gotoList[i] / 10.0;
                temp = endList[i];
                //if ((penalizeCorrection && !allCorrected) || (penalizeGettingToPoints && !allClose) || (penalizeSignalResponse && !moved))
                if (penalizeCorrection && (!allCorrected || !allClose))
                    temp /= 100;
                else if (penalizeGettingToPoints && !allClose)
                    temp /= 100;
                //if(penalizeGettingToPoints && !allClose)
                //    temp/=100;
                //if(penalizeSignalResponse && !moved)
                //    temp/=10;
                fitness += temp;

                //Console.WriteLine(gotoList[i] + " " + endList[i]);
                go_sum *= (gotoList[i] + 0.01);
                ret_sum *= (endList[i] + 0.01);
                obj[i * 2] = 0; //gotoList[i];
                obj[i * 2 + 1] = 0; //endList[i];
                collide_count += ip.robots[i].collisions; //sensorList[i];//engine.robots[i].collisions;
            }

            obj[0] = go_sum;
            obj[1] = ret_sum;
            obj[2] = -collide_count;
            if (solve) fitness += 100.0;
            return fitness;
        }
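
Every calculate method on this page implements the same fitness-function interface from the simulator. For orientation, here is a hypothetical sketch of that interface, reconstructed only from the signatures that appear in these examples (update, calculate, and the copy/reset calls in Code Example #47); the actual declaration in the simulator source may differ.

        // Hypothetical reconstruction; not the simulator's actual declaration.
        public interface IFitnessFunction
        {
            // Called once per simulation time step to accumulate state.
            void update(SimulatorExperiment Experiment, Environment environment, instance_pack ip);

            // Called at the end of an evaluation; returns the scalar fitness
            // and fills a vector of objective scores for multi-objective search.
            double calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives);

            // Used by evaluateNetwork in Code Example #47.
            IFitnessFunction copy();
            void reset();
        }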
Code Example #2
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
        {
            objectives = new double[6];
            double trackingFitness = 0.0f;

            if (avgLoc == null) return 1;
            /*
            for (int i = 0; i < reachedPOI.Length; i++)
            {
                if (reachedPOI[i])
                    trackingFitness += 1.0f;
                else
                {
                    double dist = avgLoc.distance(new Point2D((int)environment.POIPosition[i].X, (int)environment.POIPosition[i].Y));
                    trackingFitness += ((1.0f - (dist / environment.maxDistance)) * 0.5);
                    break;
                }
            }*/
            if (reachedGoal)
            {
                trackingFitness = 10.0f;
            }
            else
            {
                double dist = avgLoc.distance(environment.goal_point);
                trackingFitness += ((1.0f - (dist / environment.maxDistance)) * 0.5);
            }
            objectives[0] = trackingFitness;
            objectives[1] = inFormation;
            if (formHeight == 0.0) formHeight = 0.00001;

            return trackingFitness * 2 + inFormation * .35 + (10.0 / formHeight);
        }
Code Example #3
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
        {
            double fitness = 0.0f;
            objectives = null;

            // ENV_dual_task.xml is the hallway navigation environment of the dual task
            // FourTasks-ENV2.xml is a direct copy of ENV_dual_task.xml, but the copy is
            // required to satisfy the way that the simulator runs multiple environments.
            if (environment.name.EndsWith("ENV_dual_task.xml") || environment.name.EndsWith("FourTasks-ENV2.xml")) //HACK navigation
            {
                fitness = (1.0f - ip.robots[0].location.distance(new Point2D(environment.POIPosition[4].X, environment.POIPosition[4].Y)) / 650.0f);

                if (fitness < 0) fitness = 0.00001f;
                if (reachedGoal) fitness = 1;

                //  fitness = 1;
            }
            else // food gathering
            {
                float distFood = (float)(1.0f - (engine.robots[0].location.distance(environment.goal_point) / environment.maxDistance));
                fitness = (collectedFood + distFood) / 4.0f;

                if (fitness < 0) fitness = 0.00001f;
                if (collectedFood >= 4) fitness = 1.0f;

                //  fitness = 1;
            }

            return fitness;
        }
Code Example #4
        // Schrum: This method executes a single time step. It is called repeatedly during an evaluation
        override protected bool runEnvironment(Environment e, instance_pack ip, System.Threading.Semaphore sem)
        {
            // Schrum: ip is null initially, but should not be after first step
            if (ip != null)
            {
                //Console.WriteLine("Update ip");
                // Make sure enemies have accurate reference for evolved bot
                foreach(EnemyRobot r in enemies){
                    r.evolved = ip.robots[0];
                }
            }
            //else Console.WriteLine("IP NULL!");
            // Then run the environment as normal
            bool result = base.runEnvironment(e, ip, sem);

            // Schrum: To avoid a memory leak, remove references to the evolved bot from the enemies
            if (ip != null) // Only do when ip is not null
            { // ip is used during evolution, but not during visual post-eval
                foreach (EnemyRobot r in enemies)
                {
                    r.evolved = null;
                }
            }

            return result;
        }
Code Example #5
        // Schrum: This method executes a single time step. It is called repeatedly during an evaluation
        override protected bool runEnvironment(Environment e, instance_pack ip, System.Threading.Semaphore sem)
        {
            // Schrum: ip is null initially, but should not be after first step
            if (ip != null)
            {
                //Console.WriteLine("Update ip");
                // Make sure enemies have accurate reference for evolved bot
                foreach (EnemyRobot r in enemies)
                {
                    r.evolved = ip.robots[0];
                }
            }
            //else Console.WriteLine("IP NULL!");
            // Then run the environment as normal
            bool result = base.runEnvironment(e, ip, sem);

            // Schrum: To avoid a memory leak, remove references to the evolved bot from the enemies
            if (ip != null) // Only do when ip is not null
            {               // ip is used during evolution, but not during visual post-eval
                foreach (EnemyRobot r in enemies)
                {
                    r.evolved = null;
                }
            }

            return(result);
        }
Code Example #6
 void incrementFitness(Environment environment, instance_pack ip)
 {
     // Schrum: Added to avoid magic numbers and make purpose clear
     float MAX_DISTANCE = 300.0f;
     // Schrum: Last POI is actually goal: Return to start
     for (int i = 0; i < reachedPOI.Length; i++)
     {
         if (reachedPOI[i])
         {
             fitness += 1.0f;
         }
         else if (i == 3) { // Schrum: Hack: Last POI is actually the goal
             double gain = (1.0f - ip.robots[0].location.manhattenDistance(environment.goal_point) / MAX_DISTANCE);
             //Console.WriteLine("Goal Gain = " + gain);
             fitness += gain;
             break;
         }
         else
         {
             // Schrum: From HardMaze
             //fitness += (1.0f - ip.robots[0].location.distance(new Point2D(environment.POIPosition[i].X, environment.POIPosition[i].Y)) / 650.0f);
             // Schrum: Guessing at distance to divide by
             // Schrum: Switched to Manhattan distance since Team Patrol uses it
             double gain = (1.0f - ip.robots[0].location.manhattenDistance(new Point2D(environment.POIPosition[i].X, environment.POIPosition[i].Y)) / MAX_DISTANCE);
             //Console.WriteLine("Gain = " + gain);
             fitness += gain;
             break;
         }
     }
 }
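
The distance-based shaping above maps a Manhattan distance into the range (-inf, 1]: the gain is 1.0 at the POI, 0.0 at MAX_DISTANCE, and negative beyond it. A standalone sanity check of that arithmetic, using hypothetical distances:

        using System;

        class GainCheck
        {
            static void Main()
            {
                float MAX_DISTANCE = 300.0f; // same constant as in incrementFitness above
                double[] distances = { 0.0, 150.0, 300.0, 450.0 }; // hypothetical Manhattan distances
                foreach (double dist in distances)
                {
                    double gain = 1.0f - dist / MAX_DISTANCE;
                    Console.WriteLine("distance " + dist + " -> gain " + gain); // 1, 0.5, 0, -0.5
                }
            }
        }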
Code Example #7
        double nearest(instance_pack ip, Robot robot, Environment env)
        {
            double  nd    = 1000000.0;
            bool    found = false;
            Point2D p     = robot.location;

            foreach (Robot r in ip.robots)
            {
                if (r != robot && env.AOIRectangle.Contains((int)r.location.x, (int)r.location.y))
                {
                    double d = r.location.distance(p);
                    if (d < nd)
                    {
                        nd = d;
                    }
                    found = true;
                    //nd+=d;
                }
            }

            if (found)
            {
                return(nd);
            }
            else
            {
                return(0.0);
            }
        }
Code Example #8
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
        {
            double fitness = 0.0f;

            //fitness += bonus;
            for (int i = 0; i < reachedPOI.Length; i++)
            {
                if (reachedPOI[i])
                {
                    fitness += 1.0f;
                }
                else
                {
                    fitness += (1.0f - ip.robots[0].location.distance(new Point2D(environment.POIPosition[i].X, environment.POIPosition[i].Y)) / 650.0f);
                    break;
                }
            }
            if (reachedGoal)
            {
                fitness = 10.0f;
            }

            objectives = null;
            return fitness;
        }
Code Example #9
        public override void draw(Graphics g, CoordinateFrame scale)
        {
            if (collisionManager is GridCollision)
            {
                GridCollision gc = (GridCollision)collisionManager;
                gc.draw(g, scale);
            }


            if (environment != null)
            {
                environment.draw(g, scale);
            }

            foreach (Robot robot in robots)
            {
                robot.draw(g, scale);
            }
            double[] obj = null;

            instance_pack ip = new instance_pack();

            ip.robots           = robots;
            ip.env              = environment;
            ip.timeSteps        = this.timeSteps;
            ip.agentBrain       = agentBrain;
            ip.collisionManager = collisionManager;
            ip.elapsed          = this.elapsed;
            ip.ff       = this.fitnessFunction;
            ip.bc       = this.behaviorCharacterization;
            ip.timestep = timestep;

            g.DrawString("Fitness: " + this.fitnessFunction.calculate(this, this.environment, ip, out obj), new Font("Tahoma", 12), Brushes.Black, 10, 90);
            g.DrawString("Elapsed time: " + this.elapsed, new Font("Tahoma", 12), Brushes.Black, 10, 60);
        }
Code Example #10
 void incrementFitness(Environment environment, instance_pack ip) 
 {
     // Schrum: Added to avoid magic numbers and make purpose clear
     float MAX_DISTANCE = 300.0f;
     // Schrum: Last POI is actually goal: Return to start
     for (int i = 0; i < reachedPOI.Length; i++)
     {
         if (reachedPOI[i])
         {
             fitness += 1.0f;
         }
         else if (i == 3) { // Schrum: Hack: Last POI is actually the goal
             double gain = (1.0f - ip.robots[0].location.manhattenDistance(environment.goal_point) / MAX_DISTANCE);
             //Console.WriteLine("Goal Gain = " + gain);
             fitness += gain;
             break;
         }
         else
         {
             // Schrum: From HardMaze
             //fitness += (1.0f - ip.robots[0].location.distance(new Point2D(environment.POIPosition[i].X, environment.POIPosition[i].Y)) / 650.0f);
             // Schrum: Guessing at distance to divide by
             // Schrum: Switched to Manhattan distance since Team Patrol uses it
             double gain = (1.0f - ip.robots[0].location.manhattenDistance(new Point2D(environment.POIPosition[i].X, environment.POIPosition[i].Y)) / MAX_DISTANCE);
             //Console.WriteLine("Gain = " + gain);
             fitness += gain;
             break;
         }
     }
 }
Code Example #11
        void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
        {
            if (!(Experiment.timeSteps % (int)(1 / Experiment.timestep) == 0))
            {
                //grid.decay_viewed(0);
                return;
            }

            // Ramp each robot's signal sensor up toward 1.0 over time
            foreach (Robot r in ip.robots)
            {
                if (!r.autopilot)
                {
                    foreach (ISensor s in r.sensors)
                    {
                        if (s is SignalSensor)
                        {
                            SignalSensor ss = (SignalSensor)s;
                            double val = ss.get_value();
                            val += 0.05;
                            if (val > 1.0) val = 1.0;
                            ss.setSignal(val);
                        }
                    }
                }
            }

            double x1 = (double)environment.AOIRectangle.Left;
            double y1 = (double)environment.AOIRectangle.Top;
            double x2 = (double)environment.AOIRectangle.Right;
            double y2 = (double)environment.AOIRectangle.Bottom;
            int steps = 10;
            // Accumulate nearest-robot distances sampled along segments of the
            // AOI rectangle; note that only the top and right edges are traced
            // (each twice, once in each direction), never the bottom or left.
            accum += test_interpolation(ip, x1, y1, x2, y1, steps);
            accum += test_interpolation(ip, x2, y1, x2, y2, steps);
            accum += test_interpolation(ip, x2, y2, x2, y1, steps);
            accum += test_interpolation(ip, x2, y1, x1, y1, steps);
        }
Code Example #12
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
        {
            double fitness = 0.0f;
            objectives = null;

            if (environment.name.Equals("ENV_dual_task.xml")) //HACK navigation
            {
                fitness = (1.0f - ip.robots[0].location.distance(new Point2D(environment.POIPosition[4].X, environment.POIPosition[4].Y)) / 650.0f);

                if (fitness < 0) fitness = 0.00001f;
                if (reachedGoal) fitness = 1;

                //  fitness = 1;
            }
            else // food gathering
            {
                float distFood = (float)(1.0f - (engine.robots[0].location.distance(environment.goal_point) / environment.maxDistance));
                fitness = (collectedFood + distFood) / 4.0f;

                if (fitness < 0) fitness = 0.00001f;
                if (collectedFood >= 4) fitness = 1.0f;

                //  fitness = 1;
            }

            return fitness;
        }
Code Example #13
 void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
 {
     foreach (Robot r in ip.robots)
     {
         double dx = r.location.x - r.old_location.x;
         latDist += dx;
     }
 }
Code Example #14
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
        {
            objectives = null;

            if (latDist < 0) return 0;

            return latDist;
        }
Code Example #15
File: FitnessBC.cs Project: jal278/agent_multimodal
        //characterizing behavior...
        List<double> IBehaviorCharacterization.calculate(SimulatorExperiment exp, instance_pack i)
        {
            //	Console.WriteLine(bc.Count.ToString());
            double[] obj;
            //TODO: fix eventually;
            //bc[0]=exp.fitnessFunction.calculate(exp,exp.environment,out obj);
            bc[0] = 0.0;
            return new List<double>(bc);
        }
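
The behavior-characterization examples follow a parallel interface. Again a hypothetical sketch, reconstructed from the signatures used on this page (update in Code Example #25, calculate above, and the copy/reset calls in Code Example #47); the real declaration may differ.

        using System.Collections.Generic;

        // Hypothetical reconstruction; not the simulator's actual declaration.
        public interface IBehaviorCharacterization
        {
            // Called during the simulation to sample behavior (e.g., robot positions).
            void update(SimulatorExperiment exp, instance_pack ip);

            // Called after an evaluation; returns the behavior descriptor vector.
            List<double> calculate(SimulatorExperiment exp, instance_pack ip);

            // Used by evaluateNetwork in Code Example #47.
            IBehaviorCharacterization copy();
            void reset();
        }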
Code Example #16
        void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
        {
            // Initialize variables on first time step
            if (ip.timeSteps == 1)
            {
                livingCost = 0;
                rewards = 0;
                // Fitness values must be positive. Therefore, a high positive fitness is assigned in advance,
                // and the cost of living subtracts from it.
                // evaluationTime / timestep is the number of actual time steps, and the cost is multiplied by the number of enemies
                fitness = (Experiment.evaluationTime / Experiment.timestep) * Experiment.numEnemies;
            }

            // Find closest active prey
            bool allCaptured = true; // becomes false if an active prey is found
            Robot evolved = ip.robots[0];
            for (int i = 1; i < ip.robots.Count; i++)
            {
                // Assumes all but first robot are EnemyRobot instances
                EnemyRobot er = (EnemyRobot)ip.robots[i];
                if (!er.disabled) // Not captured yet
                {
                    //Console.WriteLine("Robot "+i+" not disabled");

                    allCaptured = false;
                    // The collisionWithEvolved bool should always be the primary means of detecting these
                    // collisions, but the other call is here as a precaution. This check is needed because
                    // when the evolved bot normally collides with the enemy, the "undo" method is called,
                    // which prevents the collision from being detected using normal means in this method.
                    // Fortunately, the collisionWithEvolved member is used to remember when this collision
                    // occurs.
                    if (er.collisionWithEvolved || EngineUtilities.collide(evolved, er))
                    { // Reward evolved bot for colliding with prey, and disable prey
                        er.disabled = true;
                        er.stopped = true;
                        rewards += PREY_REWARD;
                        fitness += PREY_REWARD; // This is the value that matters
                        //Console.WriteLine("\treward:" + rewards + " from " + PREY_REWARD);
                    }
                    else
                    { // Each active prey punishes the bot for not being captured yet
                        double distance = evolved.location.distance(er.location);
                        double cost = distance / environment.maxDistance;
                        livingCost += cost;
                        fitness -= cost; // This is the value that matters
                        //Console.WriteLine("\tCost: " + (distance / 1000.0) + " to be " + livingCost + " raw distance: " + distance);
                    }
                }
            }

            // End evaluation and stop accruing negative fitness if all prey are captured
            if (allCaptured)
            { // Disabling prevents further action
                ip.elapsed = Experiment.evaluationTime; // force end time: only affects non-visual evaluation
                Experiment.running = false; // Ends visual evaluation
            }
        }
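
A quick check of why the pre-assigned budget above can never go negative: each of the evaluationTime / timestep steps subtracts at most numEnemies costs, and each cost is distance / maxDistance <= 1.0, so the initial budget exactly covers the worst case. A standalone sketch with hypothetical experiment settings:

        using System;

        class BudgetCheck
        {
            static void Main()
            {
                double evaluationTime = 100.0; // hypothetical settings
                double timestep = 0.05;
                int numEnemies = 2;

                // Budget assigned on the first time step, as in update() above
                double fitness = (evaluationTime / timestep) * numEnemies;

                // Worst case: every prey sits at maxDistance (cost 1.0) on every step
                int steps = (int)(evaluationTime / timestep);
                for (int t = 0; t < steps; t++)
                    fitness -= numEnemies * 1.0;

                Console.WriteLine(fitness); // 0 -- the budget exactly covers the worst case
            }
        }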
Code Example #17
 //characterizing behavior...
 List <double> IBehaviorCharacterization.calculate(SimulatorExperiment exp, instance_pack i)
 {
     //	Console.WriteLine(bc.Count.ToString());
     double[] obj;
     //TODO: fix eventually;
     //bc[0]=exp.fitnessFunction.calculate(exp,exp.environment,out obj);
     bc[0] = 0.0;
     return(new List <double>(bc));
 }
Code Example #18
        double nearest(instance_pack ip, double x, double y)
        {
            double nd = 10000000.0;
            Point2D p = new Point2D(x, y);
            foreach (Robot r in ip.robots)
            {
                double d = r.location.distance(p);
                if (d < nd) nd = d;
            }
            return nd;
        }
Code Example #19
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
        {
            objectives = new double[6];
            objectives[0] = accum;
            double travel = 0.0;
            foreach (Robot r in ip.robots)
            {
                travel += r.dist_trav;
                coll_count += r.collisions;
            }
            objectives[1] = stop_accum; //-collisions;
            return accum + stop_accum * 2.0;
        }
Code Example #20
        double test_interpolation(instance_pack ip, double x1, double y1, double x2, double y2, int steps)
        {
            double acc = 0.0;
            double ix = x1;
            double iy = y1;
            double dx = (x2 - x1) / (steps - 1);
            double dy = (y2 - y1) / (steps - 1);
            for (int k = 0; k < steps; k++)
            {
                acc += nearest(ip, ix, iy);
                ix += dx;
                iy += dy;
            }
            return acc;
        }
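
Note that dividing by (steps - 1) makes the sweep inclusive of both endpoints: sample 0 lands on (x1, y1) and sample steps - 1 lands exactly on (x2, y2). A standalone sketch of that behavior with hypothetical coordinates:

        using System;

        class InterpolationCheck
        {
            static void Main()
            {
                double x1 = 0.0, y1 = 0.0, x2 = 9.0, y2 = 0.0; // hypothetical segment
                int steps = 10;
                double dx = (x2 - x1) / (steps - 1);
                double dy = (y2 - y1) / (steps - 1);
                double ix = x1, iy = y1;
                for (int k = 0; k < steps; k++)
                {
                    Console.WriteLine("sample " + k + ": (" + ix + ", " + iy + ")");
                    ix += dx;
                    iy += dy;
                }
                // Prints ten samples from (0, 0) through (9, 0) inclusive
            }
        }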
Code Example #21
        List<double> IBehaviorCharacterization.calculate(SimulatorExperiment engine, instance_pack ip)
        {
            Environment environment = engine.environment;
            bc.Clear();
            double gotosum = 1.0;
            double endsum = 1.0;
            for (int i = 0; i < ip.robots.Count; i++)
            {
                bc.Add(gotoList[i]);
                bc.Add(endList[i]);
            }
            bc.Sort();
            return new List<double>(bc);
        }
Code Example #22
        void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
        {
            // HACK: Both environments are the same, but this hack allows one to treat the food as poison
            bool poison = !environment.name.Equals("ENV_dual_task1.xml");

            if (Experiment.multibrain && !Experiment.preferenceNeurons && Experiment.numBrains == 2)
            {
                if (!poison) //forage
                {
                    ip.agentBrain.switchBrains(0);
                }
                else // poison
                {
                    ip.agentBrain.switchBrains(1);
                }
            }

            //For food gathering
            if (ip.timeSteps == 1)
            {
                environment.goal_point.x = environment.POIPosition[0].X;
                environment.goal_point.y = environment.POIPosition[0].Y;

                collectedFood = 0;
                POINr = 0;
            }

            // Schrum: Last sensor is for detecting when food/poison is eaten
            Robot r = ip.robots[0]; // There should be only one robot in this task
            SignalSensor s = (SignalSensor)r.sensors[r.sensors.Count - 1];

            float d = (float)ip.robots[0].location.distance(environment.goal_point);
            if (d < 20.0f)
            {
                // Need to detect when food or poison is eaten
                s.setSignal(poison ? -1.0 : 1.0);

                collectedFood++;
                POINr++;
                if (POINr > 3) POINr = 0;
                environment.goal_point.x = environment.POIPosition[POINr].X;
                environment.goal_point.y = environment.POIPosition[POINr].Y;

            }
            else
            {// Nothing eaten, so food/poison sensor is 0
                s.setSignal(0.0);
            }

        }
Code Example #23
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
        {
            objectives = new double[6];
            objectives[0] = 100000.0 / (accum + 1.0);
            objectives[0] *= 1000.0;
            double travel = 0.0;
            foreach (Robot r in ip.robots)
            {
                coll_count += r.collisions; //sensorList[i];//engine.robots[i].collisions;
                travel += r.dist_trav;
            }
            objectives[1] = -coll_count;
            //objectives[1]=travel;
            return objectives[0];
        }
Code Example #24
        double nearest(instance_pack ip, double x, double y)
        {
            double  nd = 10000000.0;
            Point2D p  = new Point2D(x, y);

            foreach (Robot r in ip.robots)
            {
                double d = r.location.distance(p);
                if (d < nd)
                {
                    nd = d;
                }
            }
            return(nd);
        }
Code Example #25
File: DistanceBC.cs Project: jal278/agent_multimodal
        void IBehaviorCharacterization.update(SimulatorExperiment exp, instance_pack ip)
        {
            if (count % sample_rate == 0)
            {
                int rc = 0;
                foreach (Robot r in ip.robots)
                {
                    xc[rc, samples] = r.location.x;
                    yc[rc, samples] = r.location.y;
                    rc++;
                }
                //Console.WriteLine(samples);
                samples++;
            }
            count++;
        }
Code Example #26
        double test_interpolation(instance_pack ip, double x1, double y1, double x2, double y2, int steps)
        {
            double acc = 0.0;
            double ix  = x1;
            double iy  = y1;
            double dx  = (x2 - x1) / (steps - 1);
            double dy  = (y2 - y1) / (steps - 1);

            for (int k = 0; k < steps; k++)
            {
                acc += nearest(ip, ix, iy);
                ix  += dx;
                iy  += dy;
            }
            return(acc);
        }
Code Example #27
 void IBehaviorCharacterization.update(SimulatorExperiment exp, instance_pack ip)
 {
     if (count % sample_rate == 0)
     {
         int rc = 0;
         foreach (Robot r in ip.robots)
         {
             xc[rc, samples] = r.location.x;
             yc[rc, samples] = r.location.y;
             rc++;
         }
         //Console.WriteLine(samples);
         samples++;
     }
     count++;
 }
Code Example #28
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
        {
            objectives = new double[6];
            double caughtFitness = preyCaught * 100.0;
            objectives[0] = caughtFitness;

            double timeFitness = 0;
            if (finished)
                timeFitness = (double)engine.evaluationTime - finishTime;
            if (timeFitness < 0) timeFitness = 0.0;
            objectives[1] = timeFitness;

            //double travelFitness = traveled * .0002;

            return caughtFitness + timeFitness * 2;  // +travelFitness;
        }
Code Example #29
        void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
        {
            //Console.WriteLine(Experiment.multibrain + " && " + !Experiment.preferenceNeurons + " && " + (Experiment.numBrains == 2));
            // Schrum: Set which brain to use if the number is an experiment parameter
            // Schrum: Because the explicit brain switching only occurs when numBrains == 2, it will not register in FourTasks experiments using 5 brains
            if (Experiment.multibrain && !Experiment.preferenceNeurons && Experiment.numBrains == 2)
            {
                if (environment.name.EndsWith("ENV_dual_task.xml") || environment.name.EndsWith("FourTasks-ENV2.xml")) //HACK navigation
                {
                    ip.agentBrain.switchBrains(0);
                }
                else // food gathering
                {
                    ip.agentBrain.switchBrains(1);
                }
            }

            //For navigation
            if (ip.robots[0].location.distance(new Point2D((int)environment.POIPosition[4].X, (int)environment.POIPosition[4].Y)) < 20.0f)
            {
                reachedGoal = true;
            }


            //For food gathering
            if (ip.timeSteps == 1)
            {
                environment.goal_point.x = environment.POIPosition[0].X;
                environment.goal_point.y = environment.POIPosition[0].Y;

                collectedFood = 0;
                POINr = 0;
            }

            float d = (float)ip.robots[0].location.distance(environment.goal_point);
            if (d < 20.0f)
            {
                collectedFood++;
                POINr++;
                if (POINr > 3) POINr = 0;
                environment.goal_point.x = environment.POIPosition[POINr].X;
                environment.goal_point.y = environment.POIPosition[POINr].Y;

            }

            //Console.WriteLine("reachedGoal = " + reachedGoal + ", d = " + d + ", goal = " + environment.goal_point);
        }
Code Example #30
        //int i = 0;
        void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
        {
            // Schrum: Debug: For comparing non-visual eval with visual
            // Prints out locations visited by all robots
            /*
            for (int i = 0; i < ip.robots.Count; i++)
            {
                Console.Write(ip.robots[i].location.x + "\t" + ip.robots[i].location.y + "\t");
                if (ip.robots[i] is EnemyRobot)
                {
                    Console.Write(((EnemyRobot)ip.robots[i]).wallResponse + "\t" + ((EnemyRobot)ip.robots[i]).chaseResponse + "\t" + ip.robots[i].heading + "\t" + ((EnemyRobot)ip.robots[i]).angle + "\t" + ip.robots[i].collisions + "\t");
                }
            }
            Console.WriteLine();
            */
            /*
            if (ip.robots[0].location.x != ((EnemyRobot)ip.robots[1]).getEvolved().location.x || ip.robots[0].location.y != ((EnemyRobot)ip.robots[1]).getEvolved().location.y)
            {
                Console.WriteLine("Different locations:");
                Console.WriteLine("Robot 0: " + ip.robots[0].location);
                Console.WriteLine("Enemy's reference refr   to evolved: " + ((EnemyRobot)ip.robots[1]).getEvolved().location);
                if (i++ > 5)
                {
                    System.Windows.Forms.Application.Exit();
                    System.Environment.Exit(1);
                }
            }
            */
            if (ip.timeSteps == 1)
            {
                collided = false;
                portionAlive = 0;
            }

            // Schrum2: Added to detect robot collisions and end the evaluation when they happen
            if (ip.robots[0].collisions > 0)
            { // Disabling prevents further action
                //Console.WriteLine("Collision");
                collided = true;
                ip.elapsed = Experiment.evaluationTime; // force end time: only affects non-visual evaluation
                Experiment.running = false; // Ends visual evaluation
            }
            else
            {
                portionAlive++;
            }
        }
Code Example #31
        double nearest(instance_pack ip, Robot robot, Environment env)
        {
            double nd = 1000000.0;
            bool found = false;
            Point2D p = robot.location;
            foreach (Robot r in ip.robots)
            {
                if (r != robot && env.AOIRectangle.Contains((int)r.location.x, (int)r.location.y))
                {
                    double d = r.location.distance(p);
                    if (d < nd) nd = d;
                    found = true;
                    //nd+=d;
                }
            }

            if (found)
                return nd;
            else
                return 0.0;
        }
Code Example #32
 double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
 {
     objectives = null;
     //Console.WriteLine("Fitness " + portionAlive);
     // Schrum: Debug
     /*
     if (portionAlive == 2001)
     {
         for (int i = 0; i < ip.robots[0].history.Count; i++)
         {
             Console.WriteLine(ip.robots[0].history[i].x + "\t" + ip.robots[0].history[i].y);
         }
         System.Windows.Forms.Application.Exit();
         System.Environment.Exit(1);
     }
     */
     return portionAlive;
 }
Code Example #33
        //characterizing behavior...
        List <double> IBehaviorCharacterization.calculate(SimulatorExperiment exp, instance_pack ip)
        {
            bool disabled = false;

            for (int i = 0; i < ip.robots.Count; i++)
            {
                if (ip.robots[i].disabled)
                {
                    disabled = true;
                }
            }

            for (int i = 0; i < ip.robots.Count; i++)
            {
                double minx = 1000, miny = 1000, maxx = -1000, maxy = -1000;

                /*
                 *     for(int j=0;j<samples;j+=(samples/2)) {
                 *     if(xc[i,j]<minx) minx=xc[i,j];
                 *     if(xc[i,j]>maxx) maxx=xc[i,j];
                 *     if(yc[i,j]<miny) miny=yc[i,j];
                 *     if(yc[i,j]>maxy) maxy=yc[i,j];
                 * }
                 * disabled=false;//disable for now...
                 * if(disabled) {
                 * minx *= -0.1;
                 * maxx *= -0.1;
                 * miny *= -0.1;
                 * maxy *= -0.1;
                 * }
                 *
                 *
                 * bc.Add(minx);
                 * bc.Add(miny);
                 * bc.Add(maxx);
                 * bc.Add(maxy);
                 */
                bc.Add(yc[i, samples / 2]);
                bc.Add(yc[i, samples - 1]);
            }

            //	Console.WriteLine(bc.Count.ToString());
            return(new List <double>(bc));
        }
Code Example #34
        void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
        {
            incrementFitness(environment, ip);

            bool all = true;
            for (int i = 0; i < reachedPOI.Length; i++)
            {
                all = all && reachedPOI[i];
            }
            if (all) return; // Schrum: Done if all goals were reached

            for (int i = 0; i < environment.POIPosition.Count; i++)
            {
                if (reachedPOI[i])
                {
                    continue; // Schrum: Once one POI has been reached, move to the next
                }
                else if (ip.robots[0].location.distance(new Point2D((int)environment.POIPosition[i].X, (int)environment.POIPosition[i].Y)) < 10.0f)
                {
                    reachedPOI[i] = true;
                    // Schrum: Only manually change brains if preference neurons are not used
                    // Schrum: Don't switch brains here if there are 5 brains, since this corresponds to the FourTasks experiments.
                    if (Experiment.multibrain && !Experiment.preferenceNeurons && Experiment.numBrains != 5)
                    {
                        if (ip.agentBrain.numBrains == 3) // Schrum: Playing with special cases. Still need something more general.
                        {
                            int[] mapping = new int[] { 1, 2, 1, 0 }; // Mapping to the next module to use. Module 1 is repeated since it is for straight corridors.
                            ip.agentBrain.switchBrains(mapping[i]);
                        }
                        else
                        {   // Schrum: I'm not sure this option is actually used anywhere
                            ip.agentBrain.switchBrains(i + 1); // Schrum: Switch to next brain (one for each step of task)
                        }
                    }
                }
                break; // Schrum: Can't reach two at once, and must reach in order. Only "continue" can get past this
            }

            // Schrum: Once all POIs have been checked, the goal (returning) can be checked. Goal treated like extra POI
            if (reachedPOI[2] && ip.robots[0].location.distance(environment.goal_point) < 10.0f)
            {
                reachedPOI[3] = true;
            }
        }
Code Example #35
        void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
        {
            grid = ((GridCollision)(ip.collisionManager)).grid;

            if (!(ip.timeSteps % (int)(1 / Experiment.timestep) == 0))
            {
                return;
            }

            int dim = grid.coarseness;
            for (int x = 0; x < dim; x++)
            {
                for (int y = 0; y < dim; y++)
                {
                    int gx = (int)((double)x * grid.gridx) + (int)(grid.gridx / 2.0);
                    int gy = (int)((double)y * grid.gridy) + (int)(grid.gridy / 2.0);
                    if (environment.AOIRectangle.Contains(gx, gy))
                    {
                        accum += grid.grid[x, y].viewed;
                        stop_accum += grid.grid[x, y].viewed2;
                    }
                }
            }

            foreach (Robot r in ip.robots)
            {
                if (!r.autopilot)
                {
                    foreach (ISensor s in r.sensors)
                    {
                        if (s is SignalSensor)
                        {
                            SignalSensor ss = (SignalSensor)s;
                            double val = ss.get_value();
                            val += 0.05;
                            if (val > 1.0) val = 1.0;
                            ss.setSignal(val);
                        }
                    }
                }
            }

            grid.decay_viewed(0);
            //grid.decay_viewed(.95);
        }
Code Example #36
        void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
        {
            if (reachedGoal) return;

           // if (ip.robots[0].collisions > 0) return;

            if (ip.robots[0].location.distance(environment.goal_point) < 35.0f)
            {
                reachedGoal = true;
            }

            for (int i = 0; i < environment.POIPosition.Count; i++)
            {
                if (ip.robots[0].location.distance(new Point2D((int)environment.POIPosition[i].X, (int)environment.POIPosition[i].Y)) < 20.0f)
                {
                    reachedPOI[i] = true;
                }
            }

        }
Code Example #37
File: DistanceBC.cs Project: jal278/agent_multimodal
        //characterizing behavior...
        List<double> IBehaviorCharacterization.calculate(SimulatorExperiment exp, instance_pack ip)
        {
            for (int i = 0; i < ip.robots.Count; i++)
            {
                double d = 0.0;
                for (int j = 1; j < samples; j++)
                {
                    double dx = xc[i, j] - xc[i, j - 1];
                    double dy = yc[i, j] - yc[i, j - 1];
                    d += dx * dx + dy * dy;
                }
                if (ip.robots[i].disabled) d *= -0.1;
                bc.Add(d);
                bc.Sort();
            }

            //Console.WriteLine(bc.Count.ToString());
            return new List<double>(bc);
        }
Code Example #38
        void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
        {
            if (!(ip.timeSteps % (int)(1 / ip.timestep) == 0))
            {
                //grid.decay_viewed(0);
                return;
            }
            bool all_in = true;
            double a_accum = 0.00000001;
            double a_saccum = 0.00000001;
            foreach (Robot r in ip.robots)
            {
                if (!r.autopilot)
                {
                    foreach (ISensor s in r.sensors)
                    {
                        if (s is SignalSensor)
                        {
                            SignalSensor ss = (SignalSensor)s;
                            double val = ss.get_value();
                            val += 0.05;
                            if (val > 1.0) val = 1.0;
                            ss.setSignal(val);
                        }
                    }
                }

                if (environment.AOIRectangle.Contains((int)r.location.x, (int)r.location.y))
                {
                    a_accum += 1.0 / (nearest(ip, r, environment));
                    if (r.corrected)
                        a_saccum += nearest(ip, r, environment);
                    //else
                    //	a_saccum+=1.0;
                }
                else
                {
                    all_in = false;
                }
            }

            if (all_in)
            {
                accum += ((double)ip.robots.Count) / a_accum;
                stopaccum += a_saccum / ((double)ip.robots.Count);
            }
        }
Code Example #39
        //characterizing behavior...
        List<double> IBehaviorCharacterization.calculate(SimulatorExperiment exp, instance_pack ip)
        {
            //TODO reimplement
            List<double> bc = new List<double>();

            for (int agentIndex = 0; agentIndex < ip.robots.Count; agentIndex++)
            {
                double x;
                double y;
                if (exp.environment.AOIRectangle.Contains((int)ip.robots[agentIndex].location.x, (int)ip.robots[agentIndex].location.y))
                {
                    x = ip.robots[agentIndex].location.x;
                    y = ip.robots[agentIndex].location.y;
                    x = (x - exp.environment.AOIRectangle.Left) / exp.environment.AOIRectangle.Width;
                    y = (y - exp.environment.AOIRectangle.Top) / exp.environment.AOIRectangle.Height;

                    if (!ip.robots[agentIndex].corrected)
                    {
                        x *= -0.1;
                        y *= -0.1;
                    }
                }
                else
                {
                    x = -0.1;
                    y = -0.1;
                }
                // Insert each (x, y) pair so the list stays sorted by x
                int k;
                for (k = 0; k < bc.Count; k += 2)
                {
                    if (x < bc[k])
                    {
                        break;
                    }
                }
                bc.Insert(k, y);
                bc.Insert(k, x);
            }
            return bc;
        }
Code Example #40
        //characterizing behavior...
        List <double> IBehaviorCharacterization.calculate(SimulatorExperiment exp, instance_pack ip)
        {
            //TODO reimplement
            List <double> bc = new List <double>();

            for (int agentIndex = 0; agentIndex < ip.robots.Count; agentIndex++)
            {
                double x;
                double y;
                if (exp.environment.AOIRectangle.Contains((int)ip.robots[agentIndex].location.x, (int)ip.robots[agentIndex].location.y))
                {
                    x = ip.robots[agentIndex].location.x;
                    y = ip.robots[agentIndex].location.y;
                    x = (x - exp.environment.AOIRectangle.Left) / exp.environment.AOIRectangle.Width;
                    y = (y - exp.environment.AOIRectangle.Top) / exp.environment.AOIRectangle.Height;

                    if (!ip.robots[agentIndex].corrected)
                    {
                        x *= -0.1;
                        y *= -0.1;
                    }
                }
                else
                {
                    x = -0.1;
                    y = -0.1;
                }
                int k;
                for (k = 0; k < bc.Count; k += 2)
                {
                    if (x < bc[k])
                    {
                        break;
                    }
                }
                bc.Insert(k, y);
                bc.Insert(k, x);
            }
            return(bc);
        }
Code Example #41
        //characterizing behavior...
        List <double> IBehaviorCharacterization.calculate(SimulatorExperiment exp, instance_pack ip)
        {
            for (int i = 0; i < ip.robots.Count; i++)
            {
                double d = 0.0;
                for (int j = 1; j < samples; j++)
                {
                    double dx = xc[i, j] - xc[i, j - 1];
                    double dy = yc[i, j] - yc[i, j - 1];
                    d += dx * dx + dy * dy;
                }
                if (ip.robots[i].disabled)
                {
                    d *= -0.1;
                }
                bc.Add(d);
                bc.Sort();
            }

            //Console.WriteLine(bc.Count.ToString());
            return(new List <double>(bc));
        }
Code Example #42
        // Schrum2: Add extra enemy robot after default robots
        public override void initializeRobots(AgentBrain agentBrain, Environment e, float headingNoise, float sensorNoise, float effectorNoise, instance_pack ip)
        {
            // Debug:schrum2
            //Console.WriteLine("Init Robots");
            base.initializeRobots(agentBrain, e, headingNoise, sensorNoise, effectorNoise, ip);
            enemies = new List <EnemyRobot>();
            for (int i = 0; i < numEnemies; i++)
            {
                // schrum2: here is where the enemy robot is added
                // Location of evolved robot is needed to track it
                // Assumes that robot is in position 0 (problem?)
                EnemyRobot r = new EnemyRobot(robots[0], flee);
                enemies.Add(r);

                double _timestep = 0.0;
                if (ip == null)
                {
                    _timestep = this.timestep;
                }
                else
                {
                    _timestep = ip.timestep;
                }

                double nx = enemyX - i * enemyDeltaX;
                double ny = enemyY - i * enemyDeltaY;
                double h  = 0;

                // Alternative starting positions for enemies
                if (enemiesSurround)
                {
                    double evolvedX = robots[0].location.x;
                    double evolvedY = robots[0].location.y;

                    double radius = 200.0;

                    nx = evolvedX + radius * Math.Cos((i * 2.0 * Math.PI) / numEnemies);
                    ny = evolvedY + radius * Math.Sin((i * 2.0 * Math.PI) / numEnemies);
                    h  = Math.PI + i * (2 * Math.PI / numEnemies);
                }

                r.init(robots.Count, // id is last position in list of robots
                       nx, ny, h,    // starting position and heading
                       agentBrain,   // Has evolved brain (to avoid NullPointers, etc), but should never actually use it (problem?)
                       e, sensorNoise, effectorNoise, headingNoise, (float)_timestep);

                r.collisionPenalty = collisionPenalty;
                // Only add enemy if it is not already present
                if (robots.Count <= numEnemies) // Makes limiting assumption that this experiment will always be used with only one evolved bot and one enemy
                {
                    robots.Add(r);
                    //Console.WriteLine("Added enemy to list: " + robots.Count);
                }

                // Required so that experiment works properly in both visual and non-visual mode
                // (The non-visual mode takes the robots from the instance_pack instead of the experiment)
                if (ip != null && ip.robots.Count <= numEnemies)
                {
                    ip.robots.Add(r);
                    //Console.WriteLine("Added enemy to ip");
                }
            }
        }
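
As a check of the enemiesSurround placement math above: the enemies are spaced evenly on a circle of radius 200 centered on the evolved bot, with each heading offset by pi so it faces roughly inward. A standalone sketch using the same formulas, with hypothetical values for the bot position and enemy count:

        using System;

        class SurroundCheck
        {
            static void Main()
            {
                int numEnemies = 4;                        // hypothetical count
                double evolvedX = 300.0, evolvedY = 300.0; // hypothetical evolved-bot position
                double radius = 200.0;                     // same radius as initializeRobots above

                for (int i = 0; i < numEnemies; i++)
                {
                    // Same placement formulas as in the example above
                    double nx = evolvedX + radius * Math.Cos((i * 2.0 * Math.PI) / numEnemies);
                    double ny = evolvedY + radius * Math.Sin((i * 2.0 * Math.PI) / numEnemies);
                    double h = Math.PI + i * (2 * Math.PI / numEnemies);
                    Console.WriteLine("enemy " + i + ": (" + nx + ", " + ny + ") heading " + h);
                }
                // With numEnemies = 4 the enemies land at 0, 90, 180, and 270 degrees
            }
        }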
Code Example #43
 void IBehaviorCharacterization.update(SimulatorExperiment exp, instance_pack ip)
 {
 }
Code Example #44
 double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
 {
     objectives = null;
     //Console.WriteLine("rewards:" + rewards + ":livingCost:" + livingCost + ":fitness:" + (rewards - livingCost));
     return(fitness);
 }
Code Example #45
        void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
        {
            // Initialize variables on first time step
            if (ip.timeSteps == 1)
            {
                livingCost = 0;
                rewards    = 0;
                // Fitness values must be positive. Therefore, a high positive fitness is assigned in advance,
                // and the cost of living subtracts from it.
                // evaluationTime / timestep is the number of actual time steps, and the cost is multiplied by the number of enemies
                fitness = (Experiment.evaluationTime / Experiment.timestep) * Experiment.numEnemies;
            }

            // Find closest active prey
            bool  allCaptured = true; // becomes false if an active prey is found
            Robot evolved     = ip.robots[0];

            for (int i = 1; i < ip.robots.Count; i++)
            {
                // Assumes all but first robot are EnemyRobot instances
                EnemyRobot er = (EnemyRobot)ip.robots[i];
                if (!er.disabled) // Not captured yet
                {
                    //Console.WriteLine("Robot "+i+" not disabled");

                    allCaptured = false;
                    // The collisionWithEvolved bool should always be the primary means of detecting these
                    // collisions, but the other call is here as a precaution. This check is needed because
                    // when the evolved bot normally collides with the enemy, the "undo" method is called,
                    // which prevents the collision from being detected using normal means in this method.
                    // Fortunately, the collisionWithEvolved member is used to remember when this collision
                    // occurs.
                    if (er.collisionWithEvolved || EngineUtilities.collide(evolved, er))
                    { // Reward evolved bot for colliding with prey, and disable prey
                        er.disabled = true;
                        er.stopped  = true;
                        rewards    += PREY_REWARD;
                        fitness    += PREY_REWARD; // This is the value that matters
                        //Console.WriteLine("\treward:" + rewards + " from " + PREY_REWARD);
                    }
                    else
                    { // Each active prey punishes the bot for not being captured yet
                        double distance = evolved.location.distance(er.location);
                        double cost     = distance / environment.maxDistance;
                        livingCost += cost;
                        fitness    -= cost; // This is the value that matters
                        //Console.WriteLine("\tCost: " + (distance / 1000.0) + " to be " + livingCost + " raw distance: " + distance);
                    }
                }
            }

            // End evaluation and stop accruing negative fitness if all prey are captured
            if (allCaptured)
            {                                                   // Disabling prevents further action
                ip.elapsed         = Experiment.evaluationTime; // force end time: only affects non-visual evaluation
                Experiment.running = false;                     // Ends visual evaluation
            }
        }
Code Example #46
File: poiMO.cs Project: MadsAnthony/ThesisProject
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] obj)
        {
            obj     = new double[6];
            fitness = 0.000001;
            double go_sum        = 1.0;
            double ret_sum       = 1.0;
            double collide_count = 0;
            bool   moved         = true;

            //Checks to see if the robots have moved or turned since the signal has fired, meaning that they reacted to the signal
            for (int j = 0; j < ip.robots.Count; j++)
            {
                if (turned[j] || origDist[j] - ip.robots[j].location.manhattenDistance(environment.goal_point) >= 5)
                {
                    continue;
                }
                else
                {
                    moved = false;
                    break;
                }
            }

            if (!penalizeGettingToPoints)
            {
                allClose = true;
            }

            bool   solve = true;
            double temp;

            if (!allClose || !moved)
            {
                solve = false;
            }
            for (int i = 0; i < ip.robots.Count; i++)
            {
                if (!reachGoal[i])
                {
                    solve = false;
                }
                if ((allClose && moved) || !penalizeSignalResponse)
                {
                    fitness += gotoList[i];
                }
                else
                {
                    fitness += gotoList[i] / 10.0;
                }
                temp = endList[i];
                //if ((penalizeCorrection && !allCorrected) || (penalizeGettingToPoints && !allClose) || (penalizeSignalResponse && !moved))
                if (penalizeCorrection && (!allCorrected || !allClose))
                {
                    temp /= 100;
                }
                else if (penalizeGettingToPoints && !allClose)
                {
                    temp /= 100;
                }
                //if(penalizeGettingToPoints && !allClose)
                //    temp/=100;
                //if(penalizeSignalResponse && !moved)
                //    temp/=10;
                fitness += temp;

                //Console.WriteLine(gotoList[i] + " " + endList[i]);
                go_sum        *= (gotoList[i] + 0.01);
                ret_sum       *= (endList[i] + 0.01);
                obj[i * 2]     = 0;                       //gotoList[i];
                obj[i * 2 + 1] = 0;                       //endList[i];
                collide_count += ip.robots[i].collisions; //sensorList[i];//engine.robots[i].collisions;
            }

            obj[0] = go_sum;
            obj[1] = ret_sum;
            obj[2] = -collide_count;
            // Schrum: This 100 point fitness bonus shouldn't be here
            //if(solve) fitness+=100.0;
            return(fitness);
        }
Code Example #47
        internal override double evaluateNetwork(INetwork network, out SharpNeatLib.BehaviorType behavior, System.Threading.Semaphore sem)
        {
            double     fitness = 0;//SharpNeatLib.Utilities.random.NextDouble();
            NeatGenome tempGenome;
            INetwork   controller;

            behavior = new SharpNeatLib.BehaviorType();

            double[] accumObjectives = new double[6];
            for (int i = 0; i < 6; i++)
            {
                accumObjectives[i] = 0.0;
            }

            IFitnessFunction          fit_copy;
            IBehaviorCharacterization bc_copy;
            CollisionManager          cm;
            instance_pack             inst = new instance_pack();

            inst.timestep = timestep;
            foreach (Environment env2 in environmentList)
            {
                fit_copy = fitnessFunction.copy();
                if (behaviorCharacterization != null)
                {
                    bc_copy = behaviorCharacterization.copy();
                    inst.bc = bc_copy;
                }
                inst.ff = fit_copy;

                double   tempFit   = 0;
                double[] fitnesses = new double[timesToRunEnvironments];
                SharpNeatLib.Maths.FastRandom evalRand = new SharpNeatLib.Maths.FastRandom(100);
                for (int evals = 0; evals < timesToRunEnvironments; evals++)
                {
                    int agent_trials = timesToRunEnvironments;

                    inst.num_rbts = this.numberRobots;

                    Environment env = env2.copy();

                    double evalTime = evaluationTime;

                    inst.eval = evals;
                    env.seed  = evals;
                    env.rng   = new SharpNeatLib.Maths.FastRandom(env.seed + 1);


                    int noise_lev = (int)this.sensorNoise + 1;

                    float new_sn = evalRand.NextUInt() % noise_lev;
                    float new_ef = evalRand.NextUInt() % noise_lev;


                    inst.agentBrain = new AgentBrain(homogeneousTeam, inst.num_rbts, substrateDescription, network, normalizeWeights, adaptableANN, modulatoryANN, multibrain, evolveSubstrate);
                    initializeRobots(agentBrain, env, headingNoise, new_sn, new_ef, inst);

                    inst.elapsed  = 0;
                    inst.timestep = this.timestep;
                    //Console.WriteLine(this.timestep);

                    inst.timeSteps        = 0;
                    inst.collisionManager = collisionManager.copy();
                    inst.collisionManager.Initialize(env, this, inst.robots);

                    try
                    {
                        while (inst.elapsed < evalTime)
                        {
                            runEnvironment(env, inst, sem);
                        }
                    }
                    catch (Exception e)
                    {
                        for (int x = 0; x < inst.robots.Count; x++)
                        {
                            for (int y = 0; y < inst.robots[x].history.Count; y++)
                            {
                                Console.WriteLine(x + " " + y + " " + inst.robots[x].history[y].x + " " + inst.robots[x].history[y].y);
                            }
                        }


                        behavior = new SharpNeatLib.BehaviorType();
                        Console.WriteLine(e.Message);
                        Console.WriteLine(e.StackTrace);
                        throw; // rethrow, preserving the original stack trace
                    }


                    double thisFit = inst.ff.calculate(this, env, inst, out behavior.objectives);
                    fitnesses[evals] = thisFit;
                    tempFit         += thisFit;


                    if (behavior != null && behavior.objectives != null && inst.bc != null)
                    {
                        for (int i = 0; i < behavior.objectives.Length; i++)
                        {
                            accumObjectives[i] += behavior.objectives[i];
                        }

                        if (behavior.behaviorList == null)
                        {
                            behavior.behaviorList = new List <double>();
                        }
                        behavior.behaviorList.AddRange(inst.bc.calculate(this, inst));

                        inst.bc.reset();
                    }

                    inst.ff.reset();
                }
                fitness += tempFit / timesToRunEnvironments;
            }
            behavior.objectives = accumObjectives;
            return(fitness / environmentList.Count);
        }
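The method above computes a mean of means: each environment's fitness is averaged over `timesToRunEnvironments` evaluations, and the returned value averages those per-environment means over `environmentList.Count`. A minimal standalone sketch of that aggregation, with illustrative names (`AggregateFitness`, `evaluateOnce`) and assuming `using System;` for `Func`:

        // Sketch (illustrative names): mean-of-means fitness aggregation.
        static double AggregateFitness(int environments, int evalsPerEnvironment, Func<int, int, double> evaluateOnce)
        {
            double fitness = 0.0;
            for (int env = 0; env < environments; env++)
            {
                double envTotal = 0.0;
                for (int eval = 0; eval < evalsPerEnvironment; eval++)
                {
                    envTotal += evaluateOnce(env, eval);   // one simulated rollout
                }
                fitness += envTotal / evalsPerEnvironment; // mean within an environment
            }
            return fitness / environments;                 // mean across environments
        }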
Code example #48
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
        {
            double fitness = 0.0f;

            objectives = null;

            // ENV_dual_task.xml is the hallway navigation environment of the dual task
            // FourTasks-ENV2.xml is a direct copy of ENV_dual_task.xml, but the copy is
            // required to satisfy the way that the simulator runs multiple environments.
            if (environment.name.EndsWith("ENV_dual_task.xml") || environment.name.EndsWith("FourTasks-ENV2.xml")) //HACK navigation
            {
                fitness = (1.0f - ip.robots[0].location.distance(new Point2D(environment.POIPosition[4].X, environment.POIPosition[4].Y)) / 650.0f);

                if (fitness < 0)
                {
                    fitness = 0.00001f;
                }
                if (reachedGoal)
                {
                    fitness = 1;
                }

                //  fitness = 1;
            }
            else   //food gathering
            {
                float distFood = (float)(1.0f - (ip.robots[0].location.distance(environment.goal_point) / environment.maxDistance)); // ip.robots, for consistency with the navigation branch above
                fitness = (collectedFood + distFood) / 4.0f;

                if (fitness < 0)
                {
                    fitness = 0.00001f;
                }
                if (collectedFood >= 4)
                {
                    fitness = 1.0f;
                }

                //  fitness = 1;
            }

            return(fitness);
        }
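The navigation branch above shapes fitness by normalized distance to POI #4 (the divisor 650 is taken from the code and presumably approximates the maximum distance in that map), floors it at a small positive epsilon so selection still gets a gradient, and saturates at 1 once the goal is reached. A standalone sketch of that shaping; `DistanceShapedFitness` is a hypothetical helper, not part of the simulator:

        // Sketch: normalized distance shaping with an epsilon floor and goal saturation.
        static double DistanceShapedFitness(double distanceToGoal, double maxDistance, bool reachedGoal)
        {
            if (reachedGoal)
                return 1.0;                       // full credit once the goal has been hit
            double shaped = 1.0 - distanceToGoal / maxDistance;
            return Math.Max(shaped, 0.00001);     // keep fitness strictly positive
        }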
Code example #49
        //int i = 0;
        void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
        {
            // Schrum: Debug: For comparing non-visual eval with visual
            // Prints out locations visited by all robots

            /*
             * for (int i = 0; i < ip.robots.Count; i++)
             * {
             *  Console.Write(ip.robots[i].location.x + "\t" + ip.robots[i].location.y + "\t");
             *  if (ip.robots[i] is EnemyRobot)
             *  {
             *      Console.Write(((EnemyRobot)ip.robots[i]).wallResponse + "\t" + ((EnemyRobot)ip.robots[i]).chaseResponse + "\t" + ip.robots[i].heading + "\t" + ((EnemyRobot)ip.robots[i]).angle + "\t" + ip.robots[i].collisions + "\t");
             *  }
             * }
             * Console.WriteLine();
             */
            /*
             * if (ip.robots[0].location.x != ((EnemyRobot)ip.robots[1]).getEvolved().location.x || ip.robots[0].location.y != ((EnemyRobot)ip.robots[1]).getEvolved().location.y)
             * {
             *  Console.WriteLine("Different locations:");
             *  Console.WriteLine("Robot 0: " + ip.robots[0].location);
             *  Console.WriteLine("Enemy's reference refr   to evolved: " + ((EnemyRobot)ip.robots[1]).getEvolved().location);
             *  if (i++ > 5)
             *  {
             *      System.Windows.Forms.Application.Exit();
             *      System.Environment.Exit(1);
             *  }
             * }
             */
            if (ip.timeSteps == 1)
            {
                collided     = false;
                portionAlive = 0;
            }

            // Schrum2: Added to detect robot collisions and end the evaluation when they happen
            if (ip.robots[0].collisions > 0)
            { // Disabling prevents further action
                //Console.WriteLine("Collision");
                collided           = true;
                ip.elapsed         = Experiment.evaluationTime; // force end time: only affects non-visual evaluation
                Experiment.running = false;                     // Ends visual evaluation
            }
            else
            {
                portionAlive++;
            }
        }
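Setting `ip.elapsed` to the full evaluation time is what terminates the `while (inst.elapsed < evalTime)` loop in the non-visual evaluator, while `Experiment.running = false` stops the visual one; both must be set to cover both paths. A tiny sketch of that early-termination idiom (`AbortEvaluation` is an illustrative name):

        // Sketch: end an evaluation early from inside a per-tick update.
        static void AbortEvaluation(instance_pack ip, SimulatorExperiment experiment)
        {
            ip.elapsed         = experiment.evaluationTime; // non-visual loop fails its while condition
            experiment.running = false;                     // visual loop polls this flag instead
        }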
Code example #50
        void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
        {
            //Console.WriteLine(Experiment.multibrain + " && " + !Experiment.preferenceNeurons + " && " + (Experiment.numBrains == 2));
            // Schrum: Set which brain to use if the number is an experiment parameter
            if (Experiment.multibrain && !Experiment.preferenceNeurons && Experiment.numBrains == 2)
            {
                if (environment.name.Equals("ENV_dual_task.xml")) //HACK navigation
                {
                    ip.agentBrain.switchBrains(0);
                }
                else   //food gathering
                {
                    ip.agentBrain.switchBrains(1);
                }
            }

            //For navigation
            if (ip.robots[0].location.distance(new Point2D((int)environment.POIPosition[4].X, (int)environment.POIPosition[4].Y)) < 20.0f)
            {
                reachedGoal = true;
            }


            //For food gathering
            if (ip.timeSteps == 1)
            {
                environment.goal_point.x = environment.POIPosition[0].X;
                environment.goal_point.y = environment.POIPosition[0].Y;

                collectedFood = 0;
                POINr         = 0;
            }

            float d = (float)ip.robots[0].location.distance(environment.goal_point);

            if (d < 20.0f)
            {
                collectedFood++;
                POINr++;
                if (POINr > 3)
                {
                    POINr = 0;
                }
                environment.goal_point.x = environment.POIPosition[POINr].X;
                environment.goal_point.y = environment.POIPosition[POINr].Y;
            }
        }
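Food gathering works by walking `goal_point` through the first four POI positions: when the robot comes within 20 units, the food counter increments and the goal advances with wrap-around (the `POINr > 3` reset). A compact sketch of that cycling, with illustrative names and a modulo in place of the explicit reset:

        // Sketch: cycle a goal point through a fixed list as each is "collected".
        int collected = 0, poiIndex = 0;
        void UpdateGoal(Point2D robotLocation, Point2D[] pois, ref Point2D goal, double radius)
        {
            if (robotLocation.distance(goal) < radius)   // radius of 20.0 in the code above
            {
                collected++;
                poiIndex = (poiIndex + 1) % pois.Length; // wrap-around, like the POINr > 3 reset
                goal = pois[poiIndex];
            }
        }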
Code example #51
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
        {
            double fitness = 0.0f;

            objectives = null;

            if (environment.name.Equals("ENV_dual_task.xml")) //HACK navigation
            {
                fitness = (1.0f - ip.robots[0].location.distance(new Point2D(environment.POIPosition[4].X, environment.POIPosition[4].Y)) / 650.0f);

                if (fitness < 0)
                {
                    fitness = 0.00001f;
                }
                if (reachedGoal)
                {
                    fitness = 1;
                }

                //  fitness = 1;
            }
            else   //food gathering
            {
                float distFood = (float)(1.0f - (ip.robots[0].location.distance(environment.goal_point) / environment.maxDistance)); // ip.robots, for consistency with the navigation branch above
                fitness = (collectedFood + distFood) / 4.0f;

                if (fitness < 0)
                {
                    fitness = 0.00001f;
                }
                if (collectedFood >= 4)
                {
                    fitness = 1.0f;
                }

                //  fitness = 1;
            }

            return(fitness);
        }
Code example #52
        void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
        {
            // HACK: Both environments are the same, but this hack allows one to treat the food as poison
            bool poison = !environment.name.Equals("ENV_dual_task1.xml");

            if (Experiment.multibrain && !Experiment.preferenceNeurons && Experiment.numBrains == 2)
            {
                if (!poison) //forage
                {
                    ip.agentBrain.switchBrains(0);
                }
                else   //poison
                {
                    ip.agentBrain.switchBrains(1);
                }
            }

            //For food gathering
            if (ip.timeSteps == 1)
            {
                environment.goal_point.x = environment.POIPosition[0].X;
                environment.goal_point.y = environment.POIPosition[0].Y;

                collectedFood = 0;
                POINr         = 0;
            }

            // Schrum: Last sensor is for detecting when food/poison is eaten
            Robot        r = ip.robots[0]; // There should be only one robot in this task
            SignalSensor s = (SignalSensor)r.sensors[r.sensors.Count - 1];

            float d = (float)ip.robots[0].location.distance(environment.goal_point);

            if (d < 20.0f)
            {
                // Need to detect when food or poison is eaten
                s.setSignal(poison ? -1.0 : 1.0);

                collectedFood++;
                POINr++;
                if (POINr > 3)
                {
                    POINr = 0;
                }
                environment.goal_point.x = environment.POIPosition[POINr].X;
                environment.goal_point.y = environment.POIPosition[POINr].Y;
            }
            else
            {// Nothing eaten, so food/poison sensor is 0
                s.setSignal(0.0);
            }
        }
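The last sensor doubles as an eating-event channel: +1 when food is eaten, -1 for poison, 0 on steps where nothing is eaten, giving the network immediate feedback on the consequence of eating. A sketch of that convention (`ReportEatingEvent` is an illustrative name; `setSignal` is the sensor call used above):

        // Sketch: encode an eating event on a single signal sensor.
        // +1.0 = ate food, -1.0 = ate poison, 0.0 = nothing eaten this step.
        static void ReportEatingEvent(SignalSensor sensor, bool ateSomething, bool isPoison)
        {
            if (!ateSomething)
                sensor.setSignal(0.0);
            else
                sensor.setSignal(isPoison ? -1.0 : 1.0);
        }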
Code example #53
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
        {
            double fitness = 0.0f;

            objectives = null;
            // Only acknowledge actual eating of food, not just proximity
            fitness = collectedFood / 4.0f;

            if (collectedFood >= 4)
            {
                fitness = 1.0f;
            }

            // Schrum
            // HACK: Both environments are the same, but this hack allows one to treat the food as poison
            bool poison = !environment.name.Equals("ENV_dual_task1.xml");

            // Extra aspect of the HACK: The first task loaded excludes its path from the name, but this is not
            // the case for the second task loaded. This is why the negation is used instead of looking up the second
            // task directly, which is named ENV_dual_task11.xml (convention of simulator)
            if (poison)
            {
                fitness *= -0.9;
            }

            // Schrum: For troubleshooting
            //Console.WriteLine(environment.name + " " + fitness + " " + poison);

            return(fitness);
        }
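The -0.9 factor makes eating in the poison environment strictly harmful while keeping the two tasks on comparable scales. A worked example of the flip, using the formula above:

        // Worked example: 2 of 4 items collected.
        // food task:   fitness = 2 / 4.0      =  0.5
        // poison task: fitness = 0.5 * -0.9   = -0.45  (the best poison policy is to eat nothing)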
Code example #54
File: poiMO.cs Project: MadsAnthony/ThesisProject
        void IFitnessFunction.update(SimulatorExperiment engine, Environment environment, instance_pack ip)
        {
            if (!(ip.timeSteps % (int)(1 / engine.timestep) == 0))
            {
                //grid.decay_viewed(0);
                return;
            }


            double[] gl    = new double[3];
            double[] el    = new double[3];
            double[] sense = new double[3];

            int r_ind = 0;

            if (!first && (ip.timeSteps * engine.timestep) > engine.evaluationTime / 2.0)
            {
                allCorrected = true;
                bool[] close = new bool[3];
                // Schrum: Brains don't get switched with preference neurons ... all need to be evaluated
                if (!ip.agentBrain.preferenceNeurons)
                {
                    ip.agentBrain.switchBrains();
                }
                foreach (Robot r in ip.robots)
                {
                    //Schrum: Debugging
                    //Console.WriteLine("Robot id: " + r.id + ", " + r.name);
                    //Console.WriteLine("r.sensors.Count=" + r.sensors.Count);
                    //Console.WriteLine("Last sensor type: " + r.sensors[r.sensors.Count - 1].GetType());

                    if (!ip.agentBrain.multipleBrains ||                  // Schrum: Original condition
                        (r.sensors[r.sensors.Count - 1] is SignalSensor)) // Schrum: Broader condition that also works with pref neurons
                    {
                        //Schrum: Debugging
                        //Console.WriteLine("Switched signal at " + (ip.timeSteps * engine.timestep));

                        SignalSensor s = (SignalSensor)r.sensors[r.sensors.Count - 1];
                        s.setSignal(1.0);
                    }
                    origDist[r_ind]     = r.location.distance(environment.goal_point);
                    origHeadings[r_ind] = r.heading;

                    //checks to see if all points have an agent close to them when the signal fires
                    for (int p = 0; p < environment.POIPosition.Count; p++)
                    {
                        if (r.location.manhattenDistance(new Point2D(environment.POIPosition[p].X, environment.POIPosition[p].Y)) < 15)
                        {
                            close[p] = true;
                        }
                    }

                    //checks to see if agents are being corrected (stopped) when the signal fires
                    if (!r.corrected)
                    {
                        allCorrected = false;
                    }

                    r_ind++;
                }
                r_ind    = 0;
                first    = true;
                allClose = close[0] && close[1] && close[2];
            }



            foreach (Robot r in ip.robots)
            {
                int    p_ind = 0;
                double d2    = r.location.manhattenDistance(environment.goal_point);
                if (first)
                {
                    if (!turned[r_ind])
                    {
                        if (origHeadings[r_ind] != r.heading)
                        {
                            turned[r_ind] = true;
                        }
                    }
                    //if(point.distance(environment.goal_point) <25.0) {
                    //      endList[i]=1.5;
                    //}
                    //else {
                    if (d2 <= 20)
                    {
                        el[r_ind]        = 1;
                        reachGoal[r_ind] = true;
                    }
                    else // without this else, the shaped score below overwrote the 1 awarded for reaching the goal
                    {
                        el[r_ind] = Math.Max(0.0, (origDist[r_ind] - d2) / 167.0);
                    }


                    //}
                }

                /*
                 * else {
                 *      System.Drawing.Point
                 * p=environment.POIPosition[r_ind];
                 *      Point2D point= new
                 * Point2D((double)p.X,(double)p.Y);
                 *      double d1=point.distance(r.location);
                 *      gl[r_ind]=Math.Max(0.0,(200.0-d1)/200.0);
                 * }
                 */

                foreach (System.Drawing.Point p in environment.POIPosition)
                {
                    Point2D point = new Point2D((double)p.X, (double)p.Y);
                    int     i     = p_ind;
                    double  d1    = point.manhattenDistance(r.location);

                    if (!first)
                    {
                        // Schrum: Magic numbers everywhere! I think robot has reached the POI if within 10 units
                        if (d1 <= 10)
                        {
                            gl[i] = 1;
                        }
                        else
                        {
                            // Otherwise, add (D - d)/D where D = 110 and d = d1 = distance from POI
                            gl[i] = Math.Max(gl[i], (110.0 - d1) / 110.0);
                        }
                    }

                    p_ind += 1;
                }

                sense[r_ind] = 1;
                foreach (ISensor s in r.sensors)
                {
                    if (s is RangeFinder)
                    {
                        if (s.get_value() < sense[r_ind])
                        {
                            sense[r_ind] = s.get_value();
                        }
                    }
                }


                r_ind += 1;
            }



            for (int i = 0; i < 3; i++)
            {
                gotoList[i]   += gl[i];
                endList[i]    += el[i];
                sensorList[i] += sense[i];
            }

            return;
        }
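Two shaping terms accumulate above: before the signal fires, each POI's score is the best proximity seen so far, (110 - d)/110 with full credit inside 10 units; after it fires, the goal score rewards progress made since the signal snapshot, (origDist - d2)/167, clamped at zero. A sketch of the progress half (`ProgressShaping` is a hypothetical helper; the constants come from the code above):

        // Sketch: reward progress toward the goal relative to a snapshot distance.
        static double ProgressShaping(double distanceAtSignal, double distanceNow)
        {
            if (distanceNow <= 20)
                return 1.0;                          // close enough counts as reached
            return Math.Max(0.0, (distanceAtSignal - distanceNow) / 167.0);
        }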
Code example #55
        double IFitnessFunction.calculate(SimulatorExperiment engine, Environment environment, instance_pack ip, out double[] objectives)
        {
            objectives = null;
            //Console.WriteLine("Fitness " + portionAlive);
            // Schrum: Debug

            /*
             * if (portionAlive == 2001)
             * {
             *  for (int i = 0; i < ip.robots[0].history.Count; i++)
             *  {
             *      Console.WriteLine(ip.robots[0].history[i].x + "\t" + ip.robots[0].history[i].y);
             *  }
             *  System.Windows.Forms.Application.Exit();
             *  System.Environment.Exit(1);
             * }
             */
            return(portionAlive);
        }
Code example #56
        //TODO this could be put into a Formation class or something that takes an environment as input
        public override void initializeRobots(AgentBrain agentBrain, Environment e, float headingNoise, float sensorNoise, float effectorNoise, instance_pack ip)
        {
            int num_bots;
            //Set up robots.
            //set up deltas for spacing the robots
            double dx, dy;

            if (overrideTeamFormation)
            {
                dx = Math.Cos(group_orientation / 180.0 * 3.14) * group_spacing;
                dy = Math.Sin(group_orientation / 180.0 * 3.14) * group_spacing;
            }
            else
            {
                dx = Math.Cos(e.group_orientation / 180.0 * 3.14) * e.robot_spacing;
                dy = Math.Sin(e.group_orientation / 180.0 * 3.14) * e.robot_spacing;
            }


            //record z-stack coordinates
            //float zstack = -1.0f;
            //float zdelta = 2.0f / (numberRobots - 1);
            AgentBrain   ab;
            List <Robot> rbts;
            double       _timestep = 0.0;

            if (ip == null)
            {
                ab        = agentBrain;
                rbts      = robots;
                num_bots  = numberRobots;
                _timestep = this.timestep;
            }
            else
            {
                ab        = ip.agentBrain;
                rbts      = new List <Robot>();
                ip.robots = rbts;
                num_bots  = ip.num_rbts;
                _timestep = ip.timestep;
            }

            rbts.Clear();

            int heading = overrideTeamFormation ? robot_heading : e.robot_heading;

            //add robots in their formation according to environment
            double nx, ny;

            for (int num = 0; num < num_bots; num++)
            {
                Robot r = RobotModelFactory.getRobotModel(robotModelName);
                r.rangefinderDensity = this.rangefinderSensorDensity;

                //TODO: Make a variable that controls this

                /*if (e.POIPosition.Count >= numberRobots)
                 * {
                 *  nx = e.POIPosition[num].X;
                 *  ny = e.POIPosition[num].Y;
                 * }
                 * else*/
                {
                    nx = e.start_point.x + num * dx;
                    ny = e.start_point.y + num * dy;
                }
                r.init(num, nx, ny,
                       heading / 180.0 * 3.14, ab, e, sensorNoise, effectorNoise, headingNoise, (float)_timestep);

                r.collisionPenalty = collisionPenalty;
                ab.registerRobot(r);

                //if (overrideDefaultSensorDensity)
                //    r.populateSensors(rangefinderSensorDensity);
                //else
                r.populateSensors();  //TODO populate sensors gets called multiple times. also in robot constructor

                if (ab.zcoordinates == null) // check ab, not the agentBrain parameter: with an instance_pack the two can differ
                {
                    r.zstack = 0;
                }
                else
                {
                    r.zstack = ab.zcoordinates[num];
                }
                //zstack;
                rbts.Add(r);

                //zstack += zdelta;
            }

            uint count = 0;

            foreach (ISensor sensor in rbts[0].sensors) // rbts, not robots: with an instance_pack the experiment-level list may be stale
            {
                if (sensor is RangeFinder)
                {
                    count++;
                }
            }

            //Console.WriteLine("Range finder count : " + count);
            //By convention the rangefinders are in layer zero so scale that layer
            // Schrum: HOWEVER, if there are no rangefinders, then we do not want to do this.
            // Schrum: Also disabled this when in the FourTasks domain, since it seems to cause problems.
            if (count != 0 && !(this.fitnessFunction is FourTasksFitness))
            { // don't rescale rangefinders if there are none.
                substrateDescription.setNeuronDensity(0, count, 1);
            }
            ab.updateInputDensity();
        }
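Robots are spaced along a line from the environment's `start_point`: an orientation in degrees and a spacing distance are converted into per-robot x/y deltas (note the 3.14 approximation of pi in the code above; Math.PI would be more precise). A minimal sketch of the placement math, with C# 7 tuple syntax and illustrative names:

        // Sketch: evenly space robots along a heading given in degrees.
        static (double x, double y) FormationPosition(double startX, double startY,
                                                      double orientationDegrees, double spacing, int index)
        {
            double dx = Math.Cos(orientationDegrees / 180.0 * Math.PI) * spacing;
            double dy = Math.Sin(orientationDegrees / 180.0 * Math.PI) * spacing;
            return (startX + index * dx, startY + index * dy);
        }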
Code example #57
 public virtual void initializeRobots(AgentBrain agentBrain, Environment e, float headingNoise, float sensorNoise, float effectorNoise, instance_pack x)
 {
 }
Code example #58
        // Schrum: Removed "internal" from this so I could override it ... probably not the best design decision, but it works.
        //protected internal virtual bool runEnvironment(Environment e, instance_pack ip, System.Threading.Semaphore sem)
        protected virtual bool runEnvironment(Environment e, instance_pack ip, System.Threading.Semaphore sem)
        {
            bool collide;

            if (ip == null)
            {
                elapsed += timestep;
                timeSteps++;
            }
            else
            {
                ip.timeSteps++;
                ip.elapsed += ip.timestep;
                ip.env      = e;
            }

            AgentBrain                ab;
            CollisionManager          cm;
            List <Robot>              rbts;
            IFitnessFunction          fit_fun;
            IBehaviorCharacterization beh_char;

            if (ip == null)
            {
                // Schrum2: Debug
                //Console.WriteLine("rbts from experiment");

                ip                  = new instance_pack();
                ab                  = agentBrain;
                cm                  = collisionManager;
                rbts                = robots;
                fit_fun             = fitnessFunction;
                beh_char            = behaviorCharacterization;
                ip.agentBrain       = agentBrain;
                ip.collisionManager = cm;
                ip.robots           = rbts;
                ip.ff               = fit_fun;
                ip.env              = environment;
                ip.bc               = beh_char;
                ip.timeSteps        = timeSteps;
                ip.timestep         = timestep;
            }
            else
            {
                // Schrum2: Debug
                //Console.WriteLine("rbts from ip");

                ab       = ip.agentBrain;
                cm       = ip.collisionManager;
                rbts     = ip.robots;
                fit_fun  = ip.ff;
                beh_char = ip.bc;
            }

            cm.SimulationStepCallback();

            // Schrum2: Debug
            //Console.WriteLine("rbts.Count = " + rbts.Count);

            for (int x = 0; x < rbts.Count; x++)
            {
                if (e.AOIRectangle.Contains((int)rbts[x].location.x, (int)rbts[x].location.y))
                {
                    rbts[x].autopilot = false;
                }
                rbts[x].updateSensors(e, rbts, cm);
                //TODO: Apply input noise?  Maybe should apply it at the sensors (setup with addRobots)
                //TODO unclean. experiment specific
                //if(!rbts[x].autopilot)
                rbts[x].doAction();
                //else
                //	ab.clearANNSignals(rbts[x].zstack);

                //Console.Write(rbts[x].location.x + "\t" + rbts[x].location.y + "\t");
            }
            //Console.WriteLine();

            ab.execute(sem);

            for (int x = 0; x < rbts.Count; x++)
            {
                collide = cm.RobotCollide(rbts[x]);
                rbts[x].collide_last = collide;
                if (collide)
                {
                    rbts[x].onCollision();
                }
            }

            if (ip != null)
            {
                if (beh_char != null)
                {
                    beh_char.update(this, ip);
                }
                fit_fun.update(this, e, ip);
            }
            return(false);
        }
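Each tick of `runEnvironment` follows a fixed order: advance the clock, run the collision manager's per-step callback, update sensors and queue each robot's action, execute the neural controllers, resolve collisions, then update behavior characterization and fitness. A condensed sketch of that skeleton; the `World` type and its members are illustrative, not the simulator's API:

        // Sketch: the per-tick update order used by runEnvironment.
        static void StepWorld(World world)
        {
            world.Elapsed += world.Timestep;           // 1. advance time
            world.Collisions.SimulationStepCallback(); // 2. collision-manager hook
            foreach (var robot in world.Robots)
            {
                robot.UpdateSensors(world);            // 3. sense
                robot.DoAction();                      // 4. queue the action
            }
            world.Brains.Execute();                    // 5. run the neural controllers
            foreach (var robot in world.Robots)
                if (world.Collisions.Check(robot))     // 6. resolve collisions
                    robot.OnCollision();
            world.Fitness.Update(world);               // 7. score this tick
        }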
Code example #59
        void IBehaviorCharacterization.update(SimulatorExperiment exp, instance_pack i)
        {
            //TODO reimplement
            //    double accum=0.0;
            //            //if the timestep is changed, this needs to be changed as well.  First # is 20/timestep, Second # is 1/timestep


            //    if (!(engine.timeSteps >= 50 && engine.timeSteps % 25 == 0))
            //        return;

            //    //double minDistance;
            //    //int closestAgentIndex, agentIndex, robotIndex;

            //    //check to see which robots are in, we don't want to calculate distance between agents outside the room
            //   /* bool allIn = true;
            //    bool[] isIn = new bool[engine.robots.Count];
            //    for (robotIndex = 0; robotIndex < engine.robots.Count; robotIndex++)
            //    {
            //        if (engine.robots[robotIndex].autopilot)
            //            return;
            //        //isIn[robotIndex] = false;
            //        else
            //            isIn[robotIndex] = environment.AOIRectangle.Contains((int)engine.robots[robotIndex].location.x, (int)engine.robots[robotIndex].location.y);
            //        if (!isIn[robotIndex])
            //            allIn = false;
            //    }

            //    if (!allIn)
            //        return;*/

            //    int dim = engine.grid.coarseness;
            //    for (int x = 0; x < dim; x++)
            //    {
            //        for (int y = 0; y < dim; y++)
            //        {
            //            int gx = (int)((double)x * engine.grid.gridx) + (int)(engine.grid.gridx / 2.0);
            //            int gy = (int)((double)y * engine.grid.gridy) + (int)(engine.grid.gridy / 2.0);
            //            if ((environment.AOIRectangle.Contains(gx, gy)))
            //                //if ((engine.grid.grid[x, y].viewed) != 0.0)
            //                 //   accum += 1.0;
            //                accum += engine.grid.grid[x,y].viewed;
            //        }
            //    }
            //    bc.Add(accum);


            ////now update the distance values

            //    /*for (robotIndex = 0; robotIndex < engine.robots.Count; robotIndex++)
            //    {
            //        if (!isIn[robotIndex])
            //            continue;
            //        closestAgentIndex = -1;
            //        minDistance = double.MaxValue;
            //        for (agentIndex = 0; agentIndex < engine.robots.Count; agentIndex++)
            //        {
            //            if (!isIn[agentIndex] || agentIndex == robotIndex)
            //                continue;
            //            double dist = engine.euclideanDistance(engine.robots[robotIndex], engine.robots[agentIndex]);
            //            if (dist < minDistance)
            //            {
            //                minDistance = dist;
            //                closestAgentIndex = agentIndex;
            //            }
            //        }

            //        if (closestAgentIndex == -1)
            //            continue;

            //        engine.robots[robotIndex].distanceClosestPOI.Add((minDistance)/564.0);

            //        //if (engine.timeSteps >= 20 * 20)
            //        //    engine.robots[robotIndex].distanceClosestPOI.Add(5 * (minDistance));

            //    }*/
        }
Code example #60
        // Schrum: Had to remove the internal from this too so that I could override it in AdversarialRoomClearingExperiment
        public override double evaluateNetwork(INetwork network, out SharpNeatLib.BehaviorType behavior, System.Threading.Semaphore sem)
        {
            // Schrum: Used when punishing links in substrate networks.
            int linksInSubstrate = 0;

            double fitness = multiplicativeFitness ? 1 : 0;

            behavior = new SharpNeatLib.BehaviorType();

            // Schrum: Why is there a magic number 6 here?
            double[] accumObjectives = new double[6];
            for (int i = 0; i < 6; i++)
            {
                accumObjectives[i] = 0.0;
            }

            // Schrum: Special handling for FourTasks domain again
            if (fitnessFunction is FourTasksFitness)
            {
                // Pass the experiment into the fitness function once so its parameters can be changed for each environment
                ((FourTasksFitness)fitnessFunction).setExperiment(this);
            }

            IFitnessFunction          fit_copy;
            IBehaviorCharacterization bc_copy;
            //CollisionManager cm;
            int envNum = 0;

            // Schrum: Separate fitness for each environment.
            // Should this be put in accumObjectives instead? What is that for?
            double[] environmentScores = new double[environmentList.Count];
            foreach (Environment env2 in environmentList)
            {
                //Console.WriteLine("====================Environment loop " + envNum);
                // Schrum: moved this here to deal with consistency issues
                instance_pack inst = new instance_pack();

                fit_copy = fitnessFunction.copy();
                if (behaviorCharacterization != null)
                {
                    bc_copy = behaviorCharacterization.copy();
                    inst.bc = bc_copy;
                }
                inst.ff = fit_copy;

                // Schrum: Just add special handling for FourTasks to get settings right
                if (inst.ff is FourTasksFitness)
                {
                    // FourTasks needs to know the current environment. Experiment reference provided earlier.
                    ((FourTasksFitness)inst.ff).setupFitness(envNum);
                }

                // Schrum: moved here from outside/before the loop. Most domains use the same timestep in each environment,
                // but the FourTasks domain is an exception to this, so the setting needed to be moved after setupFitness
                inst.timestep = timestep;
                double   tempFit   = 0;
                double[] fitnesses = new double[timesToRunEnvironments];
                SharpNeatLib.Maths.FastRandom evalRand = new SharpNeatLib.Maths.FastRandom(100);
                for (int evals = 0; evals < timesToRunEnvironments; evals++)
                {
                    int agent_trials = timesToRunEnvironments;

                    inst.num_rbts = this.numberRobots;

                    Environment env = env2.copy();

                    double evalTime = this.evaluationTime;

                    inst.eval = evals;
                    env.seed  = evals;
                    env.rng   = new SharpNeatLib.Maths.FastRandom(env.seed + 1);


                    int noise_lev = (int)this.sensorNoise + 1;

                    float new_sn = evalRand.NextUInt() % noise_lev;
                    float new_ef = evalRand.NextUInt() % noise_lev;


                    inst.agentBrain = new AgentBrain(homogeneousTeam, inst.num_rbts, substrateDescription, network, normalizeWeights, adaptableANN, modulatoryANN, multibrain, numBrains, evolveSubstrate, preferenceNeurons, forcedSituationalPolicyGeometry);
                    // Add up the links in each substrate brain
                    // Recalculates each evaluation, but resets to 0 each time.
                    linksInSubstrate = 0;
                    foreach (INetwork b in inst.agentBrain.multiBrains)
                    {
                        linksInSubstrate += b.NumLinks;
                    }
                    if (eval)
                    {
                        Console.WriteLine("Links In Substrate: " + linksInSubstrate);
                    }
                    initializeRobots(inst.agentBrain, env, headingNoise, new_sn, new_ef, inst);

                    inst.elapsed  = 0;
                    inst.timestep = this.timestep;
                    //Console.WriteLine(this.timestep);

                    inst.timeSteps        = 0;
                    inst.collisionManager = collisionManager.copy();
                    inst.collisionManager.Initialize(env, this, inst.robots);

                    try
                    {
                        //Console.WriteLine("Environment " + environmentName + " " + envNum);
                        while (inst.elapsed < evalTime)
                        {
                            // Schrum2: Only called for non-visual evaluations
                            //Console.WriteLine("MAE:" + inst.elapsed + "/" + evalTime);
                            runEnvironment(env, inst, sem);
                        }
                    }
                    catch (Exception e)
                    {
                        for (int x = 0; x < inst.robots.Count; x++)
                        {
                            for (int y = 0; y < inst.robots[x].history.Count; y++)
                            {
                                Console.WriteLine(x + " " + y + " " + inst.robots[x].history[y].x + " " + inst.robots[x].history[y].y);
                            }
                        }


                        behavior = new SharpNeatLib.BehaviorType();
                        Console.WriteLine(e.Message);
                        Console.WriteLine(e.StackTrace);
                        throw; // rethrow preserving the stack trace
                    }


                    double thisFit = inst.ff.calculate(this, env, inst, out behavior.objectives);

                    //Console.WriteLine(env.name + ": Fitness for one eval: " + thisFit);
                    fitnesses[evals] = thisFit;
                    tempFit         += thisFit;


                    if (behavior != null && behavior.objectives != null && inst.bc != null)
                    {
                        for (int i = 0; i < behavior.objectives.Length; i++)
                        {
                            accumObjectives[i] += behavior.objectives[i];
                        }

                        if (behavior.behaviorList == null)
                        {
                            behavior.behaviorList = new List <double>();
                        }
                        behavior.behaviorList.AddRange(inst.bc.calculate(this, inst));

                        inst.bc.reset();
                    }

                    inst.ff.reset();
                }
                // Schrum: Save each fitness separately
                environmentScores[envNum] = tempFit;
                envNum++; // go to the next environment

                // Schrum: Product fitness might encourage better performance in all environments
                if (multiplicativeFitness)
                {
                    fitness *= tempFit / timesToRunEnvironments;
                }
                else // default is sum/average
                {
                    fitness += tempFit / timesToRunEnvironments;
                }
            }

            // Punish CPPN modules: Must use with multiobjective option
            if (cppnModuleCost)
            {
                behavior.objectives = new double[] { fitness / environmentList.Count, -network.NumOutputModules };
            }
            // Punish CPPN links: Must use with multiobjective option
            else if (cppnLinkCost)
            {
                behavior.objectives = new double[] { fitness / environmentList.Count, -network.NumLinks };
            }
            // Punish substrate links: Must use with multiobjective option
            else if (substrateLinkCost)
            {
                behavior.objectives = new double[] { fitness / environmentList.Count, -linksInSubstrate };
            }
            else if (fitnessFunction is FourTasksFitness)
            { // Schrum: Special handling for FourTasksFitness ... could I just use accumObjectives?
                behavior.objectives = environmentScores;
            }
            else // Original default
            {
                behavior.objectives = accumObjectives;
            }
            behavior.modules        = network.NumOutputModules;
            behavior.cppnLinks      = network.NumLinks;
            behavior.substrateLinks = linksInSubstrate;

            //Console.WriteLine("Total: " + (fitness / environmentList.Count));
            // Schrum: Averaging helps normalize range when adding fitness values, but not when multiplying fitness values
            return(multiplicativeFitness ? fitness : fitness / environmentList.Count);
        }
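Additive aggregation rewards progress in any single environment, whereas the product stays near zero unless every environment scores well, which pressures generalist behavior; that is also why the product is not divided by the environment count. A small sketch contrasting the two (`Aggregate` is an illustrative name):

        // Sketch: additive vs. multiplicative aggregation of per-environment scores.
        static double Aggregate(double[] environmentScores, bool multiplicative)
        {
            double fitness = multiplicative ? 1.0 : 0.0;
            foreach (double score in environmentScores)
                fitness = multiplicative ? fitness * score : fitness + score;
            // Averaging normalizes the additive case; it would distort the product.
            return multiplicative ? fitness : fitness / environmentScores.Length;
        }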