public PieSliceSensorArray(Robot o)
{
    signalsSensors = new List<SignalSensor>(4);
    radarAngles1 = new List<float>();
    radarAngles2 = new List<float>();
    //radar = new List<float>();

    //TODO make the number of slices adjustable
    //define the radar sensors
    radarAngles1.Add(315.0f); radarAngles2.Add(405.0f);
    radarAngles1.Add(45.0f);  radarAngles2.Add(135.0f);
    radarAngles1.Add(135.0f); radarAngles2.Add(225.0f);
    radarAngles1.Add(225.0f); radarAngles2.Add(315.0f);

    for (int i = 0; i < 4; i++)
    {
        SignalSensor s = new SignalSensor(o);
        signalsSensors.Add(s);
        o.sensors.Add(signalsSensors[i]);
    }
    owner = o;
}
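// The TODO above asks for an adjustable slice count. A minimal sketch of how the
// constructor could be generalized, assuming the same fields and SignalSensor(Robot)
// constructor used above; numSlices = 4 with startAngle = 315.0f reproduces the
// hard-coded slices (315-405, 45-135, 135-225, 225-315). This overload is
// hypothetical and not part of the original class.
public PieSliceSensorArray(Robot o, int numSlices, float startAngle)
{
    signalsSensors = new List<SignalSensor>(numSlices);
    radarAngles1 = new List<float>();
    radarAngles2 = new List<float>();

    float sliceWidth = 360.0f / numSlices;
    for (int i = 0; i < numSlices; i++)
    {
        // Each slice spans [start, start + width); the end may exceed 360, as in the
        // hard-coded 315-405 slice, so downstream code is assumed to wrap angles.
        float start = (startAngle + i * sliceWidth) % 360.0f;
        radarAngles1.Add(start);
        radarAngles2.Add(start + sliceWidth);

        SignalSensor s = new SignalSensor(o);
        signalsSensors.Add(s);
        o.sensors.Add(s);
    }
    owner = o;
}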
void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
{
    if (!(ip.timeSteps % (int)(1 / ip.timestep) == 0))
    {
        //grid.decay_viewed(0);
        return;
    }

    bool all_in = true;
    double a_accum = 0.00000001;
    double a_saccum = 0.00000001;

    foreach (Robot r in ip.robots)
    {
        if (!r.autopilot)
        {
            foreach (ISensor s in r.sensors)
            {
                if (s is SignalSensor)
                {
                    SignalSensor ss = (SignalSensor)s;
                    double val = ss.get_value();
                    val += 0.05;
                    if (val > 1.0)
                    {
                        val = 1.0;
                    }
                    ss.setSignal(val);
                }
            }
        }

        if ((environment.AOIRectangle.Contains((int)r.location.x, (int)r.location.y)))
        {
            a_accum += 1.0 / (nearest(ip, r, environment));
            if (r.corrected)
            {
                a_saccum += nearest(ip, r, environment);
            }
            //else
            //    a_saccum += 1.0;
        }
        else
        {
            all_in = false;
        }
    }

    if (all_in)
    {
        accum += ((double)ip.robots.Count) / (a_accum);
        stopaccum += a_saccum / ((double)ip.robots.Count);
    }
}
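// nearest(...) is not defined in this section. A minimal sketch consistent with how it
// is used above (1.0 / nearest rewards spreading out; a_saccum accumulates the raw
// distance when the robot is corrected/stopped): the distance from r to the closest
// other robot. This is an assumption about the helper, not its actual definition.
private double nearest(instance_pack ip, Robot r, Environment environment)
{
    double best = double.MaxValue;
    foreach (Robot other in ip.robots)
    {
        if (other == r)
        {
            continue; // skip the robot itself
        }
        double d = r.location.distance(other.location);
        if (d < best)
        {
            best = d;
        }
    }
    // Guard against a zero distance so 1.0 / nearest(...) stays finite.
    return Math.Max(best, 0.00000001);
}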
void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
{
    // HACK: Both environments are the same, but this hack allows one to treat the food as poison
    bool poison = !environment.name.Equals("ENV_dual_task1.xml");

    if (Experiment.multibrain && !Experiment.preferenceNeurons && Experiment.numBrains == 2)
    {
        if (!poison) //forage
        {
            ip.agentBrain.switchBrains(0);
        }
        else //poison
        {
            ip.agentBrain.switchBrains(1);
        }
    }

    //For food gathering
    if (ip.timeSteps == 1)
    {
        environment.goal_point.x = environment.POIPosition[0].X;
        environment.goal_point.y = environment.POIPosition[0].Y;
        collectedFood = 0;
        POINr = 0;
    }

    // Schrum: Last sensor is for detecting when food/poison is eaten
    Robot r = ip.robots[0]; // There should be only one robot in this task
    SignalSensor s = (SignalSensor)r.sensors[r.sensors.Count - 1];
    float d = (float)ip.robots[0].location.distance(environment.goal_point);
    if (d < 20.0f)
    {
        // Need to detect when food or poison is eaten
        s.setSignal(poison ? -1.0 : 1.0);
        collectedFood++;
        POINr++;
        if (POINr > 3)
        {
            POINr = 0;
        }
        environment.goal_point.x = environment.POIPosition[POINr].X;
        environment.goal_point.y = environment.POIPosition[POINr].Y;
    }
    else
    {
        // Nothing eaten, so food/poison sensor is 0
        s.setSignal(0.0);
    }
}
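// The goal point is moved onto a POI both in the timeSteps == 1 reset and after each
// item is eaten. A minimal sketch of a helper that factors out that repeated pattern,
// assuming the same environment fields used above; hypothetical, not present in the
// original class.
private void moveGoalToPOI(Environment environment, int index)
{
    environment.goal_point.x = environment.POIPosition[index].X;
    environment.goal_point.y = environment.POIPosition[index].Y;
}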
void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
{
    grid = ((GridCollision)(ip.collisionManager)).grid;

    if (!(ip.timeSteps % (int)(1 / Experiment.timestep) == 0))
    {
        return;
    }

    int dim = grid.coarseness;
    for (int x = 0; x < dim; x++)
    {
        for (int y = 0; y < dim; y++)
        {
            int gx = (int)((double)x * grid.gridx) + (int)(grid.gridx / 2.0);
            int gy = (int)((double)y * grid.gridy) + (int)(grid.gridy / 2.0);
            if ((environment.AOIRectangle.Contains(gx, gy)))
            {
                accum += grid.grid[x, y].viewed;
                stop_accum += grid.grid[x, y].viewed2;
            }
        }
    }

    foreach (Robot r in ip.robots)
    {
        if (!r.autopilot)
        {
            foreach (ISensor s in r.sensors)
            {
                if (s is SignalSensor)
                {
                    SignalSensor ss = (SignalSensor)s;
                    double val = ss.get_value();
                    val += 0.05;
                    if (val > 1.0)
                    {
                        val = 1.0;
                    }
                    ss.setSignal(val);
                }
            }
        }
    }

    grid.decay_viewed(0);
    //grid.decay_viewed(.95);
}
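// The "ramp every SignalSensor by 0.05 toward 1.0" loop above is repeated verbatim in
// several of these update methods. A minimal sketch of that logic as a shared helper,
// assuming the same Robot/ISensor/SignalSensor types; hypothetical, not part of the
// original code.
private static void rampSignalSensors(Robot r)
{
    if (r.autopilot)
    {
        return; // autopiloted robots keep their signal untouched, as in the originals
    }
    foreach (ISensor s in r.sensors)
    {
        if (s is SignalSensor)
        {
            SignalSensor ss = (SignalSensor)s;
            // Increase the signal a little on each scored step, saturating at 1.0
            double val = Math.Min(1.0, ss.get_value() + 0.05);
            ss.setSignal(val);
        }
    }
}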
void IFitnessFunction.update(SimulatorExperiment Experiment, Environment environment, instance_pack ip)
{
    if (!(Experiment.timeSteps % (int)(1 / Experiment.timestep) == 0))
    {
        //grid.decay_viewed(0);
        return;
    }

    foreach (Robot r in ip.robots)
    {
        if (!r.autopilot)
        {
            foreach (ISensor s in r.sensors)
            {
                if (s is SignalSensor)
                {
                    SignalSensor ss = (SignalSensor)s;
                    double val = ss.get_value();
                    val += 0.05;
                    if (val > 1.0)
                    {
                        val = 1.0;
                    }
                    ss.setSignal(val);
                }
            }
        }
    }

    double x1 = (double)environment.AOIRectangle.Left;
    double y1 = (double)environment.AOIRectangle.Top;
    double x2 = (double)environment.AOIRectangle.Right;
    double y2 = (double)environment.AOIRectangle.Bottom;
    int steps = 10;

    // Score coverage along the AOI border. Note that the last two calls retrace the
    // right and top edges in reverse rather than covering the bottom and left edges.
    accum += test_interpolation(ip, x1, y1, x2, y1, steps);
    accum += test_interpolation(ip, x2, y1, x2, y2, steps);
    accum += test_interpolation(ip, x2, y2, x2, y1, steps);
    accum += test_interpolation(ip, x2, y1, x1, y1, steps);
}
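// test_interpolation(...) is not defined in this section. A minimal sketch consistent
// with how it is called above: sample 'steps' points along the segment (x1,y1)-(x2,y2)
// and credit each sample by how close the nearest robot is. The 100.0 falloff scale is
// arbitrary; this is an assumed implementation for illustration, not the original helper.
private double test_interpolation(instance_pack ip, double x1, double y1, double x2, double y2, int steps)
{
    double total = 0.0;
    for (int i = 0; i <= steps; i++)
    {
        double t = (double)i / steps;
        Point2D sample = new Point2D(x1 + t * (x2 - x1), y1 + t * (y2 - y1));

        // Distance from the sample point to the nearest robot
        double best = double.MaxValue;
        foreach (Robot r in ip.robots)
        {
            double d = sample.distance(r.location);
            if (d < best)
            {
                best = d;
            }
        }
        // Closer robots contribute more to the coverage score
        total += Math.Max(0.0, (100.0 - best) / 100.0);
    }
    return total;
}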
void IFitnessFunction.update(SimulatorExperiment engine, Environment environment, instance_pack ip)
{
    if (!(ip.timeSteps % (int)(1 / engine.timestep) == 0))
    {
        //grid.decay_viewed(0);
        return;
    }

    double[] gl = new double[3];
    double[] el = new double[3];
    double[] sense = new double[3];
    int r_ind = 0;

    if (!first && (ip.timeSteps * engine.timestep) > engine.evaluationTime / 2.0)
    {
        allCorrected = true;
        bool[] close = new bool[3];

        // Schrum: Brains don't get switched with preference neurons ... all need to be evaluated
        if (!ip.agentBrain.preferenceNeurons)
        {
            ip.agentBrain.switchBrains();
        }

        foreach (Robot r in ip.robots)
        {
            //Schrum: Debugging
            //Console.WriteLine("Robot id: " + r.id + ", " + r.name);
            //Console.WriteLine("r.sensors.Count=" + r.sensors.Count);
            //Console.WriteLine("Last sensor type: " + r.sensors[r.sensors.Count - 1].GetType());
            if (!ip.agentBrain.multipleBrains || // Schrum: Original condition
                (r.sensors[r.sensors.Count - 1] is SignalSensor)) // Schrum: Broader condition that also works with pref neurons
            {
                //Schrum: Debugging
                //Console.WriteLine("Switched signal at " + (ip.timeSteps * engine.timestep));
                SignalSensor s = (SignalSensor)r.sensors[r.sensors.Count - 1];
                s.setSignal(1.0);
            }

            origDist[r_ind] = r.location.distance(environment.goal_point);
            origHeadings[r_ind] = r.heading;

            //checks to see if all points have an agent close to them when the signal fires
            for (int p = 0; p < environment.POIPosition.Count; p++)
            {
                if (r.location.manhattenDistance(new Point2D(environment.POIPosition[p].X, environment.POIPosition[p].Y)) < 15)
                {
                    close[p] = true;
                }
            }

            //checks to see if agents are being corrected (stopped) when the signal fires
            if (!r.corrected)
            {
                allCorrected = false;
            }
            r_ind++;
        }
        r_ind = 0;
        first = true;
        allClose = close[0] && close[1] && close[2];
    }

    foreach (Robot r in ip.robots)
    {
        int p_ind = 0;
        double d2 = r.location.manhattenDistance(environment.goal_point);
        if (first)
        {
            if (!turned[r_ind])
            {
                if (origHeadings[r_ind] != r.heading)
                {
                    turned[r_ind] = true;
                }
            }
            //if(point.distance(environment.goal_point) < 25.0) {
            //    endList[i] = 1.5;
            //}
            //else {
            if (d2 <= 20)
            {
                el[r_ind] = 1;
                reachGoal[r_ind] = true;
            }
            // Note: this assignment overwrites the el[r_ind] = 1 set just above when the goal is reached
            el[r_ind] = Math.Max(0.0, (origDist[r_ind] - d2) / 167.0);
            //}
        }
        /*
        else {
            System.Drawing.Point p = environment.POIPosition[r_ind];
            Point2D point = new Point2D((double)p.X, (double)p.Y);
            double d1 = point.distance(r.location);
            gl[r_ind] = Math.Max(0.0, (200.0 - d1) / 200.0);
        }
        */

        foreach (System.Drawing.Point p in environment.POIPosition)
        {
            Point2D point = new Point2D((double)p.X, (double)p.Y);
            int i = p_ind;
            double d1 = point.manhattenDistance(r.location);
            if (!first)
            {
                // Schrum: Magic numbers everywhere! I think robot has reached the POI if within 10 units
                if (d1 <= 10)
                {
                    gl[i] = 1;
                }
                else
                {
                    // Otherwise, add (D - d)/D where D = 110 and d = d1 = distance from POI
                    gl[i] = Math.Max(gl[i], (110.0 - d1) / 110.0);
                }
            }
            p_ind += 1;
        }

        sense[r_ind] = 1;
        foreach (ISensor s in r.sensors)
        {
            if (s is RangeFinder)
            {
                if (s.get_value() < sense[r_ind])
                {
                    sense[r_ind] = s.get_value();
                }
            }
        }
        r_ind += 1;
    }

    for (int i = 0; i < 3; i++)
    {
        gotoList[i] += gl[i];
        endList[i] += el[i];
        sensorList[i] += sense[i];
    }
    return;
}
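// Point2D.manhattenDistance(...) is used above but not shown in this section. A minimal
// sketch of what it is assumed to compute (L1 distance, |dx| + |dy|), keeping the
// existing spelling; the real Point2D class presumably already provides this method.
public double manhattenDistance(Point2D other)
{
    return Math.Abs(x - other.x) + Math.Abs(y - other.y);
}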
void IBehaviorCharacterization.update(SimulatorExperiment engine)
{
    Environment environment = engine.environment;

    if (!(engine.timeSteps % 25 == 0)) //(int)(0.5 / engine.timestep) == 0))
    {
        //grid.decay_viewed(0);
        return;
    }

    double[] gl = new double[3];
    double[] el = new double[3];
    for (int i = 0; i < engine.robots.Count; i++)
    {
        gl[i] = 0.0;
    }
    for (int i = 0; i < engine.robots.Count; i++)
    {
        el[i] = 0.0;
    }

    int r_ind = 0;
    if (!last && (engine.timeSteps * engine.timestep) > 14.0)
    {
        foreach (Robot r in engine.robots)
        {
            bc.Add(r.location.x);
            bc.Add(r.location.y);
            last = true;
        }
    }

    if (!first && (engine.timeSteps * engine.timestep) > 6.0)
    {
        foreach (Robot r in engine.robots)
        {
            SignalSensor s = (SignalSensor)r.sensors[r.sensors.Count - 1];
            s.setSignal(1.0);
            bc.Add(r.location.x);
            bc.Add(r.location.y);
            origDist[r_ind] = r.location.distance(environment.goal_point);
            r_ind++;
        }
        first = true;
        r_ind = 0;
    }

    foreach (Robot r in engine.robots)
    {
        double d2 = r.location.distance(environment.goal_point);
        if (first)
        {
            //if(point.distance(environment.goal_point) < 25.0) {
            //    endList[i] = 1.5;
            //}
            //else {
            el[r_ind] = Math.Max(el[r_ind], (origDist[r_ind] - d2) / 200.0);
            //}
        }
        /*
        else {
            System.Drawing.Point p = environment.POIPosition[r_ind];
            Point2D point = new Point2D((double)p.X, (double)p.Y);
            double d1 = point.distance(r.location);
            gl[r_ind] = Math.Max(0.0, (200.0 - d1) / 200.0);
        }
        */
        int p_ind = 0;
        foreach (System.Drawing.Point p in environment.POIPosition)
        {
            Point2D point = new Point2D((double)p.X, (double)p.Y);
            int i = p_ind;
            double d1 = point.distance(r.location);
            if (!first)
            {
                gl[i] = Math.Max(gl[i], (200.0 - d1) / 200.0);
            }
            p_ind += 1;
        }
        r_ind += 1;
    }

    for (int i = 0; i < 3; i++)
    {
        gotoList[i] += gl[i];
        endList[i] += el[i];
    }
    return;
}
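// The bc list built above (robot x/y positions captured when the signal fires and again
// near the end of the run) forms the behavior characterization vector. A minimal sketch
// of how two such vectors might be compared, e.g. for a novelty-style distance; this is
// an illustrative assumption, not part of the original interface.
private static double behaviorDistance(List<double> a, List<double> b)
{
    int n = Math.Min(a.Count, b.Count);
    double sum = 0.0;
    for (int i = 0; i < n; i++)
    {
        double diff = a[i] - b[i];
        sum += diff * diff;
    }
    return Math.Sqrt(sum); // Euclidean distance between the two characterizations
}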
void IFitnessFunction.update(SimulatorExperiment engine, Environment environment, instance_pack ip)
{
    if (!(ip.timeSteps % (int)(1 / engine.timestep) == 0))
    {
        //grid.decay_viewed(0);
        return;
    }

    double[] gl = new double[3];
    double[] el = new double[3];
    double[] sense = new double[3];
    int r_ind = 0;

    if (!first && (ip.timeSteps * engine.timestep) > engine.evaluationTime / 2.0)
    {
        allCorrected = true;
        bool[] close = new bool[3];
        ip.agentBrain.switchBrains();

        foreach (Robot r in ip.robots)
        {
            if (!ip.agentBrain.multipleBrains)
            {
                SignalSensor s = (SignalSensor)r.sensors[r.sensors.Count - 1];
                s.setSignal(1.0);
            }

            origDist[r_ind] = r.location.distance(environment.goal_point);
            origHeadings[r_ind] = r.heading;

            //checks to see if all points have an agent close to them when the signal fires
            for (int p = 0; p < environment.POIPosition.Count; p++)
            {
                if (r.location.manhattenDistance(new Point2D(environment.POIPosition[p].X, environment.POIPosition[p].Y)) < 15)
                {
                    close[p] = true;
                }
            }

            //checks to see if agents are being corrected (stopped) when the signal fires
            if (!r.corrected)
            {
                allCorrected = false;
            }
            r_ind++;
        }
        r_ind = 0;
        first = true;
        allClose = close[0] && close[1] && close[2];
    }

    foreach (Robot r in ip.robots)
    {
        int p_ind = 0;
        double d2 = r.location.manhattenDistance(environment.goal_point);
        if (first)
        {
            if (!turned[r_ind])
            {
                if (origHeadings[r_ind] != r.heading)
                {
                    turned[r_ind] = true;
                }
            }
            //if(point.distance(environment.goal_point) < 25.0) {
            //    endList[i] = 1.5;
            //}
            //else {
            if (d2 <= 20)
            {
                el[r_ind] = 1;
                reachGoal[r_ind] = true;
            }
            // Note: this assignment overwrites the el[r_ind] = 1 set just above when the goal is reached
            el[r_ind] = Math.Max(0.0, (origDist[r_ind] - d2) / 167.0);
            //}
        }
        /*
        else {
            System.Drawing.Point p = environment.POIPosition[r_ind];
            Point2D point = new Point2D((double)p.X, (double)p.Y);
            double d1 = point.distance(r.location);
            gl[r_ind] = Math.Max(0.0, (200.0 - d1) / 200.0);
        }
        */

        foreach (System.Drawing.Point p in environment.POIPosition)
        {
            Point2D point = new Point2D((double)p.X, (double)p.Y);
            int i = p_ind;
            double d1 = point.manhattenDistance(r.location);
            if (!first)
            {
                if (d1 <= 10)
                {
                    gl[i] = 1;
                }
                else
                {
                    gl[i] = Math.Max(gl[i], (110.0 - d1) / 110.0);
                }
            }
            p_ind += 1;
        }

        sense[r_ind] = 1;
        foreach (ISensor s in r.sensors)
        {
            if (s is RangeFinder)
            {
                if (s.get_value() < sense[r_ind])
                {
                    sense[r_ind] = s.get_value();
                }
            }
        }
        r_ind += 1;
    }

    for (int i = 0; i < 3; i++)
    {
        gotoList[i] += gl[i];
        endList[i] += el[i];
        sensorList[i] += sense[i];
    }
    return;
}
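// Both this method and the similar one above record the smallest RangeFinder reading per
// robot into sense[]. A minimal sketch of that logic as a helper, assuming the same
// ISensor/RangeFinder types and normalized readings in [0, 1]; hypothetical, not part of
// the original class.
private static double minRangeFinderReading(Robot r)
{
    double min = 1.0; // matches the sense[r_ind] = 1 initialization above
    foreach (ISensor s in r.sensors)
    {
        if (s is RangeFinder && s.get_value() < min)
        {
            min = s.get_value();
        }
    }
    return min;
}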