protected virtual void addRandomPieceOfFood(AgentsEnvironment env)
{
    int x = random.Next(env.Width);
    int y = random.Next(env.Height);

    Food newFood = new Food(x, y);
    env.Add(newFood);
}
protected void removeEatenAndCreateNewFood(AgentsEnvironment env, IEnumerable<Food> eatenFood)
{
    foreach (Food food in eatenFood)
    {
        env.Remove(food);
        addRandomPieceOfFood(env);
    }
}
private List<Food> getFood(AgentsEnvironment env)
{
    List<Food> food = new List<Food>();
    foreach (Food f in env.getAgents().OfType<Food>())
    {
        food.Add(f);
    }
    return food;
}
private List<Agent> getFishes(AgentsEnvironment env)
{
    List<Agent> fishes = new List<Agent>();
    foreach (Agent agent in env.getAgents().OfType<Agent>())
    {
        fishes.Add(agent);
    }
    return fishes;
}
public virtual void notify(AgentsEnvironment env)
{
    var eatenFood = getEatenFood(env);
    score += eatenFood.Count;

    LinkedList<Agent> collidedFishes = getCollidedFishes(env);
    score -= collidedFishes.Count * 0.5;

    removeEatenAndCreateNewFood(env, eatenFood);
}
protected LinkedList<Food> getEatenFood(AgentsEnvironment env)
{
    LinkedList<Food> eatenFood = new LinkedList<Food>();

    foreach (Food food in getFood(env))
    {
        foreach (Agent fish in getFishes(env))
        {
            double distanceToFood = module(food.X - fish.X, food.Y - fish.Y);
            if (distanceToFood < minEatDistance)
            {
                eatenFood.AddLast(food);
                break;
            }
        }
    }
    return eatenFood;
}
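// The module(dx, dy) helper used above is not shown in this listing.
// A minimal sketch, assuming it returns the Euclidean length of the vector (dx, dy):
protected static double module(double vx, double vy)
{
    return Math.Sqrt((vx * vx) + (vy * vy));
}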
public static double Calculate(OptimizableNeuralNetwork chromosome)
{
    // TODO: maybe it's better to initialize these parameters in a constructor
    const int width = 200;
    const int height = 200;
    int agentsCount = 10;
    int foodCount = 5;
    int environmentIterations = 50;

    AgentsEnvironment env = new AgentsEnvironment(width, height);

    for (int i = 0; i < agentsCount; i++)
    {
        int x = random.Next(width);
        int y = random.Next(height);
        double direction = 2 * Math.PI * random.NextDouble();

        NeuralNetworkDrivenAgent agent = new NeuralNetworkDrivenAgent(x, y, direction);
        agent.setBrain(chromosome.Clone() as NeuralNetwork);
        env.Add(agent);
    }

    for (int i = 0; i < foodCount; i++)
    {
        Food food = newPieceOfFood(width, height);
        env.Add(food);
    }

    EatenFoodObserver tournamentListener = new FitnessObserver(width, height);
    env.AgentEvent += tournamentListener.notify;

    for (int i = 0; i < environmentIterations; i++)
    {
        env.timeStep();
    }

    double score = tournamentListener.getScore();
    return 1.0 / score;
}
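// A minimal usage sketch for the fitness function above: picking the best
// candidate out of a population. The population list and the selection loop
// are illustrative assumptions; only Calculate(...) comes from this listing.
// Because Calculate returns 1.0 / score, a lower value means a better brain.
public static OptimizableNeuralNetwork FindBestCandidate(IList<OptimizableNeuralNetwork> population)
{
    OptimizableNeuralNetwork best = null;
    double bestFitness = double.MaxValue;

    foreach (OptimizableNeuralNetwork candidate in population)
    {
        double fitness = Calculate(candidate);
        if (fitness < bestFitness)
        {
            bestFitness = fitness;
            best = candidate;
        }
    }
    return best;
}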
// Synchronization prevents a race condition when a new brain is set while
// the Interact method is running.
//
// TODO: Maybe consider a non-blocking technique. At the moment this simplest
// solution doesn't cause any noticeable overhead.
public override void Interact(AgentsEnvironment env)
{
    lock (ThisLock)
    {
        var nnInputs = createNnInputs(env);

        activateNeuralNetwork(nnInputs);

        int neuronsCount = brain.NeuronsCount;
        double deltaAngle = brain.GetAfterActivationSignal(neuronsCount - 2);
        double deltaSpeed = brain.GetAfterActivationSignal(neuronsCount - 1);

        deltaSpeed = avoidNaNAndInfinity(deltaSpeed);
        deltaAngle = avoidNaNAndInfinity(deltaAngle);

        Angle += normalizeDeltaAngle(deltaAngle);
        Speed = normalizeSpeed(Speed + deltaSpeed);

        Move();
    }
}
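// The avoidNaNAndInfinity helper referenced above is not shown here. A sketch,
// assuming the usual approach of replacing NaN and infinite network outputs
// with 0 so the agent simply keeps its current speed and heading:
private static double avoidNaNAndInfinity(double value)
{
    if (double.IsNaN(value) || double.IsInfinity(value))
    {
        return 0.0;
    }
    return value;
}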
private LinkedList<Agent> getCollidedFishes(AgentsEnvironment env)
{
    LinkedList<Agent> collidedFishes = new LinkedList<Agent>();

    List<Agent> allFishes = getFishes(env);
    int fishesCount = allFishes.Count;

    for (int i = 0; i < (fishesCount - 1); i++)
    {
        Agent firstFish = allFishes[i];
        for (int j = i + 1; j < fishesCount; j++)
        {
            Agent secondFish = allFishes[j];
            double distanceToSecondFish = module(firstFish.X - secondFish.X, firstFish.Y - secondFish.Y);
            if (distanceToSecondFish < maxFishesDistance)
            {
                collidedFishes.AddLast(secondFish);
            }
        }
    }
    return collidedFishes;
}
protected List<double> createNnInputs(AgentsEnvironment environment)
{
    // Find nearest food
    Food nearestFood = null;
    double nearestFoodDist = double.MaxValue;

    foreach (Food currFood in environment.getAgents().OfType<Food>())
    {
        // agent can see only ahead
        if (this.inSight(currFood))
        {
            double currFoodDist = distanceTo(currFood);
            if ((nearestFood == null) || (currFoodDist <= nearestFoodDist))
            {
                nearestFood = currFood;
                nearestFoodDist = currFoodDist;
            }
        }
    }

    // Find nearest agent
    Agent nearestAgent = null;
    double nearestAgentDist = maxAgentsDistance;

    foreach (Agent currAgent in environment.getAgents().OfType<Agent>())
    {
        // agent can see only ahead
        if ((this != currAgent) && (this.inSight(currAgent)))
        {
            double currAgentDist = this.distanceTo(currAgent);
            if (currAgentDist <= nearestAgentDist)
            {
                nearestAgent = currAgent;
                nearestAgentDist = currAgentDist;
            }
        }
    }

    var nnInputs = new List<double>();

    if (nearestFood != null)
    {
        double foodDirectionVectorX = nearestFood.X - X;
        double foodDirectionVectorY = nearestFood.Y - Y;

        // left/right cos
        double foodDirectionCosTeta =
            Math.Sign(pseudoScalarProduct(Rx, Ry, foodDirectionVectorX, foodDirectionVectorY))
            * cosTeta(Rx, Ry, foodDirectionVectorX, foodDirectionVectorY);

        nnInputs.Add(FOOD);
        nnInputs.Add(nearestFoodDist);
        nnInputs.Add(foodDirectionCosTeta);
    }
    else
    {
        nnInputs.Add(EMPTY);
        nnInputs.Add(0.0);
        nnInputs.Add(0.0);
    }

    if (nearestAgent != null)
    {
        double agentDirectionVectorX = nearestAgent.X - X;
        double agentDirectionVectorY = nearestAgent.Y - Y;

        // left/right cos
        double agentDirectionCosTeta =
            Math.Sign(pseudoScalarProduct(Rx, Ry, agentDirectionVectorX, agentDirectionVectorY))
            * cosTeta(Rx, Ry, agentDirectionVectorX, agentDirectionVectorY);

        nnInputs.Add(AGENT);
        nnInputs.Add(nearestAgentDist);
        nnInputs.Add(agentDirectionCosTeta);
    }
    else
    {
        nnInputs.Add(EMPTY);
        nnInputs.Add(0.0);
        nnInputs.Add(0.0);
    }

    return nnInputs;
}
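// The vector helpers used in createNnInputs are not shown in this listing.
// Sketches, assuming pseudoScalarProduct is the z-component of the 2D cross
// product (its sign says whether the target lies to the left or right of the
// heading vector (Rx, Ry)) and cosTeta is the cosine of the angle between
// the two vectors:
protected static double pseudoScalarProduct(double ax, double ay, double bx, double by)
{
    return (ax * by) - (ay * bx);
}

protected static double cosTeta(double ax, double ay, double bx, double by)
{
    double lengthA = Math.Sqrt((ax * ax) + (ay * ay));
    double lengthB = Math.Sqrt((bx * bx) + (by * by));
    double dotProduct = (ax * bx) + (ay * by);
    return dotProduct / (lengthA * lengthB);
}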
public abstract void Interact(AgentsEnvironment env);
protected void move(AgentsEnvironment env)
{
    // With angle == 0 the agent moves along the +Y axis
    X += -Math.Sin(angle) * speed;
    Y += Math.Cos(angle) * speed;
}
public override void Interact(AgentsEnvironment env)
{
    move(env);
}
protected void addRandomPieceOfFood(AgentsEnvironment env)
{
    Food newFood = newPieceOfFood(width, height);
    env.Add(newFood);
}
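// newPieceOfFood(width, height) is referenced in Calculate and in the overload
// above but not shown. A sketch, assuming it simply places food at a random
// position inside the environment:
protected static Food newPieceOfFood(int width, int height)
{
    int x = random.Next(width);
    int y = random.Next(height);
    return new Food(x, y);
}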