public void MyTestInitialize()
{
    int height = 500;
    int width = 500;

    // A single plant species containing one plant.
    List<PlantSpecies> species = new List<PlantSpecies>()
    {
        new PlantSpecies(0) { Radius = 5, Reward = 100, Count = 1 }
    };
    List<Predator> predators = new List<Predator>();

    // One agent with an empty action list, placed at the center of the 500x500 world.
    _agent = new ActionListAgent(0, new List<float[]>()) { X = 250, Y = 250 };
    List<ForagingAgent> agents = new List<ForagingAgent>() { _agent };

    _world = new World(agents, height, width, species, predators, PlantLayoutStrategies.Uniform);

    // Set the horizon before building the sensor lookup so the precomputed
    // table is based on the horizon the agents will actually use.
    _world.AgentHorizon = 50;
    _world.SensorLookup = new SensorDictionary((int)_world.AgentHorizon, _world.Width, _world.Height);
}
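// A minimal usage sketch, not part of the original fixture: it assumes MSTest's
// [TestMethod]/Assert and that X, Y, Height, Width and AgentHorizon are readable
// after construction. It only checks the wiring done in MyTestInitialize above.
[TestMethod]
public void Initialize_SetsUpWorldAndAgent()
{
    Assert.AreEqual(250, (int)_agent.X);
    Assert.AreEqual(250, (int)_agent.Y);
    Assert.AreEqual(500, _world.Height);
    Assert.AreEqual(500, _world.Width);
    Assert.AreEqual(50, (int)_world.AgentHorizon);
}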
public DiversityAnalyzer(World world)
{
    _world = world;

    // Sample a fixed set of random states (position, orientation, velocity)
    // at which candidate behaviors will be compared. All three arrays are
    // sized by numlocations so the sampling loop cannot run out of bounds.
    locations = new int[numlocations][];
    orientations = new int[numlocations];
    velocities = new float[numlocations];
    for (int i = 0; i < numlocations; i++)
    {
        locations[i] = new int[2];
        locations[i][0] = _random.Next() % world.Width;
        locations[i][1] = _random.Next() % world.Height;
        orientations[i] = _random.Next() % 360;
        velocities[i] = (float)_random.NextDouble() * 5f;
    }
}
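// Illustrative sketch only, not the analyzer's actual API: one common way such
// sampled states are used is to record each genome's response in every sampled
// state and summarize diversity as the mean per-state variance across the
// population. The responses array layout below is an assumption.
private static double MeanPerStateVariance(double[][] responses) // responses[genome][state]
{
    int numGenomes = responses.Length;
    int numStates = responses[0].Length;
    double total = 0.0;
    for (int s = 0; s < numStates; s++)
    {
        // Mean response of the population in sampled state s.
        double mean = 0.0;
        for (int g = 0; g < numGenomes; g++)
            mean += responses[g][s];
        mean /= numGenomes;

        // Population variance in this state.
        double variance = 0.0;
        for (int g = 0; g < numGenomes; g++)
        {
            double d = responses[g][s] - mean;
            variance += d * d;
        }
        total += variance / numGenomes;
    }
    return total / numStates;
}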
/// <summary>
/// Creates a new Q-Learning agent.
/// </summary>
/// <param name="id">The unique ID of this agent.</param>
/// <param name="speciesId">The ID of the species this agent belongs to.</param>
/// <param name="brain">The neural network value function for this agent. It should have (2 + # of sensors) input nodes and 1 output node.</param>
/// <param name="agentsNavigate">Whether agents are capable of navigating.</param>
/// <param name="agentsHide">Whether agents are capable of hiding.</param>
/// <param name="numOrientationActions">The number of buckets to discretize the orientation action space into.</param>
/// <param name="numVelocityActions">The number of buckets to discretize the velocity action space into.</param>
/// <param name="world">The world this agent will be evaluated in.</param>
public QLearningAgent(int id, int speciesId, IBlackBox brain, bool agentsNavigate, bool agentsHide,
                      int numOrientationActions, int numVelocityActions, World world)
    : base(id, speciesId, brain, agentsNavigate, agentsHide)
{
    Debug.Assert(brain.OutputCount == 1, "Incorrect number of outputs in neural network!");

    _numVelocityActions = numVelocityActions;
    _numOrientationActions = numOrientationActions;
    _random = new Random();
    _prevState = new double[brain.InputCount];
    _observedValue = new double[1];

    // Listen for plant-eaten events so rewards can be observed.
    world.PlantEaten += new World.PlantEatenHandler(world_PlantEaten);

    MaxReward = 200;
    LearningRate = DEFAULT_LEARNING_RATE;
    DiscountFactor = DEFAULT_DISCOUNT_FACTOR;
    Epsilon = DEFAULT_EPSILON;

    // The backprop learning rate is equivalent to the Q-Learning learning rate.
    ((FastCyclicNetwork)Brain).BackpropLearningRate = LearningRate;
}
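// Illustrative sketch only (an assumption, not the real QLearningAgent step logic):
// it shows how the fields wired up above are typically used. SelectAction,
// PredictValue and UpdateValue are placeholder names, not the actual overrides.
private double[] SelectAction(IList<double[]> candidateStates)
{
    // Epsilon-greedy exploration: with probability Epsilon take a random
    // candidate, otherwise take the one the value network scores highest.
    if (_random.NextDouble() < Epsilon)
        return candidateStates[_random.Next(candidateStates.Count)];

    double[] best = candidateStates[0];
    double bestValue = double.MinValue;
    foreach (var state in candidateStates)
    {
        double value = PredictValue(state);
        if (value > bestValue)
        {
            bestValue = value;
            best = state;
        }
    }
    return best;
}

private double PredictValue(double[] state)
{
    // One forward pass of the value network (SharpNEAT IBlackBox API).
    for (int i = 0; i < state.Length; i++)
        Brain.InputSignalArray[i] = state[i];
    Brain.Activate();
    return Brain.OutputSignalArray[0];
}

private void UpdateValue(double reward, double maxNextValue)
{
    // Q-learning target for the previous state, with the reward normalized by
    // MaxReward; a backprop pass of Brain toward _observedValue (using the
    // LearningRate set in the constructor) would complete the update.
    _observedValue[0] = reward / MaxReward + DiscountFactor * maxNextValue;
    // ... train Brain on (_prevState, _observedValue) here ...
}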
/// <summary>
/// Initialize the experiment with some optional XML configuration data.
/// </summary>
public void Initialize(string name, XmlElement xmlConfig)
{
    _name = name;
    _populationSize = XmlUtils.GetValueAsInt(xmlConfig, "PopulationSize");
    _specieCount = XmlUtils.GetValueAsInt(xmlConfig, "SpecieCount");
    _activationScheme = ExperimentUtils.CreateActivationScheme(xmlConfig, "Activation");
    _complexityRegulationStr = XmlUtils.TryGetValueAsString(xmlConfig, "ComplexityRegulationStrategy");
    _complexityThreshold = XmlUtils.TryGetValueAsInt(xmlConfig, "ComplexityThreshold");
    _description = XmlUtils.TryGetValueAsString(xmlConfig, "Description");
    _timeStepsPerGeneration = (ulong)XmlUtils.GetValueAsInt(xmlConfig, "TimeStepsPerGeneration");
    _stepReward = XmlUtils.GetValueAsInt(xmlConfig, "StepReward");
    _agentType = (AgentTypes)Enum.Parse(typeof(AgentTypes), XmlUtils.TryGetValueAsString(xmlConfig, "AgentType"));
    _plantLayout = (PlantLayoutStrategies)Enum.Parse(typeof(PlantLayoutStrategies), XmlUtils.TryGetValueAsString(xmlConfig, "PlantLayout"));
    _paradigm = (EvolutionParadigm)Enum.Parse(typeof(EvolutionParadigm), XmlUtils.TryGetValueAsString(xmlConfig, "EvolutionParadigm"));

    bool? diverse = XmlUtils.TryGetValueAsBool(xmlConfig, "LogDiversity");
    if (diverse.HasValue && diverse.Value)
        _logDiversity = true;

    // Social agents additionally need memory and teaching settings.
    if (_agentType == AgentTypes.Social)
    {
        var memSection = xmlConfig.GetElementsByTagName("Memory")[0] as XmlElement;
        _memory = (MemoryParadigm)Enum.Parse(typeof(MemoryParadigm), XmlUtils.TryGetValueAsString(memSection, "Paradigm"));
        SocialAgent.DEFAULT_MEMORY_SIZE = XmlUtils.GetValueAsInt(memSection, "Size");
        if (_memory == MemoryParadigm.IncrementalGrowth)
        {
            _memGens = XmlUtils.GetValueAsInt(memSection, "GrowthGenerations");
            _maxMemorySize = XmlUtils.GetValueAsInt(memSection, "MaxSize");
        }
        _teaching = (TeachingParadigm)Enum.Parse(typeof(TeachingParadigm), XmlUtils.TryGetValueAsString(xmlConfig, "TeachingParadigm"));
    }

    // One plant species per <Plant> element in the configuration.
    var species = new List<PlantSpecies>();
    var plants = xmlConfig.GetElementsByTagName("Plant");
    for (int i = 0; i < plants.Count; i++)
    {
        var plant = plants[i] as XmlElement;
        species.Add(new PlantSpecies(i)
        {
            Name = XmlUtils.GetValueAsString(plant, "Name"),
            Radius = XmlUtils.GetValueAsInt(plant, "Radius"),
            Reward = XmlUtils.GetValueAsInt(plant, "Reward"),
            Count = XmlUtils.GetValueAsInt(plant, "Count")
        });
    }

    // Seed the world with a fixed set of spinning agents at random positions.
    Random random = new Random();
    var agents = new List<ForagingAgent>();
    const int NUM_AGENTS = 10;
    for (int i = 0; i < NUM_AGENTS; i++)
    {
        agents.Add(new SpinningAgent(i)
        {
            X = random.Next(500),
            Y = random.Next(500),
            Orientation = random.Next(360)
        });
    }

    List<Predator> predators = new List<Predator>();
    _predCount = XmlUtils.GetValueAsInt(xmlConfig, "Predators");
    var predStr = XmlUtils.TryGetValueAsString(xmlConfig, "PredatorDistribution");
    if (predStr != null)
        PredatorDistribution = (PredatorDistributionTypes)Enum.Parse(typeof(PredatorDistributionTypes), predStr, true);
    _predTypes = XmlUtils.GetValueAsInt(xmlConfig, "PredatorTypes");
    if (PredatorDistribution == PredatorDistributionTypes.Alternating)
        _predGens = XmlUtils.GetValueAsDouble(xmlConfig, "PredatorGenerations");
    _distinguishPreds = XmlUtils.GetValueAsBool(xmlConfig, "DistinguishPredators");

    // Note: the WorldHeight value is used for both dimensions, so the world is square.
    _world = new World(agents,
                       XmlUtils.GetValueAsInt(xmlConfig, "WorldHeight"),
                       XmlUtils.GetValueAsInt(xmlConfig, "WorldHeight"),
                       species, predators)
    {
        AgentHorizon = XmlUtils.GetValueAsInt(xmlConfig, "AgentHorizon"),
        PlantLayoutStrategy = _plantLayout,
        StepReward = _stepReward,
        PredatorTypes = _predTypes
    };

    // Network outputs: taken from the config if given, otherwise derived from
    // the enabled capabilities (2 for navigation, _predTypes + 1 for hiding),
    // falling back to 2.
    var outputs = XmlUtils.TryGetValueAsInt(xmlConfig, "Outputs");
    var navigation = XmlUtils.TryGetValueAsBool(xmlConfig, "AgentsNavigate");
    var hiding = XmlUtils.TryGetValueAsBool(xmlConfig, "AgentsHide");
    _navigationEnabled = navigation ?? false;
    _hidingEnabled = hiding ?? false;
    if (!outputs.HasValue)
    {
        if (_navigationEnabled || _hidingEnabled)
            _outputs = (_navigationEnabled ? 2 : 0) + (_hidingEnabled ? _predTypes + 1 : 0);
        else
            _outputs = 2;
    }
    else
        _outputs = outputs.Value;

    // Network inputs: taken from the config if given, otherwise derived from
    // the plant and predator sensor groups plus one additional input.
    var inputs = XmlUtils.TryGetValueAsInt(xmlConfig, "Inputs");
    _inputs = inputs.HasValue
        ? inputs.Value
        : _world.PlantTypes.Count() * World.SENSORS_PER_OBJECT_TYPE
          + (_distinguishPreds ? _predTypes : 1) * World.SENSORS_PER_OBJECT_TYPE
          + 1;

    _eaParams = new NeatEvolutionAlgorithmParameters();
    _eaParams.SpecieCount = _specieCount;
    _neatGenomeParams = new NeatGenomeParameters() { ActivationFn = PlainSigmoid.__DefaultInstance };
    if (_teaching != TeachingParadigm.EgalitarianEvolvedAcceptability)
        _neatGenomeParams.InitialInterconnectionsProportion = 0.1;
}
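// Illustrative only: the XML shape below is reconstructed from the element names
// this Initialize method reads. The values, the Activation sub-format, and the
// ComplexityRegulationStrategy entry are assumptions; enum-valued entries must
// match the project's enum members (the EvolutionParadigm value is a placeholder).
public static XmlElement BuildSampleConfig()
{
    var doc = new XmlDocument();
    doc.LoadXml(@"
<Config>
  <PopulationSize>100</PopulationSize>
  <SpecieCount>10</SpecieCount>
  <Activation>
    <Scheme>CyclicFixedIters</Scheme>  <!-- assumed SharpNEAT activation-scheme format -->
    <Iters>2</Iters>
  </Activation>
  <ComplexityRegulationStrategy>Absolute</ComplexityRegulationStrategy>
  <ComplexityThreshold>50</ComplexityThreshold>
  <Description>Sample foraging experiment</Description>
  <TimeStepsPerGeneration>1000</TimeStepsPerGeneration>
  <StepReward>0</StepReward>
  <AgentType>Social</AgentType>                       <!-- member of AgentTypes -->
  <PlantLayout>Uniform</PlantLayout>                  <!-- member of PlantLayoutStrategies -->
  <EvolutionParadigm>REPLACE_WITH_ENUM_MEMBER</EvolutionParadigm>
  <LogDiversity>true</LogDiversity>
  <Memory>
    <Paradigm>IncrementalGrowth</Paradigm>
    <Size>1</Size>
    <GrowthGenerations>50</GrowthGenerations>
    <MaxSize>10</MaxSize>
  </Memory>
  <TeachingParadigm>EgalitarianEvolvedAcceptability</TeachingParadigm>
  <Plant>
    <Name>Food</Name>
    <Radius>5</Radius>
    <Reward>100</Reward>
    <Count>20</Count>
  </Plant>
  <Predators>0</Predators>
  <PredatorTypes>1</PredatorTypes>
  <DistinguishPredators>false</DistinguishPredators>
  <WorldHeight>500</WorldHeight>
  <AgentHorizon>100</AgentHorizon>
  <AgentsNavigate>false</AgentsNavigate>  <!-- Inputs/Outputs omitted: derived automatically -->
  <AgentsHide>false</AgentsHide>
</Config>");
    return doc.DocumentElement;
}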