public void ReturnEvaluatedIndividual(Individual cur)
{
    // Stamp the individual with a unique, monotonically increasing ID.
    cur.ID = _individualsEvaluated;
    _individualsEvaluated++;

    // Resolve each configured feature by name into the individual's feature vector.
    cur.Features = new double[featureNames.Length];
    for (int featureIndex = 0; featureIndex < featureNames.Length; featureIndex++)
    {
        cur.Features[featureIndex] = cur.GetStatByName(featureNames[featureIndex]);
    }

    // Archive the individual and refresh the map log after every evaluation.
    _featureMap.Add(cur);
    _map_log.UpdateLog();
}
protected override StageResult Init()
{
    // Run base-class initialization first; propagate any failure unchanged.
    if (!Success(base.Init(), out StageResult baseResult))
    {
        return baseResult;
    }

    // Register the fixed feature categories by ordinal index.
    FeatureMap.Add(0, "TEXT");
    FeatureMap.Add(1, "LEXICAL");
    FeatureMap.Add(2, "SYNTACTIC");

    // NOTE(review): "SYNTACTIC" is registered unconditionally above; this flag
    // only controls the log message — confirm that is intended.
    if (WithSyntaxFeatures)
    {
        Info("Selecting syntactic features.");
    }

    return StageResult.SUCCESS;
}
private void CalculateFitness(Individual cur)
{
    // Raw fitness is the health differential; features are the (damage, turns) pair.
    cur.RawFitness = cur.TotalHealthDifference;
    cur.Features = new [] { cur.DamageDone, cur.NumTurns };
    Console.WriteLine("Raw Fitness: " + cur.RawFitness);

    // If the archive accepted this individual, every elite's rank may have
    // shifted, so drain the deque, re-score each shard's representative, and
    // rebuild the deque with the fresh fitness values.
    if (_featureMap.Add(cur))
    {
        var drained = new Queue<Shard>();
        while (!_populationDeque.IsEmpty)
        {
            Shard shard = _populationDeque.DeleteMax();
            // Fitness is the negated rank: lower rank (better) => higher fitness.
            shard.Representative.Fitness = -_featureMap.GetRank(shard.Representative);
            drained.Enqueue(shard);
        }
        while (drained.Count > 0)
        {
            _populationDeque.Add(drained.Dequeue());
        }
    }

    cur.Fitness = -_featureMap.GetRank(cur);
    Console.WriteLine("Fitness: " + cur.Fitness);

    // Track record-breaking stats; capture the "did we beat the record" flags
    // before updating the running maxima.
    bool didHitMaxWins = cur.WinCount > _maxWins;
    bool didHitMaxFitness = cur.RawFitness > _maxFitness;
    _maxWins = Math.Max(_maxWins, cur.WinCount);
    _maxFitness = Math.Max(_maxFitness, cur.RawFitness);

    // Log new champions / fittest individuals.
    if (didHitMaxWins)
    {
        _champion_log.LogIndividual(cur);
    }
    if (didHitMaxFitness)
    {
        _fittest_log.LogIndividual(cur);
    }
}
public void ReturnEvaluatedIndividual(Individual ind)
{
    // How often (in evaluations) the feature-map log is flushed to disk.
    // Named constant replaces the previous magic number; a sibling
    // implementation reads this from _params.Map — TODO: unify if that
    // config property exists here too.
    const int MapLoggingFrequency = 100;

    // Stamp the individual with a unique, monotonically increasing ID.
    ind.ID = _individualsEvaluated;
    _individualsEvaluated++;

    // Resolve each configured feature by name into the feature vector.
    ind.Features = new double[_params.Map.Features.Length];
    for (int i = 0; i < _params.Map.Features.Length; i++)
    {
        ind.Features[i] = ind.GetStatByName(_params.Map.Features[i].Name);
    }

    _featureMap.Add(ind);

    // Periodic (rather than per-evaluation) logging keeps I/O cost bounded.
    if (_individualsEvaluated % MapLoggingFrequency == 0)
    {
        _map_log.UpdateLog();
    }
}
public List<int> MapFeatures(string[] features)
{
    // Maps each feature name to a stable integer id, assigning fresh ids
    // (in order of first appearance) to names not seen before.
    // Returns the ids in the same order as the input array.
    var posFeatureIds = new List<int>();
    foreach (var feature in features)
    {
        // TryGetValue avoids the ContainsKey + indexer double lookup (CA1854).
        if (FeatureMap.TryGetValue(feature, out int featureId))
        {
            posFeatureIds.Add(featureId);
        }
        else
        {
            // New feature: assign the next free id and grow the registry.
            FeatureMap.Add(feature, EntryCount);
            posFeatureIds.Add(EntryCount);
            EntryCount++;
        }
    }
    return posFeatureIds;
}
public void ReturnEvaluatedIndividual(Individual ind)
{
    // Assign a unique ID and advance the evaluation counter.
    ind.ID = _individualsEvaluated;
    _individualsEvaluated++;

    // Fill the feature vector from the configured feature names.
    int featureCount = _params.Map.Features.Length;
    ind.Features = new double[featureCount];
    for (int featureIndex = 0; featureIndex < featureCount; featureIndex++)
    {
        ind.Features[featureIndex] = ind.GetStatByName(_params.Map.Features[featureIndex].Name);
    }

    // Track the best fitness seen so far, then archive the individual.
    _maxFitness = Math.Max(_maxFitness, ind.Fitness);
    _featureMap.Add(ind);

    // Periodic logging at the configured frequencies.
    if (_individualsEvaluated % _params.Map.MapLoggingFrequency == 0)
    {
        _map_log.UpdateLog();
    }
    if (_individualsEvaluated % _params.Map.SummaryLoggingFrequency == 0)
    {
        _summary_log.UpdateLog(_individualsEvaluated);
    }
}
public void ReturnEvaluatedIndividual(Individual ind)
{
    // CMA-ME style return: archive the individual, collect the generation's
    // improving parents, and — once a full population has come back — run a
    // CMA-ES adaptation step whose recombination weights are sized by the
    // number of archive-improving parents actually found.
    bool didAdd = _featureMap.Add(ind);

    // Stragglers from a previous generation still enter the map (above) but
    // do not participate in this generation's adaptation.
    if (ind.Generation != _generation)
    {
        return;
    }

    if (didAdd)
    {
        if (ind.IsNovel)
        {
            _novel_parents.Add(ind);
        }
        else
        {
            _improved_parents.Add(ind);
        }
    }

    _individualsEvaluated++;
    _population_count++;
    if (_population_count >= _params.PopulationSize)
    {
        int numParents = _novel_parents.Count + _improved_parents.Count;
        bool needsRestart = numParents == 0;

        // Only update if we have parents.
        if (numParents > 0)
        {
            // Novel elites are ranked ahead of improved ones; within each
            // group, larger improvement (Delta) first.
            var parents = _novel_parents.OrderByDescending(o => o.Delta).Concat(
                _improved_parents.OrderByDescending(o => o.Delta)).ToList();

            // Calculate fresh log-decaying recombination weights for the
            // number of elites found this generation.
            var weights = LA.Vector<double>.Build.Dense(numParents);
            for (int i = 0; i < numParents; i++)
            {
                weights[i] = Math.Log(numParents + 0.5) - Math.Log(i + 1);
            }
            weights /= weights.Sum();

            // Dynamically update the hyperparameters for CMA-ES
            // (standard formulas parameterized by the effective parent count).
            double sumWeights = weights.Sum();
            double sumSquares = weights.Sum(x => x * x);
            double mueff = sumWeights * sumWeights / sumSquares;
            double cc = (4 + mueff / _numParams) / (_numParams + 4 + 2 * mueff / _numParams);
            double cs = (mueff + 2) / (_numParams + mueff + 5);
            double c1 = 2 / (Math.Pow(_numParams + 1.3, 2) + mueff);
            double cmu = Math.Min(1 - c1,
                2 * (mueff - 2 + 1 / mueff) / (Math.Pow(_numParams + 2, 2) + mueff));
            double damps = 1 + 2 * Math.Max(0, Math.Sqrt((mueff - 1) / (_numParams + 1)) - 1) + cs;

            // Recombination of the new mean
            LA.Vector<double> oldMean = _mean;
            _mean = LA.Vector<double>.Build.Dense(_numParams);
            for (int i = 0; i < numParents; i++)
            {
                _mean += DenseVector.OfArray(parents[i].ParamVector) * weights[i];
            }

            // Update the evolution paths (ps: sigma path, pc: covariance path).
            LA.Vector<double> y = _mean - oldMean;
            LA.Vector<double> z = _C.Invsqrt * y;
            _ps = (1.0 - cs) * _ps + (Math.Sqrt(cs * (2.0 - cs) * mueff) / _mutationPower) * z;
            // NOTE: 2 * evaluated / popSize is integer division — presumably
            // intended as the generation count; confirm.
            double left = _ps.DotProduct(_ps) / _numParams
                / (1.0 - Math.Pow(1.0 - cs, 2 * _individualsEvaluated / _params.PopulationSize));
            double right = 2.0 + 4.0 / (_numParams + 1.0);
            double hsig = left < right ? 1 : 0;
            _pc = (1.0 - cc) * _pc + hsig * Math.Sqrt(cc * (2.0 - cc) * mueff) * y;

            // Covariance matrix update
            double c1a = c1 * (1.0 - (1.0 - hsig * hsig) * cc * (2.0 - cc));
            _C.C *= (1.0 - c1a - cmu);
            _C.C += c1 * _pc.OuterProduct(_pc);
            // BUGFIX: iterate numParents, not _params.NumParents — parents and
            // weights only hold numParents entries, so the old bound threw an
            // out-of-range error whenever fewer elites were found than the
            // configured parent count.
            for (int i = 0; i < numParents; i++)
            {
                LA.Vector<double> dv = DenseVector.OfArray(parents[i].ParamVector) - oldMean;
                _C.C += weights[i] * cmu * dv.OuterProduct(dv) / (_mutationPower * _mutationPower);
            }

            // Refresh the eigendecomposition only when we are not restarting.
            if (checkStop(parents))
            {
                needsRestart = true;
            }
            else
            {
                _C.UpdateEigensystem();
            }

            // Update sigma (step size); a pending restart will overwrite this.
            double cn = cs / damps;
            double sumSquarePs = _ps.DotProduct(_ps);
            _mutationPower *= Math.Exp(Math.Min(1, cn * (sumSquarePs / _numParams - 1) / 2));
        }

        if (needsRestart)
        {
            reset();
        }

        // Begin the next generation with cleared bookkeeping.
        _generation++;
        _individualsDispatched = 0;
        _population_count = 0;
        _novel_parents.Clear();
        _improved_parents.Clear();
    }
}
public void Run()
{
    // Distributed MAP-Elites driver: discovers workers via files in active/,
    // dispatches individuals through per-worker inbox files, and collects
    // results from outbox files until the evaluation budget is exhausted.
    _individualsEvaluated = 0;
    _maxWins = 0;
    _maxFitness = Int32.MinValue;
    _runningWorkers = new Queue<int>();
    _idleWorkers = new Queue<int>();

    string boxesDirectory = "boxes/";
    string inboxTemplate = boxesDirectory + "deck-{0,4:D4}-inbox.tml";
    string outboxTemplate = boxesDirectory + "deck-{0,4:D4}-outbox.tml";

    // Let the workers know we are here.
    string activeDirectory = "active/";
    string activeWorkerTemplate = activeDirectory + "worker-{0,4:D4}.txt";
    string activeSearchPath = activeDirectory + "search.txt";
    using (FileStream ow = File.Open(activeSearchPath,
        FileMode.Create, FileAccess.Write, FileShare.None))
    {
        WriteText(ow, "MAP Elites");
        WriteText(ow, _configFilename);
        ow.Close();
    }

    Console.WriteLine("Begin search...");
    while (_individualsEvaluated < _params.NumToEvaluate)
    {
        // Look for new workers.
        string[] hailingFiles = Directory.GetFiles(activeDirectory);
        foreach (string activeFile in hailingFiles)
        {
            string prefix = activeDirectory + "worker-";
            if (activeFile.StartsWith(prefix))
            {
                string suffix = ".txt";
                int start = prefix.Length;
                int end = activeFile.Length - suffix.Length;
                string label = activeFile.Substring(start, end - start);
                int workerId = Int32.Parse(label);
                _idleWorkers.Enqueue(workerId);
                // BUGFIX: indexer assignment instead of Add() — a worker that
                // restarts and recreates its hailing file would otherwise
                // crash the search with an ArgumentException (duplicate key).
                _individualStable[workerId] = null;
                File.Delete(activeFile);
                Console.WriteLine("Found worker " + workerId);
            }
        }

        // Dispatch jobs to the available workers.
        while (_idleWorkers.Count > 0)
        {
            // Hold back once the initial population is dispatched until the
            // first results arrive, so elites exist to mutate from.
            if (_individualsDispatched >= _params.InitialPopulation &&
                _individualsEvaluated == 0)
            {
                break;
            }

            int workerId = _idleWorkers.Dequeue();
            _runningWorkers.Enqueue(workerId);
            Console.WriteLine("Starting worker: " + workerId);

            // Random individuals seed the map; afterwards mutate a random elite.
            Individual choiceIndividual =
                _individualsDispatched < _params.InitialPopulation
                    ? Individual.GenerateRandomIndividual(_cardSet)
                    : _featureMap.GetRandomElite().Mutate();

            string inboxPath = string.Format(inboxTemplate, workerId);
            SendWork(inboxPath, choiceIndividual);
            _individualStable[workerId] = choiceIndividual;
            _individualsDispatched++;
        }

        // Look for individuals that are done.
        int numActiveWorkers = _runningWorkers.Count;
        for (int i = 0; i < numActiveWorkers; i++)
        {
            int workerId = _runningWorkers.Dequeue();
            string inboxPath = string.Format(inboxTemplate, workerId);
            string outboxPath = string.Format(outboxTemplate, workerId);

            // A worker is done when its outbox exists and its inbox was consumed.
            if (File.Exists(outboxPath) && !File.Exists(inboxPath))
            {
                // NOTE(review): the original comment said "wait for the file to
                // finish being written" but no wait is performed — confirm the
                // worker writes the outbox atomically (e.g. rename-into-place).
                Console.WriteLine("Worker done: " + workerId);
                ReceiveResults(outboxPath, _individualStable[workerId]);
                _featureMap.Add(_individualStable[workerId]);
                _idleWorkers.Enqueue(workerId);
                _individualsEvaluated++;
                _map_log.UpdateLog();
            }
            else
            {
                // Still busy: keep it in the running queue.
                _runningWorkers.Enqueue(workerId);
            }
        }

        // Poll once per second to avoid hammering the filesystem.
        Thread.Sleep(1000);
    }

    // Let the workers know that we are done.
    File.Delete(activeSearchPath);
}
public void ReturnEvaluatedIndividual(Individual ind)
{
    // CMA-ES return path: record the individual in the comparison map, track
    // the best-so-far, and once a full population is collected run one
    // covariance/step-size adaptation step over the top-ranked parents.
    ind.ID = _individualsEvaluatedTotal;
    _individualsEvaluatedTotal++;

    // Update map information for comparison purposes
    ind.Features = new double[_params.Map.Features.Length];
    for (int i = 0; i < _params.Map.Features.Length; i++)
    {
        ind.Features[i] = ind.GetStatByName(_params.Map.Features[i].Name);
    }
    _featureMap.Add(ind);
    if (_individualsEvaluatedTotal % _params.Map.MapLoggingFrequency == 0)
    {
        _map_log.UpdateLog();
    }
    if (_individualsEvaluatedTotal % _params.Map.SummaryLoggingFrequency == 0)
    {
        _summary_log.UpdateLog(_individualsEvaluatedTotal);
    }

    // Store the best individual
    if (_bestIndividual == null || _bestIndividual.Fitness < ind.Fitness)
    {
        _bestIndividual = ind;
    }

    // Used for overflow only
    if (ind.Generation != _generation)
    {
        return;
    }

    // Note that we don't use overflow individuals in adaptation calculations.
    _individualsEvaluated++;
    _population.Add(ind);
    if (_population.Count >= _params.PopulationSize)
    {
        // Rank solutions: best fitness first, keep the configured parent count.
        var parents = _population.OrderByDescending(o => o.Fitness)
                      .Take(_params.NumParents).ToList();

        // Recombination of the new mean
        LA.Vector<double> oldMean = _mean;
        _mean = LA.Vector<double>.Build.Dense(_numParams);
        for (int i = 0; i < _params.NumParents; i++)
        {
            _mean += DenseVector.OfArray(parents[i].ParamVector) * _weights[i];
        }

        // Update the evolution paths (ps: sigma path, pc: covariance path).
        LA.Vector<double> y = _mean - oldMean;
        LA.Vector<double> z = _C.Invsqrt * y;
        _ps = (1.0 - _cs) * _ps + (Math.Sqrt(_cs * (2.0 - _cs) * _mueff) / _mutationPower) * z;
        // NOTE: 2 * evaluated / popSize is integer division — presumably
        // intended as the generation count; confirm.
        double left = _ps.DotProduct(_ps) / _numParams
            / (1.0 - Math.Pow(1.0 - _cs, 2 * _individualsEvaluated / _params.PopulationSize));
        double right = 2.0 + 4.0 / (_numParams + 1.0);
        double hsig = left < right ? 1 : 0;
        _pc = (1.0 - _cc) * _pc + hsig * Math.Sqrt(_cc * (2.0 - _cc) * _mueff) * y;

        // Covariance matrix update
        double c1a = _c1 * (1.0 - (1.0 - hsig * hsig) * _cc * (2.0 - _cc));
        _C.C *= (1.0 - c1a - _cmu);
        _C.C += _c1 * _pc.OuterProduct(_pc);
        for (int i = 0; i < _params.NumParents; i++)
        {
            LA.Vector<double> dv = DenseVector.OfArray(parents[i].ParamVector) - oldMean;
            _C.C += _weights[i] * _cmu * dv.OuterProduct(dv) / (_mutationPower * _mutationPower);
        }

        // BUGFIX: UpdateEigensystem used to run unconditionally, immediately
        // after Reset() had rebuilt the strategy state when CheckStop fired.
        // Mirror the sibling implementation: skip the eigensystem refresh when
        // restarting and defer the Reset() to the end of the update.
        bool needsRestart = CheckStop(parents);
        if (!needsRestart)
        {
            _C.UpdateEigensystem();
        }

        // Update sigma (step size); a pending restart will overwrite this.
        double cn = _cs / _damps;
        double sumSquarePs = _ps.DotProduct(_ps);
        _mutationPower *= Math.Exp(Math.Min(1, cn * (sumSquarePs / _numParams - 1) / 2));

        if (needsRestart)
        {
            Reset();
        }

        // Begin the next generation with cleared bookkeeping.
        _generation++;
        _individualsDispatched = 0;
        _population.Clear();
    }
}