Example #1
        public override IEntity Decode(MultiKey islandKey, Dictionary <MultiKey, IEntity> entities)
        {
            double totalDistance = 0.0;
            var    entity        = entities[Key] as TSPEntity;
            var    chromosome    = entity.Chromosomes[Key[0]] as PermutationChromosome;
            var    firstPoint    = TSPPoints[chromosome.Genes[0]];
            var    previousPoint = firstPoint;

            for (int i = 1; i < chromosome.Genes.Length; i++)
            {
                var point = TSPPoints[chromosome.Genes[i]];
                totalDistance += GetDistance(previousPoint, point);
                entity.Phenotype.Add(previousPoint);
                previousPoint = point;
            }

            totalDistance += GetDistance(previousPoint, firstPoint);
            entity.Phenotype.Add(previousPoint);

            MultiObjectiveFitness fitness = new MultiObjectiveFitness(new double[] { -1 * totalDistance });

            entity.SetFitness(fitness);
            entity.TotalDistance = totalDistance;

            EntityCount++;

            return entity;
        }
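The decoder above relies on a GetDistance helper and a TSPPoints array that are not shown in this snippet. As a rough sketch only, assuming TSPPoint exposes X and Y coordinates (hypothetical member names), the helper could compute plain Euclidean segment lengths:

        // Sketch only: assumes a TSPPoint type with X/Y coordinates; the actual
        // point type and member names used by the TSP sample may differ.
        private static double GetDistance(TSPPoint a, TSPPoint b)
        {
            double dx = a.X - b.X;
            double dy = a.Y - b.Y;
            return Math.Sqrt(dx * dx + dy * dy); // Euclidean length of one tour segment
        }

Note that the total distance is negated when the MultiObjectiveFitness is built, so a framework that maximizes fitness will prefer shorter tours.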
Example #2
        public override IEntity Decode(MultiKey islandKey, Dictionary <MultiKey, IEntity> entities)
        {
            EntityCount++;
            SortedSubsetChromosomeValidator.EntityCount = EntityCount;

            var entity     = entities[Key] as VehicleSchedulingEntity;
            var chromosome = entity.Chromosomes[Key[0]] as SortedSubsetChromosome;

            entity.VehiclesCount = chromosome.Sections.Length;

            for (int s = 0; s < chromosome.Sections.Length; s++)
            {
                //int DailyFirstTime = InitData.Trips[chromosome.Sections[s][0]].ArrivalTime;
                //int length = chromosome.Sections[s].Length;
                //int DailyLastTime = InitData.Trips[chromosome.Sections[s][length-1]].DepartureTime;
                //int? AMLastTime = null;
                //int? PMFirstTime = null;

                for (int p = 0; p < chromosome.Sections[s].Length - 1; p++)
                {
                    if (ConflictDetector.ConflictDetected(chromosome.Sections[s][p], chromosome.Sections[s][p + 1]))
                    {
                        throw new ApplicationException("Hard conflict detected between consecutive trips");
                        //return null;    //hard conflict
                    }

                    var trip1 = InitData.Trips[chromosome.Sections[s][p]];
                    var trip2 = InitData.Trips[chromosome.Sections[s][p + 1]];

                    double distance = InitData.GetDistance(trip1.LastStopId, trip2.FirstStopId);
                    entity.TotalDeadMileage += distance;

                    //if ((trip1.DepartureTime <= 720) && (!AMLastTime.HasValue || trip1.DepartureTime > AMLastTime)) AMLastTime = trip1.DepartureTime;
                    //if ((trip1.ArrivalTime >= 720) && (!PMFirstTime.HasValue || trip1.ArrivalTime < PMFirstTime)) PMFirstTime = trip1.ArrivalTime;
                }

                //var lastTrip = InitData.Trips[chromosome.Sections[s][length-1]];
                //if ((lastTrip.DepartureTime <= 720) && (!AMLastTime.HasValue || lastTrip.DepartureTime > AMLastTime)) AMLastTime = lastTrip.DepartureTime;
                //if ((lastTrip.ArrivalTime >= 720) && (!PMFirstTime.HasValue || lastTrip.ArrivalTime < PMFirstTime)) PMFirstTime = lastTrip.ArrivalTime;

                //entity.TotalActiveTime += DailyLastTime - DailyFirstTime;
                //if (AMLastTime.HasValue) entity.TotalActiveTime -= (720 - AMLastTime.Value);
                //if (PMFirstTime.HasValue) entity.TotalActiveTime -= (PMFirstTime.Value - 720);
            }

            var fitness0 = 1 / (1 + entity.TotalDeadMileage);
            var fitness1 = 1 / (1 + (double)entity.VehiclesCount);
            MultiObjectiveFitness fitness = new MultiObjectiveFitness(new double[] { fitness0, fitness1 });

            //fitness.Value[2] = 1 / (1 + (double) entity.TotalActiveTime);
            //GetAverageLengthOfLongSections(chromosome.Sections);

            entity.SetFitness(fitness);

            return entity;
        }
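Both objectives above are converted to a maximization form with the transform 1 / (1 + cost), which maps any non-negative cost into (0, 1], so lower dead mileage and fewer vehicles both yield higher fitness values. A small helper expressing that convention (hypothetical name, not part of the original code) might look like:

        // Sketch only: hypothetical helper for the 1 / (1 + cost) convention used above
        // to turn minimization objectives into values a maximizing optimizer can compare.
        private static double CostToFitness(double cost)
        {
            return 1.0 / (1.0 + cost); // cost 0 -> 1.0, large cost -> approaches 0
        }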
Example #3
        static void Main(string[] args)
        {
            var tspData  = LoadCsv("Berlin52.csv");
            var initData = new TSPInitData(tspData);

            var optimizer = Optimizer.Create();

            optimizer.Settings.WithSeed(Environment.TickCount);
            optimizer.Settings.AddSubProblem("TSP", new TravelingSalesman(tspData.Count));
            optimizer.Settings.WithEntityType <TSPEntity>().WithEvaluation <TSPEvaluation>();
            optimizer.SetParameter("TestParameter", 42.0);

            var fitnessLimit = new MultiObjectiveFitness(new double[] { -7545 });

            optimizer.Settings.StopWhen().FitnessLimitExceeded(fitnessLimit)
            .Or().TimeoutElapsed(300000);

            optimizer.SetParameter(Pea.Core.Island.ParameterNames.IslandsCount, 10);

            Stopwatch sw = Stopwatch.StartNew();

            var result = optimizer.Run(initData);

            //var result = AsyncUtil.RunSync(() => system.Start(initData));

            foreach (var reason in result.StopReasons)
            {
                Console.WriteLine(reason);
            }

            sw.Stop();
            var elapsed  = sw.ElapsedMilliseconds;
            var entities = TSPEvaluation.EntityCount;
            var speed    = entities / (double)elapsed;

            Console.WriteLine($"Elapsed: {elapsed} Entities: {entities} ({speed} ent./ms)");

            Console.ReadLine();
        }
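LoadCsv is not shown in this example. A minimal sketch of such a loader, assuming one point per line with comma-separated X and Y coordinates and a TSPPoint(double x, double y) constructor (both assumptions; the real Berlin52.csv layout may differ), could be:

        // Sketch only: requires System.Collections.Generic, System.Globalization and System.IO.
        private static List<TSPPoint> LoadCsv(string path)
        {
            var points = new List<TSPPoint>();
            foreach (var line in File.ReadAllLines(path))
            {
                if (string.IsNullOrWhiteSpace(line)) continue; // skip blank lines
                var parts = line.Split(',');
                points.Add(new TSPPoint(
                    double.Parse(parts[0], CultureInfo.InvariantCulture),
                    double.Parse(parts[1], CultureInfo.InvariantCulture)));
            }
            return points;
        }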
Example #4
        public void BuildArchive(IEvolutionState state, IList <Individual> oldInds, IList <Individual> newInds, int archiveSize)
        {
            // step 1: load the archive with the pareto-nondominated front
            var archive  = new List <Individual>();
            var nonFront = new List <Individual>();

            MultiObjectiveFitness.PartitionIntoParetoFront(oldInds, archive, nonFront);
            var currentArchiveSize = archive.Count;

            // step 2: if the archive isn't full, load the remainder with the fittest individuals (using customFitnessMetric) that aren't in the archive yet
            if (currentArchiveSize < archiveSize)
            {
                // BRS : The following uses Individual's IComparable implementation based on Fitness.
                // The fitter individuals will be earlier
                nonFront.SortByFitnessDescending();
                var len = archiveSize - currentArchiveSize;
                for (var i = 0; i < len; i++)
                {
                    archive.Add(nonFront[i]);
                    currentArchiveSize++;
                }
            }

            // step 3: if the archive is OVERFULL, iterate as follows:
            //              step 3a: remove the k-closest individual in the archive
            //var evaluator = ((ISPEA2Evaluator)(state.Evaluator));
            //var inds = archive.ToArray();

            while (currentArchiveSize > archiveSize)
            {
                var closest      = archive[0];
                var closestIndex = 0;
                var closestD     = CalculateDistancesFromIndividual(closest, oldInds);

                for (var i = 1; i < currentArchiveSize; i++)
                {
                    var competitor  = archive[i];
                    var competitorD = CalculateDistancesFromIndividual(competitor, oldInds);

                    for (var k = 0; k < oldInds.Count; k++)
                    {
                        // Compare the k-th nearest-neighbour distances; the individual
                        // with the smaller distance is the more crowded ("closest") one.
                        if (closestD[k] > competitorD[k])
                        {
                            closest      = competitor;
                            closestD     = competitorD;
                            closestIndex = i;
                            break;
                        }
                        else if (closestD[k] < competitorD[k])
                        {
                            break;
                        }
                    }
                }

                // remove it destructively -- move the last individual into its slot and drop the last slot. This is O(1)
                archive[closestIndex] = archive[archive.Count - 1];
                archive.RemoveAt(archive.Count - 1);

                currentArchiveSize--;
            }

            // step 4: put clones of the archive in the new individuals
            var arr = archive.ToArray();

            for (var i = 0; i < archiveSize; i++)
            {
                newInds[newInds.Count - archiveSize + i] = (Individual)arr[i].Clone();
            }
        }
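The truncation loop in step 3 assumes that CalculateDistancesFromIndividual returns the distances from one individual to every member of oldInds, sorted in ascending order, so that index k is the distance to the k-th nearest neighbour. A sketch of that helper under those assumptions (objective-space Euclidean distance; NumObjectives and Individual.Fitness are assumed accessors) is:

        // Sketch only: assumes Individual exposes its Fitness and that
        // MultiObjectiveFitness exposes GetObjective(i) and a NumObjectives count;
        // the real SPEA2 evaluator in the library may differ.
        private static double[] CalculateDistancesFromIndividual(Individual ind, IList<Individual> pop)
        {
            var fitness = (MultiObjectiveFitness)ind.Fitness;
            var d = new double[pop.Count];
            for (var j = 0; j < pop.Count; j++)
            {
                var other = (MultiObjectiveFitness)pop[j].Fitness;
                var sum = 0.0;
                for (var o = 0; o < fitness.NumObjectives; o++)
                {
                    var diff = fitness.GetObjective(o) - other.GetObjective(o);
                    sum += diff * diff;
                }
                d[j] = Math.Sqrt(sum);
            }
            Array.Sort(d); // ascending: d[k] is the distance to the k-th nearest neighbour
            return d;
        }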
Example #5
        /// <summary>
        /// Program entry point.
        /// </summary>
        /// <param name="app">Holds arguments and other info.</param>
        public void Execute(IExampleInterface app)
        {
            IMLDataSet trainingData = GenerationUtil.GenerateSingleDataRange(
                (x) => (3 * Math.Pow(x, 2) + (12 * x) + 4)
                , 0, 100, 1);

            EncogProgramContext context = new EncogProgramContext();

            context.DefineVariable("x");

            StandardExtensions.CreateNumericOperators(context);

            PrgPopulation pop = new PrgPopulation(context, 1000);

            MultiObjectiveFitness score = new MultiObjectiveFitness();

            score.AddObjective(1.0, new TrainingSetScore(trainingData));

            TrainEA genetic = new TrainEA(pop, score);

            genetic.ValidationMode = true;
            genetic.CODEC          = new PrgCODEC();
            genetic.AddOperation(0.5, new SubtreeCrossover());
            genetic.AddOperation(0.25, new ConstMutation(context, 0.5, 1.0));
            genetic.AddOperation(0.25, new SubtreeMutation(context, 4));
            genetic.AddScoreAdjuster(new ComplexityAdjustedScore(10, 20, 10, 20.0));
            genetic.Rules.AddRewriteRule(new RewriteConstants());
            genetic.Rules.AddRewriteRule(new RewriteAlgebraic());
            genetic.Speciation = new PrgSpeciation();

            (new RampedHalfAndHalf(context, 1, 6)).Generate(new EncogRandom(), pop);

            genetic.ShouldIgnoreExceptions = false;

            EncogProgram best = null;

            genetic.ThreadCount = 1;

            try
            {
                for (int i = 0; i < 1000; i++)
                {
                    genetic.Iteration();
                    best = (EncogProgram)genetic.BestGenome;
                    Console.Out.WriteLine(genetic.IterationNumber + ", Error: "
                                          + best.Score + ",Best Genome Size:" + best.Size
                                          + ",Species Count:" + pop.Species.Count + ",best: " + best.DumpAsCommonExpression());
                }

                //EncogUtility.evaluate(best, trainingData);

                Console.Out.WriteLine("Final score:" + best.Score
                                      + ", effective score:" + best.AdjustedScore);
                Console.Out.WriteLine(best.DumpAsCommonExpression());
                //pop.dumpMembers(Integer.MAX_VALUE);
                //pop.dumpMembers(10);
            }
            catch (Exception t)
            {
                Console.Out.WriteLine(t.ToString());
            }
            finally
            {
                genetic.FinishTraining();
                EncogFramework.Instance.Shutdown();
            }
        }
Example #6
        public void MultiObjectiveFitnessWriteAndRead()
        {
            var rand = new MersenneTwisterFast(0);
            var f    = new MultiObjectiveFitness(2);
            var f2   = new MultiObjectiveFitness(2);     // We'll use this when we read the other one back in

            f2.SetObjectives(null, new [] { 0.0, 0.0 }); // setting these to zero allows us to check if they change

            // Default is to Maximize!

            // These need to be set up manually here because we are not running 'Setup'.
            // Every instance would thus normally share the same min and max values.
            f.MaxObjective  = new [] { 1.0, 1.0 };
            f.MinObjective  = new [] { 0.0, 0.0 };
            f2.MaxObjective = new[] { 1.0, 1.0 };
            f2.MinObjective = new[] { 0.0, 0.0 };

            // Set two objective (fitness) values, worst and best respectively
            f.SetObjectives(null, new [] { 0.0, 1.0 });

            // Set up some random Trials just to check that they get transmitted
            const int n = 10;

            f.Trials = new List <double>(n);
            for (var i = 0; i < n; i++)
            {
                f.Trials.Add(rand.NextDouble() * double.MaxValue); // in the half-open interval [0.0, double.MaxValue)
            }

            using (var ms = new MemoryStream())
            {
                var writer = new BinaryWriter(ms);
                f.WriteFitness(null, writer); // Write
                ms.Position = 0;
                var reader = new BinaryReader(ms);
                f2.ReadFitness(null, reader); // Read

                // Compare
                Assert.AreEqual(f.Value, f2.Value); // Value is same. This is the MAX objective value in the array

                // Just for kicks, let's make sure BOTH objectives are equal to the original values
                Assert.AreEqual(f.GetObjective(0), 0.0);
                Assert.AreEqual(f.GetObjective(0), f2.GetObjective(0));
                Assert.AreEqual(f.GetObjective(1), 1.0);
                Assert.AreEqual(f.GetObjective(1), f2.GetObjective(1));

                // And let's make sure our MAX and MIN are the same (these wouldn't normally change once established in 'Setup')
                Assert.AreEqual(f.MinObjective[0], f2.MinObjective[0]);
                Assert.AreEqual(f.MinObjective[1], f2.MinObjective[1]);
                Assert.AreEqual(f.MaxObjective[0], f2.MaxObjective[0]);
                Assert.AreEqual(f.MaxObjective[1], f2.MaxObjective[1]);

                Assert.IsTrue(f.Value <= 1.0f && f2.Value <= 1.0f); // This is the MAX objective value in the array
                Assert.IsFalse(f2.IsIdeal);                         // Fitness is not ideal by default (ideal must be defined in subclass)

                Assert.AreEqual(f2.Trials.Count, f.Trials.Count);   // Number of trials is the same
                for (var i = 0; i < f.Trials.Count; i++)
                {
                    Assert.AreEqual((double)f.Trials[i], (double)f2.Trials[i]); // Trial values are all the same
                }
            }
        }