Example #1
        private static void makeAndSaveProblemDefinition()
        {
            var pd = new ProblemDefinition();

            /* Add a design space descriptor so that optimization
             * methods for discrete variables can be used. Here each of
             * the 2 design variables is discretized over [-5000, 5000]
             * with a step size of 100. */
            var dsd = new DesignSpaceDescription(2);

            for (var i = 0; i < 2; i++)
            {
                dsd[i] = new VariableDescriptor(-5000, 5000, 100.0);
            }
            pd.Add(dsd);

            /* Add four convergence criteria */
            pd.Add(new DeltaXConvergence(0.0001));
            pd.Add(new MaxAgeConvergence(100, 0.000000001));
            pd.Add(new MaxFnEvalsConvergence(50000));
            pd.Add(new MaxSpanInPopulationConvergence(1));
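            /* Roughly, these mean: stop when the decision variables move less than 0.0001 between
             * iterations (DeltaX), when the best candidate has not changed (within 1e-9) for 100
             * iterations (MaxAge), after 50,000 function evaluations (MaxFnEvals), or when the
             * population spans no more than 1 unit (MaxSpanInPopulation). */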

            /* Setting the number of convergence criteria needed is not necessary,
             * since the default value of 1 will be used. It is interesting to un-
             * comment the next line and see how it affects the process. */
            //pd.NumConvergeCriteriaNeeded = 2;

            /* Add the objective function. */
            var objfn = new polynomialObjFn();

            objfn.Add("x1^2");
            objfn.Add("x2^2");
            objfn.Add("-2*x1");
            objfn.Add("-10*x2");
            objfn.Add("26");
            /* this is a simple paraboloid centered at {1, 5}: the terms above sum to
             * (x1 - 1)^2 + (x2 - 5)^2, whose unconstrained minimum value is 0. */
            pd.Add(objfn);

            var g1 = new polynomialInequality();

            g1.Add("-x1");
            g1.Add("x2"); /* this inequality translates to x2 - x1 < 0
                           * of simply x1 > x2. */
            pd.Add(g1);

            pd.Add(new double[] { 1500.0, 700.0 }); /* the initial guess (starting point) for the search */
            var stream = new FileStream(filename, FileMode.Create);

            pd.SaveProbToXml(stream);
        }
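        /* A small standalone sanity check (plain C#, no OOOT types; assumes the usual using System;)
         * of the problem defined above. The objective x1^2 + x2^2 - 2*x1 - 10*x2 + 26 equals
         * (x1 - 1)^2 + (x2 - 5)^2, so its unconstrained minimum is 0 at {1, 5}; that point, however,
         * violates the inequality x2 - x1 <= 0, so the constrained optimum lies on the boundary
         * x1 = x2, at {3, 3} with f = 8. The starting point {1500, 700} is feasible but far away. */
        private static void checkProblemByHand()
        {
            Func<double, double, double> f  = (x1, x2) => x1 * x1 + x2 * x2 - 2 * x1 - 10 * x2 + 26;
            Func<double, double, double> g1 = (x1, x2) => -x1 + x2;   // feasible when g1(x) <= 0

            Console.WriteLine("f(1, 5) = " + f(1, 5) + ", g1(1, 5) = " + g1(1, 5));             // 0 and 4 (infeasible)
            Console.WriteLine("f(3, 3) = " + f(3, 3) + ", g1(3, 3) = " + g1(3, 3));             // 8 and 0 (on the boundary)
            Console.WriteLine("f(1500, 700) = " + f(1500, 700) + ", g1 = " + g1(1500, 700));    // 2,730,026 and -800 (feasible)
        }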
        // ValidPitches (used in the design space below) is the set of valid AGMA gear pitches (in units of inch^-1).

        private static void Main()
        {
            //var opty = new GradientBasedOptimization();
            //var opty = new HillClimbing();
            var opty = new GeneticAlgorithm(100);

            var numGears = 2 * NumGearPairs;

            /* here is the Dependent Analysis. Take a look at the file/class ForceVelocityPositionAnalysis.cs
             * and notice that it inherits from IDependentAnalysis. By adding it to the optimization method
             * below, we ensure that it is called for any new decision variables found in the process. */
            var FVPAnalysis = new ForceVelocityPositionAnalysis(numGears, outputTorque, inputSpeed, inputPosition);

            opty.Add(FVPAnalysis);

            /* here is the objective function: minimize mass. Note that it holds a reference to the
             * ForceVelocityPositionAnalysis so that it can query it for exact values of diameter. */
            opty.Add(new massObjective(FVPAnalysis, gearDensity));

            /* here is an inequality constraint for fitting within the box described above. Again, it
             * needs the position and diameter information stored in ForceVelocityPositionAnalysis. */
            opty.Add(new boundingboxConstraint(FVPAnalysis, boxMinX, boxMaxX, boxMinY, boxMaxY, boxMinZ,
                                               boxMaxZ));

            /* on and on: stress inequality, output Location, output Speed equalities. Details can be found in
             * http://dx.doi.org/10.1115/DETC2009-86780 */
            opty.Add(new stressConstraint(FVPAnalysis, Nf, SFB, SFC));
            opty.Add(new outputLocationConstraint(FVPAnalysis, locationTol, outputX, outputY, outputZ));
            opty.Add(new outputSpeedConstraint(FVPAnalysis, speedTol, outputSpeed));
            for (var i = 0; i < NumGearPairs - 1; i++)
            {
                // each mating gear pair must have the same pitch.
                opty.Add(new samePitch(i * 4 + 1, (i + 1) * 4 + 1));
            }

            /******** Set up Design Space *************/

            /* for the GA and the Hill Climbing, a completely discrete space is needed. Face width and
             * location parameters should be continuous though. Consider removing the 800's below
             * when using a mixed optimization method. */
            var dsd = new DesignSpaceDescription(numGears * 4);

            for (var i = 0; i < numGears; i++)
            {
                dsd[4 * i]     = new VariableDescriptor(5, 1000, 1.0); // number of teeth: integers between 5 and 1000
                dsd[4 * i + 1] = new VariableDescriptor(ValidPitches); // pitches from AGMA standard
                dsd[4 * i + 2] = new VariableDescriptor(0, 50, 800);   // face width is between 0 and 50 inches
                dsd[4 * i + 3] = new VariableDescriptor(0, 500, 800);  //location is either an angle or a length
                // a max of 500 inches is generous
            }
            opty.Add(dsd);
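            /* with numGears = 2 * NumGearPairs gears and 4 design variables per gear, the design
             * vector has numGears * 4 entries - the same dimension passed to opty.Run(...) below. */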
            /******** Set up Optimization *************/

            /* the following mish-mash is similar to the previous project - just trying to find a
             * combination of methods that will lead to the optimal optimization algorithm. */
            //abstractSearchDirection searchDirMethod = new SteepestDescent();
            //opty.Add(searchDirMethod);
            //abstractLineSearch lineSearchMethod = new ArithmeticMean(0.0001, 1, 100);
            //opty.Add(lineSearchMethod);
            opty.Add(new LatinHyperCube(dsd, VariablesInScope.BothDiscreteAndReal));
            opty.Add(new GACrossoverBitString(dsd));
            opty.Add(new GAMutationBitString(dsd));
            opty.Add(new PNormProportionalSelection(optimize.minimize, true, 0.7));
            //opty.Add(new RandomNeighborGenerator(dsd,3000));
            //opty.Add(new KeepSingleBest(optimize.minimize));
            opty.Add(new squaredExteriorPenalty(opty, 10));
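            /* Roughly, a squared exterior penalty turns the constrained problem into
             *     F(x) = f(x) + w * ( sum_i max(0, g_i(x))^2 + sum_j h_j(x)^2 ),
             * so infeasible candidates are charged in proportion to the square of their constraint
             * violations; the second argument above (10) is presumably that weight w. */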
            opty.Add(new MaxAgeConvergence(40, 0.001));
            opty.Add(new MaxFnEvalsConvergence(10000));
            opty.Add(new MaxSpanInPopulationConvergence(15));
            double[] xStar;
            Parameters.Verbosity = VerbosityLevels.AboveNormal;
            // this next line is to set the Debug statements from OOOT to the Console.
            Debug.Listeners.Add(new TextWriterTraceListener(Console.Out));
            var timer = Stopwatch.StartNew();
            var fStar = opty.Run(out xStar, numGears * 4);

            printResults(opty, xStar, fStar, timer);
            Console.ReadKey();
        }
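        /* printResults is called above but not defined in this listing. A minimal sketch of what such
         * a reporting helper might look like, using only members that appear elsewhere in these
         * examples (ConvergenceDeclaredByTypeString is assumed to be available on GeneticAlgorithm,
         * as it is on the other optimizers shown): */
        private static void printResults(GeneticAlgorithm opty, double[] xStar, double fStar, Stopwatch timer)
        {
            timer.Stop();
            SearchIO.output("Completed in " + timer.Elapsed, 0);
            SearchIO.output("f* = " + fStar, 0);
            SearchIO.output("x* = " + xStar.MakePrintString(), 0);
            SearchIO.output("Converged by: " + opty.ConvergenceDeclaredByTypeString, 0);
        }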
Example #3
        static void Main()
        {
            /* first a new optimization method in the form of a genetic algorithm is created. */
            var optMethod = new MultiObjectiveGeneticAlgorithm();

            /* The objective function is Rosenbrock's banana function again. */
            optMethod.Add(new polynomialObjFn
            {
                Terms = new List <string>
                {
                    "100*x1^4",
                    "-200*x1^2*x2",
                    "x1^2",
                    "-2*x1",
                    "100*x2^2",
                    "1"
                }
            });
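            /* The six terms above sum to 100*(x2 - x1^2)^2 + (1 - x1)^2, the classic two-variable
             * Rosenbrock function, whose minimum value is 0 at {1, 1}. */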

            optMethod.Add(new RoyalRoads());

            /* Now a number of convergence criteria are added. Again, since these all
             * inherit from the abstractConvergence class, the Add method knows where
             * to store them. */
            optMethod.Add(new MaxIterationsConvergence(50000));     /* stop after 50,000 iterations (i.e. generations) */
            optMethod.Add(new MaxAgeConvergence(20, 0.000000001));  /* stop after 20 generations of the best not changing */
            optMethod.Add(new MaxSpanInPopulationConvergence(100)); /* stop if the largest distance in the population is no more than 100 units */
            optMethod.NumConvergeCriteriaNeeded = 2;                /* two of these three criteria are needed to stop the process. */

            /* The genetic algorithm is for discrete problems. Therefore we need to provide the optimization algorithm
             * and the subsequent generators with the details of the space. Here all three variables are really
             * continuous, but for the purposes of the GA each is discretized over [-100, 100] with a delta of
             * 0.0001 (one-ten-thousandth). Note that you can provide either the delta or the number of steps. */
            var SpaceDescriptor = new DesignSpaceDescription
            {
                new VariableDescriptor(-100, 100, 0.0001),
                new VariableDescriptor(-100, 100, 0.0001),
                new VariableDescriptor(-100, 100, 0.0001)
            };

            optMethod.Add(SpaceDescriptor);
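            /* with a delta of 0.0001 across [-100, 100], each variable takes one of
             * 2,000,001 possible values (2,000,000 steps). */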

            /* the genetic algorithm requires some more values to be fully specified. These include initial,
             * crossover, and mutation generators, as well as a selector. A Latin Hyper Cube initial sample is
             * first created to ensure the population covers the space well. */
            optMethod.Add(new LatinHyperCube(SpaceDescriptor, VariablesInScope.BothDiscreteAndReal));

            /* the typical bit-string approaches to mutation and crossover are adopted here. Note that the
             * mutation rate (per candidate) is increased to 0.4 from the default of 0.1, which means that
             * 4 in 10 candidates should experience at least one mutation. No new crossover rate is provided,
             * so the default of 1.7 is used. This means that between two parents there will likely
             * be 1.7 locations of crossover. */
            optMethod.Add(new GAMutationBitString(SpaceDescriptor, 0.4));
            optMethod.Add(new GACrossoverBitString(SpaceDescriptor));

            /* Finally, the selectors are added to the optimization method. In addition to the
             * SkewboidDiversity selector (one direction per objective), a RandomPairwiseCompare is added;
             * the latter is often referred to as tournament selection, wherein a random selection of two
             * candidates results in the inferior one being removed from the population. It requires the
             * optimization direction: are lower values better (minimize) or larger (maximize)?
             * A standalone sketch of the tournament idea follows this method. */
            optMethod.Add(new SkewboidDiversity(optimize.minimize, optimize.minimize));
            optMethod.Add(new RandomPairwiseCompare(optimize.minimize));

            /* for output statements (points in the code where the SearchIO.output(...) function is called),
             * the verbosity is set to AboveNormal, which is high. Higher verbosity gives more detail
             * but will likely cut into the speed of the search process. */
            Parameters.Verbosity = VerbosityLevels.AboveNormal;

            /* everything is set, we can now run the algorithm and retrieve the f* and x* values. */
            double[] xOptimal;
            var      fOptimal = optMethod.Run(out xOptimal);

            /* since we are curious how the process completed we now output some details. */
            SearchIO.output("f* = " + fOptimal); /* the 0 indicates that this statement has high priority
                                                  * and shouldn't be skipped in printing to the console. */
            SearchIO.output("x* = " + xOptimal.MakePrintString());
            SearchIO.output("The process converged by criteria: " + optMethod.ConvergenceDeclaredByTypeString);
            Console.ReadLine();
        }
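        /* A standalone sketch of the pairwise-tournament idea described above (plain C#, not the OOOT
         * implementation; assumes using System; and using System.Collections.Generic;): candidates are
         * repeatedly drawn in random pairs and the inferior one - the larger objective value, since we
         * minimize - is removed from the population. */
        private static List<double[]> tournamentSelect(List<double[]> population,
                                                       Func<double[], double> objective,
                                                       int survivors, Random rnd)
        {
            var pool = new List<double[]>(population);
            while (pool.Count > survivors && pool.Count >= 2)
            {
                // pick two distinct candidates at random
                int a = rnd.Next(pool.Count);
                int b = rnd.Next(pool.Count - 1);
                if (b >= a) b++;
                // keep the better (smaller objective) candidate, discard the other
                pool.RemoveAt(objective(pool[a]) <= objective(pool[b]) ? b : a);
            }
            return pool;
        }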
        protected override void Run()
        {
            Random r = new Random(); // previously seeded (new Random(1)); only used by the commented-out random-start code below



            //  double[,] desiredPath ={{1.87,8},{2.93,8.46},{2.80,8.41},
            //                             {1.99,8.06},{0.96,7.46},{0,6.71},{-0.77,5.93},{-1.3,5.26},{-1.60,4.81},{-1.65,4.75},{-1.25,5.33},{0,6.71}};
            double[,] desiredPath =
            {
                { 125, 225 }, { 165.44, 217.76 }, { 189.57, 200.42 }, { 185.89, 178.49 },
                { 158.65, 161.92 }, { 109.38, 135.30 }, { 57.997, 101.69 }, { 24.59, 82.07 },
                { 0.33, 76.90 }, { -17.03, 91.46 }, { -13.92, 129.10 }, { -0.74, 155.01 },
                { 20.73, 180.91 }, { 53.78, 205.65 }, { 88.17, 219.90 }
            };

            double         startAngle = 0;
            double         endAngle   = 2 * Math.PI;
            double         iOmega     = 2;
            double         iAlpha     = 0;
            MechSimulation sim        = new MechSimulation();


            // bounding-box limits (the BoundingBox constraint that would use them is commented out below)
            double bb_min, bb_max;

            //   bb_min = StarMath.Min(desiredPath);
            //  bb_max = StarMath.Max(desiredPath);

            // instead of computing min and max from the desired path (commented out above),
            // the bounding-box values are hard-coded here
            bb_max = 250;
            bb_min = 250;

            sim.Graph = seedGraph;
            //  designGraph testGraph = this.seedGraph;
            //   ev.c = new candidate(testGraph, 0);
            //  ev.c = this.seedGraph;

            //bounding box - trying to contain the solutions within a particular box
            //      BoundingBox bb = new BoundingBox(sim, bb_max,bb_min);
            //   GrashofCriteria cc = new GrashofCriteria(sim, 0);

            // add the path-matching objective function that the optimization routine will minimize
            var pathObjFun = new ComparePathWithDesired(seedCandidate, desiredPath, sim);


            //initializing the optimization program
            var optMethod = new NelderMead();

            //var optMethod = new GradientBasedOptimization();

            optMethod.Add(new PowellMethod());
            optMethod.Add(new DSCPowell(0.00001, .5, 1000));


            //     optMethod.Add(new GoldenSection(0.001,300));
            optMethod.Add(new ArithmeticMean(0.001, 0.1, 300));

            //adding simulation
            optMethod.Add(sim);

            //adding objective function to this optimization routine
            optMethod.Add(pathObjFun);

            // add a merit (penalty) function so that constraint violations can be blended into the objective
            optMethod.Add(new squaredExteriorPenalty(optMethod, 1.0));
            //      optMethod.Add(bb);
            //    optMethod.Add(cc);

            // convergence
            optMethod.Add(new MaxIterationsConvergence(100));
            //   optMethod.Add(new DeltaXConvergence(0.01));
            optMethod.Add(new ToKnownBestFConvergence(0.0, 0.1)); // stop once within 0.1 of the known best value of 0.0
            optMethod.Add(new MaxSpanInPopulationConvergence(0.01));

            var n     = 6;
            var dsd   = new DesignSpaceDescription();
            var minX  = StarMath.Min(StarMath.GetColumn(0, desiredPath));
            var maxX  = StarMath.Max(StarMath.GetColumn(0, desiredPath));
            var minY  = StarMath.Min(StarMath.GetColumn(1, desiredPath));
            var maxY  = StarMath.Max(StarMath.GetColumn(1, desiredPath));
            var delta = maxX - minX;

            minX -= delta;
            maxX += delta;
            delta = maxY - minY;
            minY -= delta;
            maxY += delta;

            for (int i = 0; i < n; i++)
            {
                if (i % 2 == 0)
                {
                    dsd.Add(new VariableDescriptor(minX, maxX));
                }
                else
                {
                    dsd.Add(new VariableDescriptor(minY, maxY));
                }
            }
            // dsd.Add(new VariableDescriptor(0,300));
            var LHC        = new LatinHyperCube(dsd, VariablesInScope.BothDiscreteAndReal);
            var initPoints = LHC.GenerateCandidates(null, 100);

            // for each of the initial points, run the optimizer and record its f* value (a simple multi-start strategy)



            //generating random x,y values
            //double[] x0 = new double[8];
            //for (int i = 0; i < x0.GetLength(0); i++) //since I am going to assign ground pivots as they are
            //    x0[i] =  100*r.NextDouble();


            //sim.calculate(x0);

            // double[] xStar;
            // double fStar = optMethod.Run(out xStar, x0);
            //// double fStar = optMethod.Run(out xStar, 8);

            double[]        fStar1 = new double[initPoints.Count];
            List <double[]> xStar1 = new List <double[]>();


            for (int i = 0; i < fStar1.GetLength(0); i++)
            {
                double[] x0 = initPoints[i];
                double[] xStar;
                double   fStar = optMethod.Run(out xStar, x0);
                fStar1[i] = fStar;
                xStar1.Add(xStar);
                SearchIO.output("LHC i: " + i);
            }
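            /* at this point fStar1[i] holds the best objective value found from initial point i and
             * xStar1[i] the corresponding design vector; the lines below pick the overall best. */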

            int xstarindex;

            SearchIO.output("fStar Min=" + StarMath.Min(fStar1, out xstarindex), 0);
            SearchIO.output("Xstar Values:" + xStar1[xstarindex]);
            SearchIO.output("***Converged by" + optMethod.ConvergenceDeclaredByTypeString, 0);
            SearchIO.output("Rerunning with new x values", 0);

            //     var optMethod1 = new GradientBasedOptimization();
            //     optMethod1.Add(new FletcherReevesDirection());
            //  //   optMethod1.Add(new ArithmeticMean(0.001, 0.1, 300));

            //     optMethod1.Add(new GoldenSection(0.001, 300));
            //     optMethod1.Add(sim);
            //     optMethod1.Add(pathObjFun);
            //     optMethod1.Add(new squaredExteriorPenalty(optMethod, 1.0));
            ////     optMethod1.Add(new MaxIterationsConvergence(100));
            //     optMethod1.Add(new ToKnownBestFConvergence(0.0, 0.1));
            //   //  optMethod.Add(new MaxSpanInPopulationConvergence(0.01))

            //     double[] xStar2;
            //     double fStar2 = optMethod1.Run(out xStar2, xStar1[xstarindex]);

            //     SearchIO.output("New Fstar = " + fStar2, 0);

            //double xstarmin, xstarmax;
            //xstarmax = StarMath.Max(xStar1[xstarindex]);
            //xstarmin = StarMath.Min(xStar1[xstarindex]);

            //var dsd1 = new DesignSpaceDescription();
            //dsd1.Add(new VariableDescriptor(xstarmin, xstarmax));
            //var LHC1 = new LatinHyperCube(dsd1, VariablesInScope.BothDiscreteAndReal);
            //var initPoints1 = LHC.GenerateCandidates(null, 100);
            //double[] fstar1 = new double[initPoints1.Count];
            //List<double[]> xstar_second = new List<double[]>();
            //for (int i = 0; i < fstar1.GetLength(0); i++)
            //{
            //    double[] x0 = new double[n];
            //    x0 = initPoints[i];
            //    double[] xStar;
            //    double fStar = optMethod.Run(out xStar, x0);
            //    fstar1[i] = fStar;
            //    xstar_second.Add(xStar);
            //    SearchIO.output("LHC i: " + i);

            //}



            //SearchIO.output("New fStar = " + StarMath.Min(fstar1), 0);


            //SearchIO.output("***Converged by" + optMethod.ConvergenceDeclaredByTypeString, 0);
        }
Example #5
        static void Main()
        {
            Parameters.Verbosity = VerbosityLevels.AboveNormal;
            // this next line is to set the Debug statements from OOOT to the Console.
            Trace.Listeners.Add(new TextWriterTraceListener(Console.Out));
            /* first a new optimization method in the form of a genetic algorithm is created. */
            var optMethod = new GeneticAlgorithm(100);

            /* then an objective function and constraints are added. Since these inherit from
             * type OptimizationToolbox.objectiveFunction and OptimizationToolbox.inequality
             * the Add function knows where to store them so that they will be invoked by the
             * fitness evaluation section of the GA. */
            optMethod.Add(new efficiencyMeasurement());

            //optMethod.Add(new lessThanManifoldVolume());

            /* GA's cannot explicitly handle inequalities, so a merit function must be added
             * so that the fitness evaluation knows how to combine the constraint with the
             * objective function. */
            //optMethod.Add(new squaredExteriorPenalty(optMethod, 50));

            /* Now a number of convergence criteria are added. Again, since these all
             * inherit from the abstractConvergence class, the Add method knows where
             * to store them. */
            optMethod.Add(new ToKnownBestFConvergence(0.0, 0.5));   /* stop once within 0.5 of the known best value of 0.0 */
            optMethod.Add(new MaxIterationsConvergence(50000));     /* stop after 50,000 iterations (i.e. generations) */
            optMethod.Add(new MaxAgeConvergence(20, 0.000000001));  /* stop after 20 generations of the best not changing */
            optMethod.Add(new MaxSpanInPopulationConvergence(100)); /* stop if the largest distance in the population is no more than 100 units */
            optMethod.NumConvergeCriteriaNeeded = 2;                /* two of these four criteria are needed to stop the process. */

            /* The genetic algorithm is for discrete problems. Therefore we need to provide the optimization algorithm
             * and the subsequent generators with the details of the space. The first variable represents the number of
             * passes in our fictitious problem. We set the lower bound to 1 and the upper bound to 20. The third argument
             * is the delta, and since only integers are possible we set this to 1. The second and third variables are
             * really continuous, but for the purpose of the GA we set a discretization at one-ten-thousandth for the second
             * and one-hundredth for the third. Note that you can provide either the delta or the number of steps. Here
             * 36,000 steps give increments of one-hundredth. */
            var SpaceDescriptor = new DesignSpaceDescription
            {
                new VariableDescriptor(1, 20, 1.0),
                new VariableDescriptor(0, 100, 0.0001),
                new VariableDescriptor(-180, 180, 36000)
            };
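            /* As a rough check of the discretization above: the first variable has 20 integer values
             * (1..20); the second, with a delta of 0.0001 over [0, 100], has 1,000,001 possible values;
             * and the third, with 36,000 steps over [-180, 180], moves in increments of 0.01 degrees. */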

            optMethod.Add(SpaceDescriptor);

            /* the genetic algorithm requires some more values to be fully specified. These include initial,
             * crossover, and mutation generators, as well as a selector. A Latin Hyper Cube initial sample is
             * first created to ensure the population covers the space well. */
            optMethod.Add(new LatinHyperCube(SpaceDescriptor, VariablesInScope.BothDiscreteAndReal));

            /* the typical bit-string approaches to mutation and crossover are adopted here. Note that the
             * mutation rate (per candidate) is increased to 0.4 from the default of 0.1, which means that
             * 4 in 10 candidates should experience at least one mutation. No new crossover rate is provided,
             * so the default of 1.7 is used. This means that between two parents there will likely
             * be 1.7 locations of crossover. */
            optMethod.Add(new GAMutationBitString(SpaceDescriptor, 0.4));
            optMethod.Add(new GACrossoverBitString(SpaceDescriptor));

            /* Finally, the selector is added to the optimization method. This RandomPairwiseCompare is often
             * referred to as tournament selection, wherein a random selection of two candidates results in the
             * inferior one being removed from the population. It requires the optimization direction: are lower
             * values better (minimize) or larger (maximize)? */
            optMethod.Add(new RandomPairwiseCompare(optimize.minimize));

            /* for output statements (points in the code where the SearchIO.output(...) function is called),
             * the verbosity is set to AboveNormal, which is high. Higher verbosity gives more detail but will
             * likely cut into the speed of the search process. (Verbosity and the trace listener were already
             * configured at the top of this method, so the next two lines simply repeat that setup.) */
            Parameters.Verbosity = VerbosityLevels.AboveNormal;
            // this next line is to set the Debug statements from OOOT to the Console.
            Trace.Listeners.Add(new TextWriterTraceListener(Console.Out));

            var timer = Stopwatch.StartNew();

            /* everything is set, we can now run the algorithm and retrieve the f* and x* values. */
            double[] xOptimal;
            var      fOptimal = optMethod.Run(out xOptimal);

            printResults(optMethod, xOptimal, fOptimal, timer);
        }