示例#1
0
        /// <summary>
        /// Evaluates the weighted pareto objective at the point (pv1.val, pv2.val).
        /// The base function's XY result is scored by each analyzer's function and
        /// the weighted scores are summed into a single scalar.
        /// </summary>
        /// <param name="BaseFn">Underlying model mapping two variable values to an XY result.</param>
        /// <param name="pra1">First result analyzer (scoring function plus weight).</param>
        /// <param name="pra2">Second result analyzer (scoring function plus weight).</param>
        /// <param name="pv1">First pareto variable; only its current value is read.</param>
        /// <param name="pv2">Second pareto variable; only its current value is read.</param>
        /// <returns>Weighted sum of both analyzer scores for the base result.</returns>
        public static double StaticEvaluation2D(
            Func <double, double, XY> BaseFn,
            ParetoResultAnalyzer pra1, ParetoResultAnalyzer pra2,
            ParetoVariable pv1, ParetoVariable pv2
            )
        {
            XY result = BaseFn(pv1.val, pv2.val);

            double score1 = pra1.fn(result) * pra1.weight;
            double score2 = pra2.fn(result) * pra2.weight;

            return score1 + score2;
        }
示例#2
0
        /// <summary>
        /// Returns a copy of this variable whose value has been stepped forward by
        /// one finite-difference increment (val + dx); every other field (af, dx,
        /// min_bound, max_bound) is carried over unchanged. Used to probe the
        /// objective when estimating numerical gradients.
        /// </summary>
        /// <returns>A new ParetoVariable at val + dx with identical settings.</returns>
        public ParetoVariable get_increment()
        {
            // Copy all fields, bumping only the value by the step size dx.
            // NOTE(review): no bound clamping happens here — presumably intentional
            // for gradient probing; confirm against ParetoVariable.adjust.
            return new ParetoVariable
            {
                val       = this.val + this.dx,
                af        = this.af,
                dx        = this.dx,
                min_bound = this.min_bound,
                max_bound = this.max_bound
            };
        }
示例#3
0
        /// <summary>
        /// Numerically estimates the gradient of the 2-D pareto objective at
        /// (pv1, pv2) using forward finite differences, with each variable's dx
        /// as its step size.
        /// </summary>
        /// <param name="BaseFn">Underlying model mapping two variable values to an XY result.</param>
        /// <param name="pra1">First result analyzer.</param>
        /// <param name="pra2">Second result analyzer.</param>
        /// <param name="pv1">First variable; pv1.dx is its finite-difference step.</param>
        /// <param name="pv2">Second variable; pv2.dx is its finite-difference step.</param>
        /// <param name="df_dpv1">Estimated partial derivative with respect to pv1.</param>
        /// <param name="df_dpv2">Estimated partial derivative with respect to pv2.</param>
        /// <returns>The objective value at the current (pv1, pv2) point.</returns>
        public static double Gradient2D(
            Func <double, double, XY> BaseFn,
            ParetoResultAnalyzer pra1, ParetoResultAnalyzer pra2,
            ParetoVariable pv1, ParetoVariable pv2,
            out double df_dpv1, out double df_dpv2
            )
        {
            double f = StaticEvaluation2D(BaseFn, pra1, pra2, pv1, pv2);

            // Forward difference: (f(x + dx) - f(x)) / dx, one variable at a time.
            df_dpv1 = (StaticEvaluation2D(BaseFn, pra1, pra2, pv1.get_increment(), pv2) - f) / pv1.dx;
            df_dpv2 = (StaticEvaluation2D(BaseFn, pra1, pra2, pv1, pv2.get_increment()) - f) / pv2.dx;

            return f;
        }
示例#4
0
        /// <summary>
        /// Runs a fixed number of gradient-descent iterations on the 2-D pareto
        /// objective, mutating pv1 and pv2 in place. Every 100th iteration,
        /// progress is written to the debug/console output and — when outPath is
        /// non-empty — the current base result is appended to a CSV file.
        /// </summary>
        /// <param name="BaseFn">Underlying model mapping two variable values to an XY result.</param>
        /// <param name="pra1">First result analyzer.</param>
        /// <param name="pra2">Second result analyzer.</param>
        /// <param name="pv1">First variable; adjusted in place each iteration.</param>
        /// <param name="pv2">Second variable; adjusted in place each iteration.</param>
        /// <param name="numTrials">Number of gradient-descent iterations to run.</param>
        /// <param name="outPath">Optional CSV output path; empty disables file output.</param>
        public static void FindSolution2D(
            Func <double, double, XY> BaseFn,
            ParetoResultAnalyzer pra1, ParetoResultAnalyzer pra2,
            ParetoVariable pv1, ParetoVariable pv2,
            int numTrials, string outPath = ""
            )
        {
            // Find a 2-dimensional pareto solution using gradient descent, given
            // two input biasing functions.

            bool exportEnabled = !string.IsNullOrEmpty(outPath);

            // Start from a clean output file. File.Delete("") throws
            // ArgumentException, so only touch the filesystem when a path was
            // actually supplied (the default is "").
            if (exportEnabled)
            {
                File.Delete(outPath);
            }

            double f       = 0;
            double df_dpv1 = 0;
            double df_dpv2 = 0;

            for (int trialsElapsed = 0; trialsElapsed < numTrials; trialsElapsed++)
            {
                f = Gradient2D(BaseFn, pra1, pra2, pv1, pv2, out df_dpv1, out df_dpv2);

                if (trialsElapsed % 100 == 0)
                {
                    Debug.WriteLine("Iteration: " + trialsElapsed + ".");
                    Debug.WriteLine("Static evaluation at f == (pv1, pv2): " + f + " == (" + pv1.val + ", " + pv2.val + ")");
                    Debug.WriteLine("(dpv1, dpv2) == " + "(" + df_dpv1 + ", " + df_dpv2 + ")");
                }

                // Step each variable using its estimated partial derivative.
                // NOTE(review): step direction/size and any bound clamping are
                // handled inside ParetoVariable.adjust — confirm there.
                pv1.adjust(df_dpv1);
                pv2.adjust(df_dpv2);

                // Sampled after the adjustment, so each CSV row reflects the
                // post-step position for this iteration.
                if (trialsElapsed % 100 == 0)
                {
                    Console.WriteLine(trialsElapsed);
                    if (exportEnabled)
                    {
                        ExportData.AppendXYtoCSV(outPath, BaseFn(pv1.val, pv2.val));
                    }
                }
            }
        }