Example #1
        public Vector CalculateIteration()
        {
            // Solve the forward (state) problem for the current controls p and f.
            calc_u = DifferentialEquation.GetSolutionTask1(manage_p, manage_f, phi, nu, L, T, aa, GRID_SIZE, TIME_SIZE);

            // Solve the adjoint (conjugate) problem driven by the observation y and the computed state.
            Matrix KSI = DifferentialEquation.GetSolutionTask2(y, calc_u, nu, L, T, aa, GRID_SIZE, TIME_SIZE);

            // Trace of the adjoint variable on the boundary x = L; it enters
            // the gradient with respect to the control p.
            Vector ksi_l = new Vector(TIME_SIZE);

            for (int i = 0; i < TIME_SIZE; i++)
            {
                ksi_l[i] = KSI[i, GRID_SIZE - 1];
            }
            // Start from the step size of the previous iteration.
            double alpha = alpha_old;

            // Grid steps in time and space.
            double tau = MyMath.GetStep(TIME_SIZE, 0d, T);
            double h   = MyMath.GetStep(GRID_SIZE, 0d, L);

            // Pick the step size alpha according to the selected rule.
            switch (pickAlpha)
            {
            case PickAlpha.Lipshiz:
                alpha = ChooseAlpha_LipMethod();
                break;

            case PickAlpha.Divide:
                if (ITERATION > 0)
                {
                    alpha = ChooseAlpha_DivideMethod(alpha_old);
                }
                break;

            case PickAlpha.Lipshiz_CG:
                if (ITERATION > 0)
                {
                    alpha = ChooseAplpha_LipMethodForConditionalGradient(KSI, manage_f, alpha, tau, h);
                }
                //alpha = 1d / (ITERATION + 1);
                break;

            case PickAlpha.SUM:
                alpha = ChooseAlpha_Sum();
                break;

            case PickAlpha.Projection:
                alpha = ChooseAlpha_Projection(KSI, manage_f, alpha, R, tau, h);
                break;

            default:
                break;
            }



            // Projected-gradient update of the control p onto the box [P_MIN, P_MAX].
            manage_p = DifferentialEquation.GrdientProjection(manage_p, ksi_l, alpha, aa, P_MIN, P_MAX);
            //manage_f = DifferentialEquation.GrdientProjectionByF(manage_f, KSI, alpha, R, h, tau);
            ITERATION++;
            calc_u_old = calc_u;
            alpha_old  = alpha;
            return manage_p;
        }
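A minimal driver sketch (not from the original source) showing how CalculateIteration might be iterated to convergence; Solve, the iteration cap, and the stagnation tolerance are illustrative assumptions, while Functional_J, calc_u, and manage_p are taken from the examples above:

        // Hypothetical driver: repeat projected-gradient steps until the
        // functional J stops decreasing. Names and tolerances are illustrative.
        public Vector Solve(int maxIterations = 500, double tolerance = 1e-6)
        {
            Vector p = manage_p;
            double previousJ = double.MaxValue;

            for (int k = 0; k < maxIterations; k++)
            {
                p = CalculateIteration();                 // one step: state, adjoint, alpha, projection
                double currentJ = Functional_J(calc_u);   // functional value at the new state

                if (Math.Abs(previousJ - currentJ) < tolerance)
                {
                    break;                                // convergence by stagnation of J
                }
                previousJ = currentJ;
            }
            return p;
        }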
Example #2
        public double ChooseAlpha_Projection(Matrix KSI, Matrix f, double alpha, double R, double tau, double h)
        {
            Vector u_temp = new Vector();
            double EPS    = 0.001d;   // sufficient-decrease tolerance
            double NORM   = 0;

            // Backtracking: halve alpha until the trial step yields a sufficient
            // decrease of the functional J (an Armijo-type condition).
            do
            {
                // Trial gradient step in f, followed by projection onto the ball of radius R.
                Matrix f_new = DifferentialEquation.GrdientProjectionByF(f, KSI, alpha, R, h, tau);
                f_new = Pu(f_new, R, h, tau);

                // State corresponding to the trial control.
                u_temp = DifferentialEquation.GetSolutionTask1(manage_p, f_new, phi, nu, L, T, aa, GRID_SIZE, TIME_SIZE);

                NORM  = Calculate_L2NORM(f - f_new, h, tau);
                alpha = alpha / 2.0;
            } while (Functional_J(calc_u) - Functional_J(u_temp) < EPS * NORM * NORM);

            // The loop halves alpha once more after the accepted trial; undo that halving.
            return alpha * 2.0;
        }
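For reference, the test the loop enforces can be written as an Armijo-type sufficient-decrease condition: with u_\alpha the state for the trial control f_\alpha and \varepsilon = EPS,

    J(u_k) - J(u_\alpha) \ge \varepsilon \, \| f - f_\alpha \|_{L_2}^2

so the returned alpha * 2.0 is the last step size that passed the test.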
Example #3
        public double ChooseAplpha_LipMethodForConditionalGradient(Matrix KSI, Matrix f, double alpha, double tau, double h)
        {
            double rho = ChooseAlpha_LipMethod();
            int    N   = f.Length.n;
            int    M   = f.Length.m;

            double NORM = 0;
            Matrix KSI2 = new Matrix(N, M);
            Matrix f2   = new Matrix(N, M);

            // L2 norm of the adjoint variable KSI over the space-time domain.
            for (int i = 0; i < N; i++)
            {
                for (int j = 0; j < M; j++)
                {
                    KSI2[i, j] = KSI[i, j] * KSI[i, j];
                }
            }
            NORM = Math.Sqrt(MyMath.IntegrateSurface(KSI2, tau, h));

            // Conditional-gradient (Frank-Wolfe) direction: the minimizer of the
            // linearized functional over the ball of radius R is f' = -R * KSI / ||KSI||.
            double f_prime = 0, temp = 0;

            for (int i = 0; i < N; i++)
            {
                for (int j = 0; j < M; j++)
                {
                    f_prime  = -R * KSI[i, j] / NORM;
                    temp     = f_prime - f[i, j];
                    f2[i, j] = temp * temp;
                }
            }

            // Squared L2 norm of the step direction f' - f (no square root taken here).
            NORM = MyMath.IntegrateSurface(f2, tau, h);
            double dJ = DifferentialEquation.dJ_f(KSI, f, h, tau);

            // Step length for the conditional-gradient update, clipped to [0, 1].
            return min(1d, rho * dJ / NORM);
        }
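The returned value matches the classical conditional-gradient (Frank-Wolfe) step-size rule; with \xi the adjoint variable KSI, \rho the Lipschitz-based factor from ChooseAlpha_LipMethod, and dJ the linearized decrease computed by dJ_f (assumed here to return \langle J'(f), f - f' \rangle):

    f' = -R \, \xi / \| \xi \|_{L_2}, \qquad \alpha = \min\left(1, \; \rho \, dJ / \| f' - f \|_{L_2}^2\right)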