// One conditional-gradient (Frank-Wolfe) step for the control f on the ball
// U = { f : ||f||_L2 <= R }. The linearized objective <KSI, s> attains its
// minimum over U at the extreme point s = -R * KSI / ||KSI||, and the new
// iterate is the convex combination f_new = (1 - alpha) * f_old + alpha * s.
public static Matrix ConditionalGradientByF(Matrix f_old, Matrix KSI, double alpha, double R, double h, double tau)
{
    int N = f_old.Length.n;
    int M = f_old.Length.m;

    // ||KSI||_L2 computed with the quadrature rule IntegrateSurface.
    Matrix KSI2 = new Matrix(N, M);
    for (int i = 0; i < N; i++)
        for (int j = 0; j < M; j++)
            KSI2[i, j] = KSI[i, j] * KSI[i, j];
    double norm = Math.Sqrt(MyMath.IntegrateSurface(KSI2, tau, h));

    Matrix f_new = new Matrix(N, M);
    for (int i = 0; i < N; i++)
    {
        for (int j = 0; j < M; j++)
        {
            double s = -R * KSI[i, j] / norm;   // extreme point of the ball
            f_new[i, j] = (1d - alpha) * f_old[i, j] + alpha * s;
        }
    }
    return f_new;
}
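// A minimal driver sketch, not part of the original code: it assumes the
// gradient KSI = J'(f_k) is produced elsewhere (typically by solving an
// adjoint problem), so it is passed in as the hypothetical delegate
// computeGradient. The classical schedule alpha_k = 2 / (k + 2) is used as a
// placeholder; ChooseAlpha_LipMethodForConditionalGradient below is an
// alternative. Requires `using System;` for Func<,>.
public static Matrix RunConditionalGradient(Matrix f0, Func<Matrix, Matrix> computeGradient, double R, double h, double tau, int maxIter)
{
    Matrix f = f0;
    for (int k = 0; k < maxIter; k++)
    {
        Matrix KSI = computeGradient(f);   // gradient at the current iterate (assumed supplied by the caller)
        double alpha = 2d / (k + 2d);      // placeholder Frank-Wolfe step schedule
        f = ConditionalGradientByF(f, KSI, alpha, R, h, tau);
    }
    return f;
}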
// Projection P_U(f) onto U = { f : ||f||_L2 <= R }: a feasible f is returned
// unchanged; an infeasible f is scaled radially back onto the sphere of radius R.
public Matrix Pu(Matrix f, double R, double h, double tau)
{
    int N = f.Length.n;
    int M = f.Length.m;

    Matrix f2 = new Matrix(N, M);
    for (int i = 0; i < N; i++)
        for (int j = 0; j < M; j++)
            f2[i, j] = f[i, j] * f[i, j];
    double norm = Math.Sqrt(MyMath.IntegrateSurface(f2, tau, h));

    if (norm <= R)
    {
        return f;   // already feasible
    }

    Matrix projected = new Matrix(N, M);
    for (int i = 0; i < N; i++)
        for (int j = 0; j < M; j++)
            projected[i, j] = R * f[i, j] / norm;
    return projected;
}
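// An illustrative sanity check, not part of the original code: after Pu the
// constraint must hold up to quadrature and floating-point error.
// CheckProjection is a hypothetical helper name.
bool CheckProjection(Matrix f, double R, double h, double tau)
{
    Matrix p = Pu(f, R, h, tau);
    return Calculate_L2NORM(p, h, tau) <= R + 1e-12;   // small tolerance for rounding
}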
// Absolute value of the L2 inner product <KSI, f> = ∫∫ KSI * f dx dt,
// approximated by the quadrature rule IntegrateSurface; used as the dJ term
// in the step-size rule below.
public static double dJ_f(Matrix KSI, Matrix f, double h, double tau)
{
    int N = f.Length.n;
    int M = f.Length.m;

    Matrix product = new Matrix(N, M);
    for (int i = 0; i < N; i++)
        for (int j = 0; j < M; j++)
            product[i, j] = KSI[i, j] * f[i, j];

    double I = MyMath.IntegrateSurface(product, tau, h);
    return Math.Abs(I);
}
// L2 norm of f over the space-time grid: sqrt(∫∫ f^2 dx dt).
double Calculate_L2NORM(Matrix f, double h, double tau)
{
    int N = f.Length.n;
    int M = f.Length.m;

    Matrix f2 = new Matrix(N, M);
    for (int i = 0; i < N; i++)
        for (int j = 0; j < M; j++)
            f2[i, j] = f[i, j] * f[i, j];

    return Math.Sqrt(MyMath.IntegrateSurface(f2, tau, h));
}
// Gradient-projection step on the ball ||f||_L2 <= R: take the plain gradient
// step f_old - alpha * KSI; if it leaves the ball, scale it radially back onto
// the sphere of radius R (the projection Pu fused with the step).
public static Matrix GradientProjectionByF(Matrix f_old, Matrix KSI, double alpha, double R, double h, double tau)
{
    int N = f_old.Length.n;
    int M = f_old.Length.m;

    // Squared L2 norm of the unprojected step.
    Matrix T = new Matrix(N, M);
    for (int i = 0; i < N; i++)
    {
        for (int j = 0; j < M; j++)
        {
            double step = f_old[i, j] - alpha * KSI[i, j];
            T[i, j] = step * step;
        }
    }
    double I = MyMath.IntegrateSurface(T, tau, h);

    Matrix f_new = new Matrix(N, M);
    if (I <= R * R)
    {
        // The step stays inside the ball: accept it unchanged.
        for (int i = 0; i < N; i++)
            for (int j = 0; j < M; j++)
                f_new[i, j] = f_old[i, j] - alpha * KSI[i, j];
    }
    else
    {
        // Scale back onto the sphere of radius R.
        double norm = Math.Sqrt(I);
        for (int i = 0; i < N; i++)
            for (int j = 0; j < M; j++)
                f_new[i, j] = R * (f_old[i, j] - alpha * KSI[i, j]) / norm;
    }
    return f_new;
}
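// Driver sketch for the gradient-projection method, analogous to
// RunConditionalGradient above and equally hypothetical; computeGradient again
// stands in for the adjoint-based gradient evaluation.
public static Matrix RunGradientProjection(Matrix f0, Func<Matrix, Matrix> computeGradient, double alpha, double R, double h, double tau, int maxIter)
{
    Matrix f = f0;
    for (int k = 0; k < maxIter; k++)
    {
        Matrix KSI = computeGradient(f);
        // Fixed step size alpha here; an adaptive rule could be substituted.
        f = GradientProjectionByF(f, KSI, alpha, R, h, tau);
    }
    return f;
}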
// Step-size rule for the conditional-gradient method:
// alpha_k = min(1, rho * dJ / ||s - f||^2_L2), where s = -R * KSI / ||KSI||
// is the extreme point of the ball and rho is supplied by the Lipschitz-based
// rule ChooseAlpha_LipMethod.
public double ChooseAlpha_LipMethodForConditionalGradient(Matrix KSI, Matrix f, double R, double tau, double h)
{
    double rho = ChooseAlpha_LipMethod();

    int N = f.Length.n;
    int M = f.Length.m;

    // ||KSI||_L2, needed to form the extreme point s.
    Matrix KSI2 = new Matrix(N, M);
    for (int i = 0; i < N; i++)
        for (int j = 0; j < M; j++)
            KSI2[i, j] = KSI[i, j] * KSI[i, j];
    double ksiNorm = Math.Sqrt(MyMath.IntegrateSurface(KSI2, tau, h));

    // ||s - f||^2_L2 for the denominator of the rule.
    Matrix diff2 = new Matrix(N, M);
    for (int i = 0; i < N; i++)
    {
        for (int j = 0; j < M; j++)
        {
            double s = -R * KSI[i, j] / ksiNorm;
            double d = s - f[i, j];
            diff2[i, j] = d * d;
        }
    }
    double denom = MyMath.IntegrateSurface(diff2, tau, h);

    double dJ = DifferentialEquation.dJ_f(KSI, f, h, tau);
    return Math.Min(1d, rho * dJ / denom);
}
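// Sketch of one adaptive conditional-gradient iteration combining the rule
// above with the update step; illustrative only, assuming KSI has already
// been computed for the current iterate. The method name is hypothetical.
public Matrix ConditionalGradientStepWithLipAlpha(Matrix f, Matrix KSI, double R, double h, double tau)
{
    double alpha = ChooseAlpha_LipMethodForConditionalGradient(KSI, f, R, tau, h);
    return ConditionalGradientByF(f, KSI, alpha, R, h, tau);
}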