/// <summary>
/// Shor's r-algorithm with space dilation: repeatedly maps the gradient through
/// the dilation matrix B, takes an adaptively adjusted step along the
/// untransformed descent direction, and stops when the gradient norm drops
/// below epcilon or the iteration limit is exceeded.
/// </summary>
/// <returns>ReturnData holding the found minimizer (min), the iteration count
/// (K) and the accumulated function/gradient evaluation counter (F).</returns>
public ReturnData SearchMin()
{
    ReturnData results = new ReturnData(N);

    // Step 1: objective value and gradient at the start point.
    f0 = f.getSum(x);
    results.F += 1;
    g = fg.getGrad(x);
    results.F += N + 1;
    g1 = new List<double>(g);
    results.F += N + 1;

    // B0 = E: the dilation matrix starts as the identity (flat, row-major).
    for (int i = 0; i < N; i++)
    {
        for (int j = 0; j < N; j++)
        {
            B.Add(i == j ? 1 : 0);
        }
    }

    // Infinity norm of the start point scales the first minimal step.
    double tmp = 0.0;
    for (int i = 0; i < N; i++)
    {
        if (tmp < Math.Abs(x[i]))
        {
            tmp = Math.Abs(x[i]);
        }
    }
    // BUG FIX: the original declared a fresh local (`double h = ...`) that
    // shadowed the class field read by adaptiveAdjustmentOfStep, so the
    // computed first minimal step was silently discarded.
    h = Math.Sqrt(0.0001) * tmp; // first minimal step

    countIter = 0;
    tmpX = new List<double>(x);
    midlineX.Add(tmpX);

    while (normVec(g) > epcilon && countIter <= iteration)
    {
        double d = 0.0;
        dd = 0.0;
        double ng1 = 0.0;
        gt = new List<double>(N);
        ksi = new List<double>(N);
        g0 = new List<double>(N);
        x1 = new List<double>(N);
        prevB = new List<double>(B); // snapshot of B before this iteration's update

        // Gradient in the transformed space: gt = B * g.
        // (The original also accumulated an unused norm `ngt`; removed as dead code.)
        for (int i = 0; i < N; i++)
        {
            d = 0.0;
            for (int j = 0; j < N; j++)
            {
                d += B[j + i * N] * g[j];
            }
            gt.Add(d);
            ng1 += g1[i] * g1[i];
        }
        ng1 = Math.Sqrt(ng1);

        // Dilation direction: difference of current and previous transformed gradients.
        for (int i = 0; i < N; i++)
        {
            ksi.Add(gt[i] - g1[i]);
        }
        double nrmksi = normVec(ksi);

        if (countIter > 0)
        {
            for (int i = 0; i < N; i++)
            {
                ksi[i] /= nrmksi;
            }
            d = 0.0;
            for (int i = 0; i < N; i++)
            {
                d += ksi[i] * gt[i];
            }
            ng1 = 0.0;
            d *= w;
            // g1 = gt + (1/alpha - 1) * (ksi . gt) * ksi, and the new
            // transformation matrix: B = B (E + (1/alpha - 1) ksi ksi').
            for (int i = 0; i < N; i++)
            {
                dd = 0.0;
                g1[i] = gt[i] + d * ksi[i];
                ng1 += g1[i] * g1[i];
                for (int j = 0; j < N; j++)
                {
                    dd += B[j * N + i] * ksi[j];
                }
                dd *= w;
                for (int j = 0; j < N; j++)
                {
                    B[j * N + i] += dd * ksi[j];
                }
            }
            ng1 = Math.Sqrt(ng1);
        }

        // Normalized direction in the transformed space.
        for (int i = 0; i < N; i++)
        {
            gt[i] = g1[i] / ng1;
        }

        // Direction in the untransformed space: g0 = B' * gt (pre-update B).
        for (int i = 0; i < N; i++)
        {
            d = 0.0;
            for (int j = 0; j < N; j++)
            {
                d += prevB[j * N + i] * gt[j];
            }
            g0.Add(d);
        }

        adaptiveAdjustmentOfStep(results); // moves x and updates f0 in place
        g = fg.getGrad(x);
        results.F += N + 1;
        countIter += 1;
    }

    results.K = countIter;
    results.min = x.ToArray();
    return results;
}
public ReturnData alg_LM_adapt(int N, double[] x0, double [] csi, double eps, double Kiter, double gamma0 = 10000, double v = 10) { double gamma = gamma0; double[] x_min = new double[N]; double[] x_k = new double[N]; double[] x_knew = new double[N]; double[] gradfR = new double[N]; double[] d_k = new double[N]; double[][] Gessian = new double[N][]; double[][] GessianInv = new double[N][]; ReturnData result = new ReturnData(N); for (int i = 0; i < N; i++) { Gessian[i] = new double[N]; GessianInv[i] = new double[N]; x_k[i] = x0[i]; x_knew[i] = x0[i]; } int k = 0; while (k <= Kiter) { //Вычисление градиента grad_funR(x_k, gradfR, csi, N); result.F += 2 * N + 1; //Проверка критерия останова if (norma(gradfR, N) < eps) { //cout<<" STOP"<<endl; for (int i = 0; i < N; i++) { x_min[i] = x_k[i]; } break; } gessian_funR(x_k, Gessian, csi, N); result.F += N * N + 1; while (R_fun(x_knew, csi, N) >= R_fun(x_k, csi, N)) { result.F += 4; //Прибавление к главной диагонали гессиана шрафной добавки (параметра регуляяризации) for (int i = 0; i < N; i++) { Gessian[i][i] += gamma;//*Gessian[i][i]; } //Нахождение обратной матрицы inverse(Gessian, GessianInv, N);; //Вычисление направления поиска d_k = Matr_x_Vect(GessianInv, gradfR, N); Vect_x_a(d_k, -1, N); result.D.Add(d_k); //Вычисление новой точки x_k+1 for (int i = 0; i < N; i++) { x_knew[i] = x_k[i] + d_k[i]; } //cout<<"f_k+1 >= f_k"<<endl; if (R_fun(x_knew, csi, N) >= R_fun(x_k, csi, N)) { gamma = v * gamma; } } for (int i = 0; i < N; i++) { x_k[i] = x_knew[i]; } gamma = gamma / v; k++; } result.K = k; result.min = x_min; return(result); }
/// <summary>
/// Shor's r-algorithm with space dilation: repeatedly maps the gradient through
/// the dilation matrix B, takes an adaptively adjusted step along the
/// untransformed descent direction, and stops when the gradient norm drops
/// below epcilon or the iteration limit is exceeded.
/// </summary>
/// <returns>ReturnData holding the found minimizer (min), the iteration count
/// (K) and the accumulated function/gradient evaluation counter (F).</returns>
public ReturnData SearchMin()
{
    ReturnData results = new ReturnData(N);

    // Step 1: objective value and gradient at the start point.
    f0 = f.getSum(x);
    results.F += 1;
    g = fg.getGrad(x);
    results.F += N + 1;
    g1 = new List<double>(g);
    results.F += N + 1;

    // B0 = E: the dilation matrix starts as the identity (flat, row-major).
    for (int i = 0; i < N; i++)
    {
        for (int j = 0; j < N; j++)
        {
            B.Add(i == j ? 1 : 0);
        }
    }

    // Infinity norm of the start point scales the first minimal step.
    double tmp = 0.0;
    for (int i = 0; i < N; i++)
    {
        if (tmp < Math.Abs(x[i]))
        {
            tmp = Math.Abs(x[i]);
        }
    }
    // BUG FIX: the original declared a fresh local (`double h = ...`) that
    // shadowed the class field read by adaptiveAdjustmentOfStep, so the
    // computed first minimal step was silently discarded.
    h = Math.Sqrt(0.0001) * tmp; // first minimal step

    countIter = 0;
    tmpX = new List<double>(x);
    midlineX.Add(tmpX);

    while (normVec(g) > epcilon && countIter <= iteration)
    {
        double d = 0.0;
        dd = 0.0;
        double ng1 = 0.0;
        gt = new List<double>(N);
        ksi = new List<double>(N);
        g0 = new List<double>(N);
        x1 = new List<double>(N);
        prevB = new List<double>(B); // snapshot of B before this iteration's update

        // Gradient in the transformed space: gt = B * g.
        // (The original also accumulated an unused norm `ngt`; removed as dead code.)
        for (int i = 0; i < N; i++)
        {
            d = 0.0;
            for (int j = 0; j < N; j++)
            {
                d += B[j + i * N] * g[j];
            }
            gt.Add(d);
            ng1 += g1[i] * g1[i];
        }
        ng1 = Math.Sqrt(ng1);

        // Dilation direction: difference of current and previous transformed gradients.
        for (int i = 0; i < N; i++)
        {
            ksi.Add(gt[i] - g1[i]);
        }
        double nrmksi = normVec(ksi);

        if (countIter > 0)
        {
            for (int i = 0; i < N; i++)
            {
                ksi[i] /= nrmksi;
            }
            d = 0.0;
            for (int i = 0; i < N; i++)
            {
                d += ksi[i] * gt[i];
            }
            ng1 = 0.0;
            d *= w;
            // g1 = gt + (1/alpha - 1) * (ksi . gt) * ksi, and the new
            // transformation matrix: B = B (E + (1/alpha - 1) ksi ksi').
            for (int i = 0; i < N; i++)
            {
                dd = 0.0;
                g1[i] = gt[i] + d * ksi[i];
                ng1 += g1[i] * g1[i];
                for (int j = 0; j < N; j++)
                {
                    dd += B[j * N + i] * ksi[j];
                }
                dd *= w;
                for (int j = 0; j < N; j++)
                {
                    B[j * N + i] += dd * ksi[j];
                }
            }
            ng1 = Math.Sqrt(ng1);
        }

        // Normalized direction in the transformed space.
        for (int i = 0; i < N; i++)
        {
            gt[i] = g1[i] / ng1;
        }

        // Direction in the untransformed space: g0 = B' * gt (pre-update B).
        for (int i = 0; i < N; i++)
        {
            d = 0.0;
            for (int j = 0; j < N; j++)
            {
                d += prevB[j * N + i] * gt[j];
            }
            g0.Add(d);
        }

        adaptiveAdjustmentOfStep(results); // moves x and updates f0 in place
        g = fg.getGrad(x);
        results.F += N + 1;
        countIter += 1;
    }

    results.K = countIter;
    results.min = x.ToArray();
    return results;
}
private void adaptiveAdjustmentOfStep(ReturnData results) { for (int i = 0; i < N; i++) { x1.Add(x[i]); } double hp = h; bool ksm = false; double k1 = 0.0, k2 = 0.0; double kc = 0.0; double ii, stepvanish = 0.0; double du20 = 2.0, du10 = 1.5, du03 = 1.05; while (true) { for (int i = 0; i < N; i++) { x1[i] = x[i]; } double f1 = f0; if (f1 < 0.0) { dd = -1.0; } else { dd = 1.0; } /* Следующе испытание: */ for (int i = 0; i < N; i++) { x[i] -= hp * g0[i]; } ii = 0.0; for (int i = 0; i < N; i++) { if (Math.Abs(x[i] - x1[i]) < Math.Abs(x[i]) * epsnorm) { ii += 1; } } /* Функция в текущее точке: */ f0 = f.getSum(x); results.F += 1; if (ii == N) { stepvanish += 1; if (stepvanish >= 5) { return; } else { for (int i = 0; i < N; i++) { x[i] = x1[i]; } f0 = f1; hp *= 10.0; ksm = true; } } /* используем маленький шаг: */ else if (f0 > f1) { if (ksm) { return; } k2 += 1; k1 = 0; hp /= dq; for (int i = 0; i < N; i++) { x[i] = x1[i]; } f0 = f1; if (kc >= mxtc) { return; } } else { if (-1.0 * f0 <= -1.0 * f1) { return; } /* Используеем больший шаг */ k1 += 1; if (k2 > 0) { kc += 1; } k2 = 0; if (k1 >= 20) { hp *= du20; } else if (k1 >= 10) { hp *= du10; } else if (k1 >= 3) { hp *= du03; } } } }
private void adaptiveAdjustmentOfStep(ReturnData results) { for (int i = 0; i < N; i++) x1.Add(x[i]); double hp = h; bool ksm = false; double k1 = 0.0, k2 = 0.0; double kc = 0.0; double ii, stepvanish = 0.0; double du20 = 2.0, du10 = 1.5, du03 = 1.05; while (true) { for (int i = 0; i < N; i++) x1[i] = x[i]; double f1 = f0; if (f1 < 0.0) dd = -1.0; else dd = 1.0; /* Следующе испытание: */ for (int i = 0; i < N; i++) x[i] -= hp * g0[i]; ii = 0.0; for (int i = 0; i < N; i++) { if (Math.Abs(x[i] - x1[i]) < Math.Abs(x[i]) * epsnorm) ii += 1; } /* Функция в текущее точке: */ f0 = f.getSum(x); results.F += 1; if (ii == N) { stepvanish += 1; if (stepvanish >= 5) return; else { for (int i = 0; i < N; i++) x[i] = x1[i]; f0 = f1; hp *= 10.0; ksm = true; } } /* используем маленький шаг: */ else if (f0 > f1) { if (ksm) return; k2 += 1; k1 = 0; hp /= dq; for (int i = 0; i < N; i++) x[i] = x1[i]; f0 = f1; if (kc >= mxtc) return; } else { if (-1.0 * f0 <= -1.0 * f1) return; /* Используеем больший шаг */ k1 += 1; if (k2 > 0) kc += 1; k2 = 0; if (k1 >= 20) hp *= du20; else if (k1 >= 10) hp *= du10; else if (k1 >= 3) hp *= du03; } } }
private void test2() { int N = 10; Random rand = new Random(); int[] Ntest = { 2, 3, 5, 10, 30, 50, 100 }; double[] eps = { 0.1, 0.01, 0.001, 0.0001, 0.00001, 0.000001, 0.0000001 }; double[][] Delta = new double[7][]; int[][] Kstop = new int[7][]; for (int i = 0; i < 7; i++) { Delta[i] = new double[7]; Kstop[i] = new int[7]; } int K = 50; double[] sred = new double[K]; int[] sredK = new int[K]; for (int n = 0; n < 7; n++) //для каждого N { N = Ntest[n]; ReturnData result1 = new ReturnData(N); double[] x0 = new double[N]; double[] _x0 = new double[N]; double[] del = new double[N]; double[] csi = new double[N]; for (int i = 0; i < 7; i++) //изменяем eps { for (int j = 0; j < N; j++) { csi[j] = rand.NextDouble() + 000.1; x0[j] = rand.Next(-10, -5); } for (int k = 0; k < K; k++) //несколько итераций для усреднения { ShorMethod method1 = new ShorMethod(eps[i], new List <double>(x0), N, 2, new List <double>(csi), Klimitation); result1 = method1.SearchMin(); sredK[k] = result1.K; Console.WriteLine(N); for (int j = 0; j < N; j++) { del[j] = 1 - result1.min[j]; } sred[k] = method1.normVec(new List <double>(del)); } for (int j = 0; j < K; j++) { Delta[i][n] += sred[j]; Kstop[i][n] += sredK[j]; } Delta[i][n] /= K; Kstop[i][n] /= K; } } CreateLineGraph2(); Graw_Draw2(eps, Delta); CreateLineGraph3(Klimitation); Graw_Draw3(eps, Kstop); }
public ReturnData alg_LM_adapt(int N, double[] x0, double []csi, double eps, double Kiter, double gamma0 = 10000, double v = 10) { double gamma = gamma0; double[] x_min = new double[N]; double[] x_k = new double[N]; double[] x_knew = new double[N]; double[] gradfR = new double[N]; double[] d_k = new double[N]; double[][] Gessian = new double[N][]; double[][] GessianInv = new double[N][]; ReturnData result = new ReturnData(N); for (int i = 0; i < N; i++) { Gessian[i] = new double[N]; GessianInv[i] = new double[N]; x_k[i] = x0[i]; x_knew[i] = x0[i]; } int k = 0; while (k <= Kiter) { //Вычисление градиента grad_funR(x_k, gradfR, csi, N); result.F += 2*N + 1; //Проверка критерия останова if (norma(gradfR, N) < eps) { //cout<<" STOP"<<endl; for (int i = 0; i < N; i++) { x_min[i] = x_k[i]; } break; } gessian_funR(x_k, Gessian, csi, N); result.F += N * N + 1; while (R_fun(x_knew, csi, N) >= R_fun(x_k, csi, N)) { result.F += 4; //Прибавление к главной диагонали гессиана шрафной добавки (параметра регуляяризации) for (int i = 0; i < N; i++) Gessian[i][i] += gamma;//*Gessian[i][i]; //Нахождение обратной матрицы inverse(Gessian, GessianInv, N); ; //Вычисление направления поиска d_k = Matr_x_Vect(GessianInv, gradfR, N); Vect_x_a(d_k, -1, N); result.D.Add(d_k); //Вычисление новой точки x_k+1 for (int i = 0; i < N; i++) { x_knew[i] = x_k[i] + d_k[i]; } //cout<<"f_k+1 >= f_k"<<endl; if (R_fun(x_knew, csi, N) >= R_fun(x_k, csi, N)) gamma = v * gamma; } for (int i = 0; i < N; i++) x_k[i] = x_knew[i]; gamma = gamma / v; k++; } result.K = k; result.min = x_min; return result; }
private void test2() { int N = 10; Random rand = new Random(); int[] Ntest = { 2, 3, 5, 10, 30, 50, 100 }; double[] eps = { 0.1, 0.01, 0.001, 0.0001, 0.00001, 0.000001, 0.0000001 }; double[][] Delta = new double[7][]; int[][] Kstop = new int[7][]; for (int i = 0; i < 7; i++) { Delta[i] = new double[7]; Kstop[i] = new int[7]; } int K = 50; double[] sred = new double[K]; int[] sredK = new int[K]; for (int n = 0; n < 7; n++) //для каждого N { N = Ntest[n]; ReturnData result1 = new ReturnData(N); double[] x0 = new double[N]; double[] _x0 = new double[N]; double[] del = new double[N]; double[] csi = new double[N]; for (int i = 0; i < 7; i++) //изменяем eps { for (int j = 0; j < N; j++) { csi[j] = rand.NextDouble() + 000.1; x0[j] = rand.Next(-10, -5); } for (int k = 0; k < K; k++) //несколько итераций для усреднения { ShorMethod method1 = new ShorMethod(eps[i], new List<double>(x0), N, 2, new List<double>(csi), Klimitation); result1 = method1.SearchMin(); sredK[k] = result1.K; Console.WriteLine(N); for (int j = 0; j < N; j++) { del[j] = 1 - result1.min[j]; } sred[k] = method1.normVec(new List<double>(del)); } for (int j = 0; j < K; j++) { Delta[i][n] += sred[j]; Kstop[i][n] += sredK[j]; } Delta[i][n] /= K; Kstop[i][n] /= K; } } CreateLineGraph2(); Graw_Draw2(eps, Delta); CreateLineGraph3(Klimitation); Graw_Draw3(eps, Kstop); }
private void test1() { int N = 0; Random rand = new Random(); int[] Ntest = { 2, 3, 5, 10, 30, 50, 100 }; int[] Kstop = new int[7]; int K = 50; //Количество повторений int[] sredK = new int[K]; int[] sredF = new int[K]; int[] F = new int[7]; double a = 0.0; for (int i = 0; i < 7; i++) { Kstop[i] = 0; N = Ntest[i]; ReturnData result1 = new ReturnData(N); double[] x0 = new double[N]; double[] _x0 = new double[N]; double[] csi = new double[N]; for (int j = 0; j < K; j++) { for (int k = 0; k < N; k++) { csi[k] = rand.NextDouble() + 000.1; x0[k] = rand.Next(-10, -5); } ShorMethod method1 = new ShorMethod(epsilon, new List<double>(x0), N, 2, new List<double>(csi), Klimitation); result1 = method1.SearchMin(); sredK[j] = result1.K; sredF[j] = result1.F; Console.WriteLine(N); } for (int j = 0; j < K; j++) { Kstop[i] += sredK[j]; F[i] += sredF[j]; } Kstop[i] /= K; F[i] /= K; } CreateLineGraph1(Klimitation); Graw_Draw1(Ntest, Kstop); for (int i = 0; i < 6; i++) a = Math.Max(F[i], F[i + 1]); CreateLineGraph4(a); Graw_Draw4(Ntest, F); }