/// <summary>
///   Computes the gradient of the regularized ε-insensitive squared loss:
///   g = w + 2·Xᵢᵀ(Cᵢ ⊙ clip(zᵢ − yᵢ, ε)), restricted to the active set I of
///   samples whose residual exceeds ε in magnitude.
/// </summary>
/// 
/// <remarks>
///   NOTE(review): this relies on <c>z</c> already holding X·w from a preceding
///   call to <see cref="objective"/> — the gradient does not recompute it.
///   Confirm the optimizer always calls objective(w) before gradient(w).
///   As a side effect, <c>z</c> is compacted in place (writes at index
///   <c>sizeI</c> never overtake reads at index <c>i</c>, since sizeI ≤ i),
///   and <c>I</c>/<c>sizeI</c> are rebuilt for the subsequent Hessian products.
/// </remarks>
/// 
/// <param name="w">The current weight vector.</param>
/// 
/// <returns>The gradient, stored in the shared buffer <c>g</c>.</returns>
/// 
private double[] gradient(double[] w)
{
    double[] targets = Outputs;
    double eps = Epsilon;

    // Rebuild the active index set I, compacting the clipped,
    // C-scaled residuals into the front of z as we go.
    sizeI = 0;
    for (int i = 0; i < targets.Length; i++)
    {
        double residual = z[i] - targets[i];

        bool active = false;
        double clipped = 0;

        if (residual < -eps)
        {
            clipped = residual + eps;
            active = true;
        }
        else if (residual > eps)
        {
            clipped = residual - eps;
            active = true;
        }

        if (active)
        {
            z[sizeI] = C[i] * clipped;
            I[sizeI] = i;
            sizeI++;
        }
    }

    // g ← Xᵢᵀ z (presumably; subXTv is a project helper — verify signature)
    LinearNewtonMethod.subXTv(Inputs, biasIndex, I, sizeI, z, g);

    // Add the regularization term: g ← w + 2·g
    for (int j = 0; j < w.Length; j++)
        g[j] = w[j] + 2 * g[j];

    return g;
}
/// <summary>
///   Evaluates the L2-regularized ε-insensitive squared-loss objective:
///   f(w) = ‖w‖²/2 + Σᵢ Cᵢ · max(|zᵢ − yᵢ| − ε, 0)², where z = X·w
///   (presumably — Xv is a project helper assumed to compute the product,
///   including the optional bias column selected by biasIndex).
/// </summary>
/// 
/// <remarks>
///   Side effect: fills the shared buffer <c>z</c> with X·w, which the
///   subsequent <see cref="gradient"/> call reads without recomputing.
/// </remarks>
/// 
/// <param name="w">The weight vector at which to evaluate the objective.</param>
/// 
/// <returns>The objective function value f(w).</returns>
/// 
private double objective(double[] w)
{
    double[] targets = Outputs;
    double eps = Epsilon;

    // z ← X·w (shared with gradient())
    LinearNewtonMethod.Xv(Inputs, biasIndex, w, z);

    // Regularization term: ‖w‖² / 2
    double value = 0;
    foreach (double wj in w)
        value += wj * wj;
    value /= 2;

    // Loss term: samples inside the ε-tube contribute nothing.
    for (int i = 0; i < targets.Length; i++)
    {
        double residual = z[i] - targets[i];

        if (residual < -eps)
        {
            double t = residual + eps;
            value += C[i] * t * t;
        }
        else if (residual > eps)
        {
            double t = residual - eps;
            value += C[i] * t * t;
        }
    }

    return value;
}
/// <summary>
///   Computes the Hessian-vector product h = s + 2·XᵢᵀDᵢXᵢ·s, where D is the
///   diagonal of per-sample costs C and I is the active set built by the last
///   <see cref="gradient"/> call (truncated-Newton style: the full Hessian is
///   never formed).
/// </summary>
/// 
/// <remarks>
///   NOTE(review): correctness depends on <c>I</c>/<c>sizeI</c> being current,
///   i.e. gradient() having run for the same iterate — confirm caller ordering.
///   The shared buffers <c>wa</c> and <c>h</c> are overwritten.
/// </remarks>
/// 
/// <param name="s">The vector to multiply the Hessian by.</param>
/// 
/// <returns>The product H·s, stored in the shared buffer <c>h</c>.</returns>
/// 
private double[] hessian(double[] s)
{
    // wa ← Xᵢ·s over the active rows only
    LinearNewtonMethod.subXv(Inputs, biasIndex, I, sizeI, s, wa);

    // Scale each active row's product by its cost Cᵢ
    for (int k = 0; k < sizeI; k++)
        wa[k] *= C[I[k]];

    // h ← Xᵢᵀ·wa
    LinearNewtonMethod.subXTv(Inputs, biasIndex, I, sizeI, wa, h);

    // Add the identity contribution from the regularizer: h ← s + 2·h
    for (int j = 0; j < s.Length; j++)
        h[j] = s[j] + 2 * h[j];

    return h;
}