/// <summary>
/// Builds the Gram (kernel) matrix for a data set, optionally filling in the
/// gradient of the matrix with respect to each hyperparameter being optimised.
/// </summary>
/// <param name="kf">Kernel function (handles all hyperparameters, including noise variance)</param>
/// <param name="xData">Input vectors; one row/column of K per vector</param>
/// <param name="hypersToOptimise">Indices of the hyperparameters whose gradients are wanted</param>
/// <param name="gradK">If non-null on entry, gradK[t] receives dK/dtheta for hypersToOptimise[t]</param>
/// <returns>The nData x nData kernel matrix</returns>
public static PositiveDefiniteMatrix GramMatrix(IKernelFunctionWithParams kf, Vector[] xData, int[] hypersToOptimise, ref PositiveDefiniteMatrix[] gradK)
{
    int nData = xData.Length;
    bool wantGradients = gradK != null;
    PositiveDefiniteMatrix K = new PositiveDefiniteMatrix(nData, nData);
    for (int row = 0; row < nData; row++)
    {
        for (int col = 0; col < nData; col++)
        {
            // Fresh scratch vector per evaluation; the kernel fills it with the
            // derivative of K[row,col] w.r.t. every log hyperparameter.
            Vector thetaDeriv = wantGradients ? Vector.Zero(kf.ThetaCount) : null;
            K[row, col] = kf.EvaluateX1X2(xData[row], xData[col], ref thetaDeriv);
            if (wantGradients)
            {
                // Copy out only the derivatives for the hyperparameters being optimised.
                for (int t = 0; t < hypersToOptimise.Length; t++)
                {
                    gradK[t][row, col] = thetaDeriv[hypersToOptimise[t]];
                }
            }
        }
    }
    return K;
}
/// <summary>
/// Evaluates the kernel for a pair of vectors by summing the contributions of
/// every child kernel, concatenating their theta derivatives and summing their
/// x1 derivatives.
/// </summary>
/// <param name="x1">First vector</param>
/// <param name="x2">Second vector</param>
/// <param name="x1Deriv">Derivative of the kernel value with respect to x1 input vector</param>
/// <param name="logThetaDeriv">Derivative of the kernel value with respect to the log hyper-parameters</param>
/// <returns>The summed kernel value</returns>
public override double EvaluateX1X2(Vector x1, Vector x2, ref Vector x1Deriv, ref Vector logThetaDeriv)
{
    int totalCount = this.ThetaCount;
    double result = 0.0;
    Vector lthd;
    Vector lx1d;
    // Ensure logThetaDeriv has one slot per hyperparameter across all child
    // kernels. No zeroing is needed: every slot is overwritten below.
    if (((object)logThetaDeriv) != null)
    {
        if (logThetaDeriv.Count != totalCount)
        {
            logThetaDeriv = Vector.Zero(totalCount);
        }
    }
    if (((object)x1Deriv) != null)
    {
        // BUG FIX: always start the accumulator from zero. The previous code
        // kept the caller's vector when its length already matched x1, so any
        // stale contents (e.g. from a prior call reusing the same vector) were
        // silently summed into the result by SetToSum below.
        x1Deriv = Vector.Zero(x1.Count);
        // Per-kernel scratch for the x1 derivative of each child kernel.
        lx1d = Vector.Zero(x1.Count);
    }
    else
    {
        lx1d = null;
    }
    int derivX = 0;
    for (int kx = 0; kx < kernels.Count; kx++)
    {
        IKernelFunctionWithParams k = kernels[kx];
        int thcnt = thetaCount[kx];
        // Fresh scratch per kernel so each child writes its own theta block.
        if (((object)logThetaDeriv) != null)
        {
            lthd = Vector.Zero(thcnt);
        }
        else
        {
            lthd = null;
        }
        result += k.EvaluateX1X2(x1, x2, ref lx1d, ref lthd);
        if (((object)lthd) != null)
        {
            // Concatenate this kernel's theta derivatives into the output vector.
            for (int ix = 0; ix < thcnt; ix++)
            {
                logThetaDeriv[derivX++] = lthd[ix];
            }
        }
        if (((object)lx1d) != null)
        {
            // The x1 derivative of a sum of kernels is the sum of derivatives.
            x1Deriv.SetToSum(x1Deriv, lx1d);
        }
    }
    return result;
}
/// <summary>
/// Compares the analytic derivatives reported by a kernel against central-difference
/// numeric estimates — both the derivatives w.r.t. the log hyper-parameters and
/// the derivatives w.r.t. the first input vector.
/// </summary>
/// <param name="kf">Kernel function under test</param>
/// <param name="vec1">First input vector (perturbed in place, then restored)</param>
/// <param name="vec2">Second input vector</param>
private void TestDerivatives(IKernelFunctionWithParams kf, Vector vec1, Vector vec2)
{
    int hyperCount = kf.ThetaCount;
    Vector analyticThetaDeriv = Vector.Zero(hyperCount);
    Vector analyticX1Deriv = Vector.Zero(vec1.Count);
    Vector noDeriv = null;

    // One evaluation collects every analytic derivative at once.
    kf.EvaluateX1X2(vec1, vec2, ref analyticX1Deriv, ref analyticThetaDeriv);

    // Central difference over each log hyper-parameter.
    for (int p = 0; p < hyperCount; p++)
    {
        double saved = kf[p];
        kf[p] = saved + DITHER;
        double above = kf.EvaluateX1X2(vec1, vec2, ref noDeriv, ref noDeriv);
        kf[p] = saved - DITHER;
        double below = kf.EvaluateX1X2(vec1, vec2, ref noDeriv, ref noDeriv);
        double numeric = DITHER_MULT * (above - below);
        Assert.True(System.Math.Abs(numeric - analyticThetaDeriv[p]) < TOLERANCE);
        // Restore the hyperparameter before moving on.
        kf[p] = saved;
    }

    // Central difference over each component of the first input vector.
    for (int c = 0; c < vec1.Count; c++)
    {
        double saved = vec1[c];
        vec1[c] = saved + DITHER;
        double above = kf.EvaluateX1X2(vec1, vec2, ref noDeriv, ref noDeriv);
        vec1[c] = saved - DITHER;
        double below = kf.EvaluateX1X2(vec1, vec2, ref noDeriv, ref noDeriv);
        double numeric = DITHER_MULT * (above - below);
        Assert.True(System.Math.Abs(numeric - analyticX1Deriv[c]) < TOLERANCE);
        // Restore the perturbed component before moving on.
        vec1[c] = saved;
    }
}