public static void Set(ManagedArray dst, double value)
{
    for (var i = 0; i < dst.Length(); i++)
    {
        dst[i] = value;
    }
}
// Fourier series kernel: the kernel value is the product of per-component terms
public static double Fourier(ManagedArray x1, ManagedArray x2, ManagedArray k)
{
    Vectorize(x1, x2);

    var z = new ManagedArray(x1);

    double prod = 0;
    double m = k.Length() > 0 ? k[0] : 1;

    for (var i = 0; i < x1.Length(); i++)
    {
        z[i] = Math.Sin(m + 0.5) * 2;

        var d = x1[i] - x2[i];

        z[i] = Math.Abs(d) > 0 ? Math.Sin(m + 0.5) * d / Math.Sin(d * 0.5) : z[i];

        prod = (i == 0) ? z[i] : prod * z[i];
    }

    ManagedOps.Free(z);

    return(prod);
}
// Sigmoid kernel: tanh(m * Multiply(x1, x2) / n + b), with n = x1.Length()
public static double Sigmoid(ManagedArray x1, ManagedArray x2, ManagedArray k)
{
    double m = k.Length() > 0 ? k[0] : 1;
    double b = k.Length() > 1 ? k[1] : 0;

    return(Math.Tanh(m * Multiply(x1, x2) / x1.Length() + b));
}
// Radial kernel: exp(-||x1 - x2|| / (2 * sigma^2)), i.e. uses the (unsquared) Euclidean distance
public static double Radial(ManagedArray x1, ManagedArray x2, ManagedArray k)
{
    double sigma = k.Length() > 0 ? k[0] : 1;
    double denum = 2 * sigma * sigma;

    return(Math.Abs(denum) > 0 ? Math.Exp(-Math.Sqrt(SquaredDiff(x1, x2)) / denum) : 0);
}
// Gaussian kernel: exp(-||x1 - x2||^2 / (2 * sigma^2))
public static double Gaussian(ManagedArray x1, ManagedArray x2, ManagedArray k)
{
    var x = SquaredDiff(x1, x2);

    double sigma = k.Length() > 0 ? k[0] : 1;
    double denum = 2 * sigma * sigma;

    return(Math.Abs(denum) > 0 ? Math.Exp(-x / denum) : 0);
}
// Sum of squared element-wise differences: ||x1 - x2||^2
static double SquaredDiff(ManagedArray x1, ManagedArray x2)
{
    Vectorize(x1, x2);

    double x = 0;

    for (var i = 0; i < x1.Length(); i++)
    {
        var d = x1[i] - x2[i];

        x += d * d;
    }

    return(x);
}
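// Usage sketch (not part of the original source): evaluates the Gaussian kernel on two
// small row vectors through KernelFunction.Run, the same entry point used by Setup and
// Predict below. The ManagedArray(columns, rows) constructor, the linear indexer and
// ManagedOps.Free are assumed to behave as they are used elsewhere in this file; the
// method name GaussianKernelExample is illustrative only.
public static double GaussianKernelExample()
{
    var x1 = new ManagedArray(3, 1);
    var x2 = new ManagedArray(3, 1);
    var kparam = new ManagedArray(1, 1);

    x1[0] = 1.0; x1[1] = 2.0; x1[2] = 3.0;
    x2[0] = 1.5; x2[1] = 1.0; x2[2] = 2.5;

    kparam[0] = 0.5; // sigma

    // exp(-||x1 - x2||^2 / (2 * sigma^2)) for the vectors above
    var similarity = KernelFunction.Run(KernelType.GAUSSIAN, x1, x2, kparam);

    ManagedOps.Free(x1, x2, kparam);

    return similarity;
}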
// Copies a 3D [x][y][z] to 4D [index][x][y][z]
public static void Copy3D4D(ManagedArray dst, ManagedArray src, int index)
{
    MemCopy(dst, index * src.Length(), src, 0, src.Length());
}
// Copies a 4D [index][x][y][z] to 3D [x][y][z]
public static void Copy4D3D(ManagedArray dst, ManagedArray src, int index)
{
    MemCopy(dst, 0, src, index * dst.Length(), dst.Length());
}
public void Setup(ManagedArray x, ManagedArray y, double c, KernelType kernel, ManagedArray param, double tolerance = 0.001, int maxpasses = 5, int category = 1)
{
    ManagedOps.Free(dx, dy);

    dx = new ManagedArray(x);
    dy = new ManagedArray(y);

    ManagedOps.Copy2D(dx, x, 0, 0);
    ManagedOps.Copy2D(dy, y, 0, 0);

    ktype = kernel;

    // Data parameters
    var m = Rows(dx);

    Category = category;
    MaxIterations = maxpasses;
    Tolerance = tolerance;
    C = c;

    // Reset internal variables
    ManagedOps.Free(K, kparam, E, alpha);

    kparam = new ManagedArray(param);

    ManagedOps.Copy2D(kparam, param, 0, 0);

    // Variables
    alpha = new ManagedArray(1, m);
    E = new ManagedArray(1, m);
    b = 0;

    Iterations = 0;

    // Pre-compute the Kernel Matrix since our dataset is small
    // (In practice, optimized SVM packages that handle large datasets
    // gracefully will *not* do this)
    if (kernel == KernelType.LINEAR)
    {
        // Computation for the Linear Kernel
        // This is equivalent to computing the kernel on every pair of examples
        var tinput = ManagedMatrix.Transpose(dx);

        K = ManagedMatrix.Multiply(dx, tinput);

        double slope = kparam.Length() > 0 ? kparam[0] : 1;
        double inter = kparam.Length() > 1 ? kparam[1] : 0;

        ManagedMatrix.Multiply(K, slope);
        ManagedMatrix.Add(K, inter);

        ManagedOps.Free(tinput);
    }
    else if (kernel == KernelType.GAUSSIAN || kernel == KernelType.RADIAL)
    {
        // RBF Kernel
        // This is equivalent to computing the kernel on every pair of examples
        var pX2 = ManagedMatrix.Pow(dx, 2);
        var rX2 = ManagedMatrix.RowSums(pX2);
        var tX2 = ManagedMatrix.Transpose(rX2);
        var trX = ManagedMatrix.Transpose(dx);

        var tempK = new ManagedArray(m, m);
        var temp1 = new ManagedArray(m, m);
        var temp2 = ManagedMatrix.Multiply(dx, trX);

        ManagedMatrix.Expand(rX2, m, 1, tempK);
        ManagedMatrix.Expand(tX2, 1, m, temp1);
        ManagedMatrix.Multiply(temp2, -2);

        ManagedMatrix.Add(tempK, temp1);
        ManagedMatrix.Add(tempK, temp2);

        double sigma = kparam.Length() > 0 ? kparam[0] : 1;

        var g = Math.Abs(sigma) > 0 ? Math.Exp(-1 / (2 * sigma * sigma)) : 0;

        if (Type == KernelType.RADIAL)
        {
            ManagedMatrix.Sqrt(tempK);
        }

        K = ManagedMatrix.Pow(g, tempK);

        ManagedOps.Free(pX2, rX2, tX2, trX, tempK, temp1, temp2);
    }
    else
    {
        // Pre-compute the Kernel Matrix
        // The following can be slow due to the lack of vectorization
        K = new ManagedArray(m, m);

        var Xi = new ManagedArray(Cols(dx), 1);
        var Xj = new ManagedArray(Cols(dx), 1);

        for (var i = 0; i < m; i++)
        {
            ManagedOps.Copy2D(Xi, dx, 0, i);

            for (var j = 0; j < m; j++)
            {
                ManagedOps.Copy2D(Xj, dx, 0, j);

                K[j, i] = KernelFunction.Run(kernel, Xi, Xj, kparam);

                // the matrix is symmetric
                K[i, j] = K[j, i];
            }
        }

        ManagedOps.Free(Xi, Xj);
    }

    eta = 0;
    L = 0;
    H = 0;

    // Map 0 (or other categories) to -1
    for (var i = 0; i < Rows(dy); i++)
    {
        dy[i] = (int)dy[i] != Category ? -1 : 1;
    }
}
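// Illustrative check (not part of the original source) of the identity behind the
// vectorized RBF branch in Setup: ||a - b||^2 = ||a||^2 + ||b||^2 - 2 * (a . b), which
// is why tempK is assembled from row sums of squares plus -2 times the product dx * dx'.
// Raising g = exp(-1 / (2 * sigma^2)) to the power tempK then gives exp(-tempK / (2 * sigma^2))
// element-wise. Plain doubles are used here so the sketch does not depend on ManagedMatrix.
public static bool SquaredDistanceIdentityExample()
{
    double[] a = { 1.0, 2.0 };
    double[] b = { 0.5, -1.0 };

    var aa = a[0] * a[0] + a[1] * a[1]; // ||a||^2
    var bb = b[0] * b[0] + b[1] * b[1]; // ||b||^2
    var ab = a[0] * b[0] + a[1] * b[1]; // a . b

    var viaIdentity = aa + bb - 2 * ab;

    var direct = (a[0] - b[0]) * (a[0] - b[0]) + (a[1] - b[1]) * (a[1] - b[1]);

    // both expressions give the same squared Euclidean distance (up to rounding)
    return Math.Abs(viaIdentity - direct) < 1e-12;
}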
// SVMPREDICT returns a vector of predictions using a trained SVM model
// (svm_train).
//
// pred = SVMPREDICT(model, X) returns a vector of predictions using a
// trained SVM model (svm_train). X is an m x n matrix where each
// example is a row. model is an svm model returned from svm_train.
// predictions pred is an m x 1 column of predictions of {0, 1} values.
//
// Converted to R by: SD Separa (2016/03/18)
// Converted to C# by: SD Separa (2018/09/29)
public ManagedArray Predict(ManagedArray input)
{
    var predictions = new ManagedArray(1, Rows(input));

    if (Trained)
    {
        var x = new ManagedArray(input);

        if (Cols(x) == 1)
        {
            ManagedMatrix.Transpose(x, input);
        }
        else
        {
            ManagedOps.Copy2D(x, input, 0, 0);
        }

        var m = Rows(x);

        predictions.Resize(1, m);

        if (Type == KernelType.LINEAR)
        {
            ManagedMatrix.Multiply(predictions, x, W);
            ManagedMatrix.Add(predictions, B);
        }
        else if (Type == KernelType.GAUSSIAN || Type == KernelType.RADIAL)
        {
            // RBF Kernel
            // This is equivalent to computing the kernel on every pair of examples
            var pX1 = ManagedMatrix.Pow(x, 2);
            var pX2 = ManagedMatrix.Pow(ModelX, 2);
            var rX2 = ManagedMatrix.RowSums(pX2);
            var X1 = ManagedMatrix.RowSums(pX1);
            var X2 = ManagedMatrix.Transpose(rX2);
            var tX = ManagedMatrix.Transpose(ModelX);
            var tY = ManagedMatrix.Transpose(ModelY);
            var tA = ManagedMatrix.Transpose(Alpha);

            var rows = Rows(X1);
            var cols = Cols(X2);

            var tempK = new ManagedArray(cols, rows);
            var temp1 = new ManagedArray(cols, rows);
            var temp2 = ManagedMatrix.Multiply(x, tX);

            ManagedMatrix.Multiply(temp2, -2);

            ManagedMatrix.Expand(X1, cols, 1, tempK);
            ManagedMatrix.Expand(X2, 1, rows, temp1);

            ManagedMatrix.Add(tempK, temp1);
            ManagedMatrix.Add(tempK, temp2);

            var sigma = KernelParam.Length() > 0 ? KernelParam[0] : 1;

            if (Type == KernelType.RADIAL)
            {
                ManagedMatrix.Sqrt(tempK);
            }

            var g = Math.Abs(sigma) > 0 ? Math.Exp(-1 / (2 * sigma * sigma)) : 0;

            var Kernel = ManagedMatrix.Pow(g, tempK);

            var tempY = new ManagedArray(Cols(tY), rows);
            var tempA = new ManagedArray(Cols(tA), rows);

            ManagedMatrix.Expand(tY, 1, rows, tempY);
            ManagedMatrix.Expand(tA, 1, rows, tempA);

            ManagedMatrix.Product(Kernel, tempY);
            ManagedMatrix.Product(Kernel, tempA);

            var p = ManagedMatrix.RowSums(Kernel);

            ManagedOps.Copy2D(predictions, p, 0, 0);
            ManagedMatrix.Add(predictions, B);

            ManagedOps.Free(pX1, pX2, rX2, X1, X2, tempK, temp1, temp2, tX, tY, tA, tempY, tempA, Kernel, p);
        }
        else
        {
            var Xi = new ManagedArray(Cols(x), 1);
            var Xj = new ManagedArray(Cols(ModelX), 1);

            for (var i = 0; i < m; i++)
            {
                double prediction = 0;

                ManagedOps.Copy2D(Xi, x, 0, i);

                for (var j = 0; j < Rows(ModelX); j++)
                {
                    ManagedOps.Copy2D(Xj, ModelX, 0, j);

                    prediction += Alpha[j] * ModelY[j] * KernelFunction.Run(Type, Xi, Xj, KernelParam);
                }

                predictions[i] = prediction + B;
            }

            ManagedOps.Free(Xi, Xj);
        }

        ManagedOps.Free(x);
    }

    return(predictions);
}
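// End-to-end usage sketch (not part of the original source). The class name "ManagedSVM"
// and the Train() call are assumptions -- the SMO training loop is not part of this
// excerpt; only the Setup and Predict signatures above are taken from the source. The 2D
// indexer is assumed to be [column, row], matching K[j, i] in Setup.
public static ManagedArray PredictExample()
{
    // 4 training examples (rows) with 2 features (columns)
    var x = new ManagedArray(2, 4);

    x[0, 0] = 0.0; x[1, 0] = 0.0;
    x[0, 1] = 0.0; x[1, 1] = 1.0;
    x[0, 2] = 1.0; x[1, 2] = 0.0;
    x[0, 3] = 1.0; x[1, 3] = 1.0;

    // labels: category 1 vs everything else (Setup remaps non-matching labels to -1)
    var y = new ManagedArray(1, 4);

    y[0] = 0; y[1] = 0; y[2] = 1; y[3] = 1;

    var kparam = new ManagedArray(1, 1);

    kparam[0] = 0.5; // sigma for the Gaussian kernel

    var svm = new ManagedSVM();

    svm.Setup(x, y, 1.0, KernelType.GAUSSIAN, kparam, 0.001, 5, 1);
    svm.Train(); // assumed driver for the SMO iterations (not shown in this excerpt)

    // 2 test examples with the same 2 features
    var test = new ManagedArray(2, 2);

    test[0, 0] = 0.0; test[1, 0] = 0.9;
    test[0, 1] = 0.9; test[1, 1] = 0.1;

    var predictions = svm.Predict(test);

    ManagedOps.Free(x, y, kparam, test);

    return predictions;
}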