/// <summary>
/// Classifies a digit bitmap with a trained CNN: scales the input to 28x28,
/// transposes it (bitmap data is stored transposed relative to the network
/// input), runs a forward pass, then reports the arg-max class and the raw
/// per-class outputs.
/// </summary>
/// <param name="Digit">Source bitmap containing the digit.</param>
/// <param name="cnn">Trained convolutional network.</param>
/// <param name="digit">Receives the index of the highest network output.</param>
/// <param name="Probability">Receives the raw network output per class; must hold at least cnn.Output.y values.</param>
public static void Bitmap(Bitmap Digit, ManagedCNN cnn, ref int digit, ref double[] Probability)
{
    // Bitmap Data is transposed
    var Transposed = new ManagedArray(28, 28, 1);
    var TestDigit = new ManagedArray(28, 28, 1);

    var ScaledDigit = Resize(Digit, 28, 28, true);

    Convert(ScaledDigit, TestDigit);
    ManagedMatrix.Transpose(Transposed, TestDigit);

    cnn.FeedForward(Transposed);

    digit = 0;

    // Fix: start from double.MinValue (not 0) so the arg-max is found even
    // when every network output is non-positive — matches the Pixbuf overload.
    double max = double.MinValue;

    for (int y = 0; y < cnn.Output.y; y++)
    {
        var val = cnn.Output[0, y];

        Probability[y] = val;

        if (val > max)
        {
            max = val;
            digit = y;
        }
    }

    ScaledDigit.Dispose();

    ManagedOps.Free(TestDigit, Transposed);
}
/// <summary>
/// Releases the network and all data buffers during application shutdown.
/// </summary>
protected void CleanShutdown()
{
    // Clean-Up Routines Here
    Network.Free();

    ManagedOps.Free(InputData, OutputData, TestData, NormalizationData);
}
/// <summary>
/// Fourier (Dirichlet-like) kernel between feature vectors x1 and x2: the
/// product over all features of sin(m + 0.5) * d / sin(d / 2) with
/// d = x1[i] - x2[i], using 2 * sin(m + 0.5) as the d == 0 limit.
/// NOTE(review): the classical Dirichlet kernel is sin((m + 0.5) * d) / sin(d / 2);
/// here sin(m + 0.5) is computed independently of d — confirm this is intentional.
/// </summary>
/// <param name="x1">First feature vector.</param>
/// <param name="x2">Second feature vector.</param>
/// <param name="k">Kernel parameters; k[0] is m (defaults to 1 when empty).</param>
/// <returns>The product of the per-feature kernel terms.</returns>
public static double Fourier(ManagedArray x1, ManagedArray x2, ManagedArray k)
{
    // Bring both inputs into the same vector shape.
    Vectorize(x1, x2);

    var z = new ManagedArray(x1);

    double prod = 0;

    // First kernel parameter (defaults to 1 when none supplied).
    double m = k.Length() > 0 ? k[0] : 1;

    for (var i = 0; i < x1.Length(); i++)
    {
        // Limit value used when the features are equal (d == 0).
        z[i] = Math.Sin(m + 0.5) * 2;

        var d = x1[i] - x2[i];

        z[i] = Math.Abs(d) > 0 ? Math.Sin(m + 0.5) * d / Math.Sin(d * 0.5) : z[i];

        // Kernel value is the running product of the per-feature terms.
        prod = (i == 0) ? z[i] : prod * z[i];
    }

    ManagedOps.Free(z);

    return(prod);
}
/// <summary>
/// Renders the bias matrix of a convolution layer as a Pixbuf heat map.
/// Returns a 1x1 pixbuf when the layer index is invalid or the layer is not
/// a convolution layer.
/// </summary>
/// <param name="cnn">Trained convolutional network.</param>
/// <param name="layer">Index of the layer to render.</param>
/// <returns>A newly allocated Pixbuf owned by the caller.</returns>
public static Pixbuf Get(ManagedCNN cnn, int layer)
{
    if (layer >= 0 && layer < cnn.Layers.Count && cnn.Layers[layer].Type == LayerTypes.Convolution)
    {
        // NOTE(review): other overloads allocate the transpose target with
        // new ManagedArray(source, false) — confirm this copy constructor
        // yields the correct (swapped) dimensions here.
        var Transposed = new ManagedArray(cnn.Layers[layer].Bias);

        ManagedMatrix.Transpose(Transposed, cnn.Layers[layer].Bias);

        var pixbuf = new Pixbuf(Colorspace.Rgb, false, 8, Transposed.x, Transposed.y);

        // Get normalization values
        double min = Double.MaxValue;
        double max = Double.MinValue;

        FullyConnected.GetNormalization(Transposed, ref min, ref max);

        Activation.Draw(pixbuf, Transposed, min, max);

        ManagedOps.Free(Transposed);

        return(pixbuf);
    }

    // return empty pixbuf
    return(new Pixbuf(Colorspace.Rgb, false, 8, 1, 1));
}
/// <summary>
/// Classifies a digit Pixbuf with a trained CNN: scales the image to 28x28,
/// transposes it (pixbuf data is stored transposed relative to the network
/// input), feeds it forward, then reports the arg-max class and the raw
/// per-class outputs.
/// </summary>
/// <param name="Digit">Source pixbuf containing the digit.</param>
/// <param name="cnn">Trained convolutional network.</param>
/// <param name="digit">Receives the index of the highest network output.</param>
/// <param name="Probability">Receives the raw network output per class; must hold at least cnn.Output.y values.</param>
public static void Pixbuf(Pixbuf Digit, ManagedCNN cnn, ref int digit, ref double[] Probability)
{
    // Bitmap Data is transposed
    var transposed = new ManagedArray(28, 28, 1);
    var sample = new ManagedArray(28, 28, 1);

    var scaled = Digit.ScaleSimple(28, 28, InterpType.Hyper);

    Convert(scaled, sample);
    ManagedMatrix.Transpose(transposed, sample);

    cnn.FeedForward(transposed);

    digit = 0;

    var best = double.MinValue;

    for (var i = 0; i < cnn.Output.y; i++)
    {
        var output = cnn.Output[0, i];

        Probability[i] = output;

        if (output > best)
        {
            best = output;
            digit = i;
        }
    }

    scaled.Dispose();

    ManagedOps.Free(sample, transposed);
}
/// <summary>
/// Renders feature map (i, j) of a convolution layer as a Pixbuf heat map.
/// Returns a 1x1 pixbuf when the indices are out of range or the layer is
/// not a convolution layer.
/// </summary>
/// <param name="cnn">Trained convolutional network.</param>
/// <param name="layer">Index of the convolution layer.</param>
/// <param name="i">First feature-map index.</param>
/// <param name="j">Second feature-map index.</param>
/// <returns>A newly allocated Pixbuf owned by the caller.</returns>
public static Pixbuf Get(ManagedCNN cnn, int layer, int i, int j)
{
    if (layer >= 0 && layer < cnn.Layers.Count && cnn.Layers[layer].Type == LayerTypes.Convolution && i >= 0 && i < cnn.Layers[layer].FeatureMap.i && j >= 0 && j < cnn.Layers[layer].FeatureMap.j)
    {
        var FeatureMap = new ManagedArray(cnn.Layers[layer].FeatureMap.x, cnn.Layers[layer].FeatureMap.y, cnn.Layers[layer].FeatureMap.z);
        var Transposed = new ManagedArray(FeatureMap);

        // Pixbuf dimensions are swapped because the drawn data is transposed.
        var pixbuf = new Pixbuf(Colorspace.Rgb, false, 8, FeatureMap.y, FeatureMap.x);

        ManagedOps.Copy4DIJ2D(FeatureMap, cnn.Layers[layer].FeatureMap, i, j);
        ManagedMatrix.Transpose(Transposed, FeatureMap);

        // Get normalization values
        double min = Double.MaxValue;
        double max = Double.MinValue;

        FullyConnected.GetNormalization(Transposed, ref min, ref max);

        Activation.Draw(pixbuf, Transposed, min, max);

        // Fix: FeatureMap was previously leaked — free both temporaries,
        // matching the other feature-map renderers.
        ManagedOps.Free(FeatureMap, Transposed);

        return(pixbuf);
    }

    // return empty pixbuf
    return(new Pixbuf(Colorspace.Rgb, false, 8, 1, 1));
}
/// <summary>
/// Renders a 2D array as a Pixbuf heat map, optionally transposing it first.
/// The input array is never freed; only the internal transposed copy is.
/// </summary>
/// <param name="layer">Array of values to render.</param>
/// <param name="transpose">When true, draw the transpose of the array.</param>
/// <returns>A newly allocated Pixbuf owned by the caller.</returns>
public static Pixbuf Get(ManagedArray layer, bool transpose = true)
{
    // Normalization range, filled in by GetNormalization below.
    double min = Double.MaxValue;
    double max = Double.MinValue;

    if (transpose)
    {
        var flipped = new ManagedArray(layer, false);

        ManagedMatrix.Transpose(flipped, layer);

        var pixbuf = new Pixbuf(Colorspace.Rgb, false, 8, flipped.x, flipped.y);

        GetNormalization(flipped, ref min, ref max);

        Activation.Draw(pixbuf, flipped, min, max);

        ManagedOps.Free(flipped);

        return(pixbuf);
    }
    else
    {
        var pixbuf = new Pixbuf(Colorspace.Rgb, false, 8, layer.x, layer.y);

        GetNormalization(layer, ref min, ref max);

        Activation.Draw(pixbuf, layer, min, max);

        return(pixbuf);
    }
}
/// <summary>
/// Releases both the published model arrays and the internal training
/// scratch arrays of the SVM.
/// </summary>
public void Free()
{
    // public variables
    ManagedOps.Free(ModelX, ModelY, Alpha, W, KernelParam);

    // internal variables
    ManagedOps.Free(K, E, alpha, kparam, dx, dy);
}
/// <summary>
/// Classifies the samples with the model and scatter-plots them onto the
/// pixbuf using feature f1 horizontally and f2 vertically.
/// </summary>
/// <param name="pixbuf">Target drawing surface.</param>
/// <param name="x">Sample matrix, one example per row.</param>
/// <param name="model">Trained model used to label the samples.</param>
/// <param name="f1">Feature column plotted on the horizontal axis.</param>
/// <param name="f2">Feature column plotted on the vertical axis.</param>
public static void Plot(Pixbuf pixbuf, ManagedArray x, Model model, int f1 = 0, int f2 = 1)
{
    var labels = model.Classify(x);

    Points(pixbuf, x, labels, f1, f2);

    ManagedOps.Free(labels);
}
/// <summary>
/// Scatter-plots the data set colored by the network's classification and
/// draws a bounding box around the plot area. Updates the shared plot-range
/// fields (minx/maxx/miny/maxy/deltax/deltay) as a side effect.
/// NOTE(review): f2 defaults to 0 here while the Model-based overloads
/// default it to 1 — confirm this is intentional.
/// </summary>
/// <param name="pixbuf">Target drawing surface.</param>
/// <param name="network">Trained network used to label the samples.</param>
/// <param name="opts">Network options; Items is overwritten with the row count.</param>
/// <param name="threshold">Classification threshold passed to the network.</param>
/// <param name="x">Sample matrix, one example per row.</param>
/// <param name="width">Plot width in pixels.</param>
/// <param name="height">Plot height in pixels.</param>
/// <param name="f1">Feature column plotted on the horizontal axis.</param>
/// <param name="f2">Feature column plotted on the vertical axis.</param>
public static void Points(Pixbuf pixbuf, ManagedDNN network, NeuralNetworkOptions opts, double threshold, ManagedArray x, int width, int height, int f1 = 0, int f2 = 0)
{
    var m = Rows(x);

    minx = Double.MaxValue;
    maxx = Double.MinValue;

    miny = Double.MaxValue;
    maxy = Double.MinValue;

    // Clamp the requested feature columns into range.
    f1 = f1 >= 0 && f1 < Cols(x) ? f1 : 0;
    f2 = f2 >= 0 && f2 < Cols(x) ? f2 : 0;

    // Find the data extents of the two plotted features.
    for (var j = 0; j < m; j++)
    {
        minx = Math.Min(x[f1, j], minx);
        maxx = Math.Max(x[f1, j], maxx);

        miny = Math.Min(x[f2, j], miny);
        maxy = Math.Max(x[f2, j], maxy);
    }

    deltax = (maxx - minx) / width;
    deltay = (maxy - miny) / height;

    // Pad the plot range by 8 pixels worth of margin on each side,
    // then recompute the per-pixel step for the padded range.
    minx = minx - 8 * deltax;
    maxx = maxx + 8 * deltax;

    miny = miny - 8 * deltay;
    maxy = maxy + 8 * deltay;

    deltax = (maxx - minx) / width;
    deltay = (maxy - miny) / height;

    var colors = Common.Palette2();

    colors.Shuffle();

    var PlotOptions = opts;

    PlotOptions.Items = Rows(x);

    var classification = network.Classify(x, PlotOptions, threshold);

    Points(pixbuf, x, classification, colors, f1, f2);

    // Plot bounding box
    var cw = pixbuf.Width - 1;
    var ch = pixbuf.Height;

    var border = new Color(128, 128, 128);

    Common.Line(pixbuf, 0, 1, cw, 1, border);
    Common.Line(pixbuf, cw, 1, cw, ch, border);
    Common.Line(pixbuf, 0, ch, cw, ch, border);
    Common.Line(pixbuf, 0, 1, 0, ch, border);

    ManagedOps.Free(classification);
}
/// <summary>
/// Builds the final SVM model from the optimizer state: collects the support
/// vectors (examples with non-zero alpha), copies their labels and
/// multipliers, computes the linear weight vector W = transpose((alpha .* y)' * X),
/// then frees all training scratch arrays.
/// </summary>
public void Generate()
{
    var m = Rows(dx);
    var n = Cols(dx);

    // Count the support vectors (non-zero Lagrange multipliers).
    var idx = 0;

    for (var i = 0; i < m; i++)
    {
        if (Math.Abs(alpha[i]) > 0)
        {
            idx++;
        }
    }

    ManagedOps.Free(ModelX, ModelY, Alpha, W, KernelParam);

    ModelX = new ManagedArray(Cols(dx), idx);
    ModelY = new ManagedArray(1, idx);
    Alpha = new ManagedArray(1, idx);
    KernelParam = new ManagedArray(kparam);

    // Copy each support vector with its label and multiplier.
    var ii = 0;

    for (var i = 0; i < m; i++)
    {
        if (Math.Abs(alpha[i]) > 0)
        {
            for (int j = 0; j < n; j++)
            {
                ModelX[j, ii] = dx[j, i];
            }

            ModelY[ii] = dy[i];
            Alpha[ii] = alpha[i];

            ii++;
        }
    }

    B = b;
    Passes = Iterations;

    ManagedOps.Copy2D(KernelParam, kparam, 0, 0);

    Type = ktype;

    // Linear predictor weights: W = transpose(transpose(alpha .* y) * X).
    var axy = ManagedMatrix.BSXMUL(alpha, dy);
    var tay = ManagedMatrix.Transpose(axy);
    var txx = ManagedMatrix.Multiply(tay, dx);

    W = ManagedMatrix.Transpose(txx);

    Trained = true;

    // Release all training-time scratch arrays.
    ManagedOps.Free(dx, dy, K, kparam, E, alpha, axy, tay, txx);
}
/// <summary>
/// Parses delimited test samples from the text into TestData (one sample per
/// line, one column per input node), then normalizes them against the
/// previously computed NormalizationData and refreshes the normalization
/// view. Returns false when the text is empty.
/// </summary>
/// <param name="test">Raw delimited test-data text.</param>
/// <returns>True when the test data was parsed and stored.</returns>
protected bool SetupTestData(string test)
{
    var text = test.Trim();

    if (string.IsNullOrEmpty(text))
    {
        return(false);
    }

    var TestBuffer = new TextBuffer(new TextTagTable()) { Text = text };

    // One test sample per line of input.
    Samples.Value = Convert.ToDouble(TestBuffer.LineCount, ci);

    var inpx = Convert.ToInt32(InputLayerNodes.Value, ci);
    var tsty = Convert.ToInt32(Samples.Value, ci);

    ManagedOps.Free(TestData);

    TestData = new ManagedArray(inpx, tsty);

    var current = DelimiterBox.Active;
    var delimiter = current >= 0 && current < Delimiters.Count ? Delimiters[current].Character : '\t';

    var inputs = inpx;

    using (var reader = new StringReader(TestBuffer.Text))
    {
        for (int y = 0; y < tsty; y++)
        {
            var line = reader.ReadLine();

            if (!string.IsNullOrEmpty(line))
            {
                var tokens = line.Split(delimiter);

                // Skip lines that do not provide a value for every input node.
                if (inputs > 0 && tokens.Length >= inpx)
                {
                    for (int x = 0; x < inpx; x++)
                    {
                        TestData[x, y] = SafeConvert.ToDouble(tokens[x]);
                    }
                }
            }
        }
    }

    NormalizeData(TestData, NormalizationData);

    UpdateTextView(ViewNormalization, NormalizationData);

    return(true);
}
/// <summary>
/// Releases the network, the data buffers, and the plotting resources
/// during application shutdown.
/// </summary>
protected void CleanShutdown()
{
    // Clean-Up Routines Here
    Network.Free();

    ManagedOps.Free(InputData, OutputData, TestData, NormalizationData);

    Common.Free(PlotImage.Pixbuf);
    Common.Free(PlotImage);

    Plot.Free();
}
/// <summary>
/// Creates a white pixbuf, scatter-plots the data colored by the model's
/// classification, and draws a bounding box. Updates the shared plot-range
/// fields (minx/maxx/miny/maxy/deltax/deltay) as a side effect.
/// </summary>
/// <param name="x">Sample matrix, one example per row.</param>
/// <param name="model">Trained model used to label the samples.</param>
/// <param name="width">Plot width in pixels.</param>
/// <param name="height">Plot height in pixels.</param>
/// <param name="f1">Feature column plotted on the horizontal axis.</param>
/// <param name="f2">Feature column plotted on the vertical axis.</param>
/// <returns>A newly allocated Pixbuf owned by the caller.</returns>
public static Pixbuf Plot(ManagedArray x, Model model, int width, int height, int f1 = 0, int f2 = 1)
{
    var pixbuf = Common.Pixbuf(width, height, new Color(255, 255, 255));

    var m = Rows(x);

    minx = Double.MaxValue;
    maxx = Double.MinValue;

    miny = Double.MaxValue;
    maxy = Double.MinValue;

    // Clamp the requested feature columns into range.
    f1 = f1 >= 0 && f1 < Cols(x) ? f1 : 0;
    f2 = f2 >= 0 && f2 < Cols(x) ? f2 : 0;

    // Find the data extents of the two plotted features.
    for (var j = 0; j < m; j++)
    {
        minx = Math.Min(x[f1, j], minx);
        maxx = Math.Max(x[f1, j], maxx);

        miny = Math.Min(x[f2, j], miny);
        maxy = Math.Max(x[f2, j], maxy);
    }

    deltax = (maxx - minx) / width;
    deltay = (maxy - miny) / height;

    // Pad the plot range by 8 pixels worth of margin on each side,
    // then recompute the per-pixel step for the padded range.
    minx = minx - 8 * deltax;
    maxx = maxx + 8 * deltax;

    miny = miny - 8 * deltay;
    maxy = maxy + 8 * deltay;

    deltax = (maxx - minx) / width;
    deltay = (maxy - miny) / height;

    var classification = model.Classify(x);

    Points(pixbuf, x, classification, f1, f2);

    // Plot bounding box
    var cw = pixbuf.Width - 1;
    var ch = pixbuf.Height;

    var border = new Color(128, 128, 128);

    Common.Line(pixbuf, 0, 1, cw, 1, border);
    Common.Line(pixbuf, cw, 1, cw, ch, border);
    Common.Line(pixbuf, 0, ch, cw, ch, border);
    Common.Line(pixbuf, 0, 1, 0, ch, border);

    ManagedOps.Free(classification);

    return(pixbuf);
}
/// <summary>
/// Dot product of two (vectorized) arrays, computed as transpose(x1) * x2
/// and returned as a scalar.
/// </summary>
/// <param name="x1">First vector.</param>
/// <param name="x2">Second vector.</param>
/// <returns>The scalar inner product.</returns>
static double Multiply(ManagedArray x1, ManagedArray x2)
{
    Vectorize(x1, x2);

    var transposed = ManagedMatrix.Transpose(x1);
    var product = ManagedMatrix.Multiply(transposed, x2);

    var result = product[0];

    ManagedOps.Free(transposed, product);

    return(result);
}
/// <summary>
/// Parses input-to-hidden layer weights from delimited text into Network.Wji.
/// Expects one line per hidden node, each with (input nodes + 1) values —
/// the extra column carries the bias weight. Returns false on malformed input.
/// </summary>
/// <param name="inputlayer">Raw delimited weight text.</param>
/// <returns>True when the weights were parsed and stored.</returns>
protected bool SetupInputLayerWeights(string inputlayer)
{
    var text = inputlayer.Trim();

    if (string.IsNullOrEmpty(text))
    {
        return(false);
    }

    var InputLayerBuffer = new TextBuffer(new TextTagTable()) { Text = text };

    // +1 accounts for the bias column.
    var cols = Convert.ToInt32(InputLayerNodes.Value, ci) + 1;
    var rows = Convert.ToInt32(HiddenLayerNodes.Value, ci);

    if (cols < 2 || rows < 2 || rows != InputLayerBuffer.LineCount)
    {
        return(false);
    }

    ManagedOps.Free(Network.Wji);

    Network.Wji = new ManagedArray(cols, rows);

    var current = DelimiterBox.Active;
    var delimiter = current >= 0 && current < Delimiters.Count ? Delimiters[current].Character : '\t';

    using (var reader = new StringReader(InputLayerBuffer.Text))
    {
        for (int row = 0; row < rows; row++)
        {
            var line = reader.ReadLine();

            if (line != null)
            {
                var fields = line.Split(delimiter);

                for (int col = 0; col < cols; col++)
                {
                    if (col < fields.Length)
                    {
                        Network.Wji[col, row] = SafeConvert.ToDouble(fields[col]);
                    }
                }
            }
        }
    }

    return(true);
}
/// <summary>
/// Parses normalization parameters from delimited text into
/// NormalizationData: two rows (minimums then maximums), one column per
/// input node. Returns false on malformed input.
/// </summary>
/// <param name="normalization">Raw delimited normalization text.</param>
/// <returns>True when the parameters were parsed and stored.</returns>
protected bool SetupNormalization(string normalization)
{
    var text = normalization.Trim();

    if (string.IsNullOrEmpty(text))
    {
        return(false);
    }

    var NormalizationBuffer = new TextBuffer(new TextTagTable()) { Text = text };

    var cols = Convert.ToInt32(InputLayerNodes.Value, ci);

    // Exactly two rows: minimums and maximums.
    var rows = 2;

    if (cols < 2 || rows < 2 || NormalizationBuffer.LineCount < rows)
    {
        return(false);
    }

    ManagedOps.Free(NormalizationData);

    NormalizationData = new ManagedArray(cols, rows);

    var current = DelimiterBox.Active;
    var delimiter = current >= 0 && current < Delimiters.Count ? Delimiters[current].Character : '\t';

    using (var reader = new StringReader(NormalizationBuffer.Text))
    {
        for (int row = 0; row < rows; row++)
        {
            var line = reader.ReadLine();

            if (line != null)
            {
                var fields = line.Split(delimiter);

                for (int col = 0; col < cols; col++)
                {
                    if (col < fields.Length)
                    {
                        NormalizationData[col, row] = SafeConvert.ToDouble(fields[col]);
                    }
                }
            }
        }
    }

    return(true);
}
/// <summary>
/// Parses hidden-to-output layer weights from delimited text into
/// Network.Wkj. Expects one line per output category, each with
/// (hidden nodes + 1) values — the extra column carries the bias weight.
/// Returns false on malformed input.
/// </summary>
/// <param name="hiddenLayer">Raw delimited weight text.</param>
/// <returns>True when the weights were parsed and stored.</returns>
protected bool SetupHiddenLayerWeights(string hiddenLayer)
{
    var text = hiddenLayer.Trim();

    if (string.IsNullOrEmpty(text))
    {
        return(false);
    }

    var HiddenLayerBuffer = new TextBuffer(new TextTagTable()) { Text = text };

    // +1 accounts for the bias column.
    var cols = Convert.ToInt32(HiddenLayerNodes.Value, ci) + 1;
    var rows = Convert.ToInt32(Categories.Value, ci);

    if (cols < 2 || rows < 1 || rows != HiddenLayerBuffer.LineCount)
    {
        return(false);
    }

    ManagedOps.Free(Network.Wkj);

    Network.Wkj = new ManagedArray(cols, rows);

    var current = DelimiterBox.Active;
    var delimiter = current >= 0 && current < Delimiters.Count ? Delimiters[current].Character : '\t';

    using (var reader = new StringReader(HiddenLayerBuffer.Text))
    {
        for (int row = 0; row < rows; row++)
        {
            var line = reader.ReadLine();

            if (line != null)
            {
                var fields = line.Split(delimiter);

                for (int col = 0; col < cols; col++)
                {
                    if (col < fields.Length)
                    {
                        Network.Wkj[col, row] = SafeConvert.ToDouble(fields[col]);
                    }
                }
            }
        }
    }

    return(true);
}
/// <summary>
/// Converts raw predictions into class labels: Category when the prediction
/// exceeds the threshold, otherwise 0. The caller owns the returned list.
/// </summary>
/// <param name="input">Sample matrix, one example per row.</param>
/// <param name="threshold">Decision threshold applied to each prediction.</param>
/// <returns>One label per input row.</returns>
public ManagedIntList Classify(ManagedArray input, double threshold = 0)
{
    var labels = new ManagedIntList(Rows(input));

    var scores = Predict(input);

    for (var i = 0; i < scores.Length(); i++)
    {
        labels[i] = scores[i] > threshold ? Category : 0;
    }

    ManagedOps.Free(scores);

    return(labels);
}
/// <summary>
/// Renders the bias matrix of a convolution layer as a Bitmap heat map,
/// optionally transposed. Returns a 1x1 bitmap when the layer index is
/// invalid or the layer is not a convolution layer.
/// </summary>
/// <param name="cnn">Trained convolutional network.</param>
/// <param name="layer">Index of the layer to render.</param>
/// <param name="transpose">When true, draw the transpose of the bias matrix.</param>
/// <returns>A newly allocated Bitmap owned by the caller.</returns>
public static Bitmap Get(ManagedCNN cnn, int layer, bool transpose = true)
{
    if (layer >= 0 && layer < cnn.Layers.Count && cnn.Layers[layer].Type == LayerTypes.Convolution)
    {
        // Get normalization values
        double min = Double.MaxValue;
        double max = Double.MinValue;

        if (transpose)
        {
            var flipped = new ManagedArray(cnn.Layers[layer].Bias, false);

            ManagedMatrix.Transpose(flipped, cnn.Layers[layer].Bias);

            var bitmap = new Bitmap(flipped.x, flipped.y, PixelFormat.Format24bppRgb);

            GetNormalization(flipped, ref min, ref max);

            Draw(bitmap, flipped, min, max);

            ManagedOps.Free(flipped);

            return(bitmap);
        }
        else
        {
            var bitmap = new Bitmap(cnn.Layers[layer].Bias.x, cnn.Layers[layer].Bias.y, PixelFormat.Format24bppRgb);

            GetNormalization(cnn.Layers[layer].Bias, ref min, ref max);

            Draw(bitmap, cnn.Layers[layer].Bias, min, max);

            return(bitmap);
        }
    }

    // return empty bitmap
    return(new Bitmap(1, 1, PixelFormat.Format24bppRgb));
}
/// <summary>
/// Renders feature map (i, j) of a convolution layer as a Bitmap heat map.
/// Returns a 1x1 bitmap when the indices are out of range or the layer is
/// not a convolution layer.
/// </summary>
/// <param name="cnn">Trained convolutional network.</param>
/// <param name="layer">Index of the convolution layer.</param>
/// <param name="i">First feature-map index.</param>
/// <param name="j">Second feature-map index.</param>
/// <returns>A newly allocated Bitmap owned by the caller.</returns>
public static Bitmap Get(ManagedCNN cnn, int layer, int i, int j)
{
    // Fix: also require a convolution layer, matching the Pixbuf counterpart —
    // only convolution layers carry a populated FeatureMap.
    if (layer >= 0 && layer < cnn.Layers.Count && cnn.Layers[layer].Type == LayerTypes.Convolution && i >= 0 && i < cnn.Layers[layer].FeatureMap.i && j >= 0 && j < cnn.Layers[layer].FeatureMap.j)
    {
        var FeatureMap = new ManagedArray(cnn.Layers[layer].FeatureMap.x, cnn.Layers[layer].FeatureMap.y, cnn.Layers[layer].FeatureMap.z);
        var Transposed = new ManagedArray(FeatureMap);

        var bitmap = new Bitmap(cnn.Layers[layer].FeatureMap.x, cnn.Layers[layer].FeatureMap.y, PixelFormat.Format24bppRgb);

        ManagedOps.Copy4DIJ2D(FeatureMap, cnn.Layers[layer].FeatureMap, i, j);
        ManagedMatrix.Transpose(Transposed, FeatureMap);

        // Get normalization values
        double min = Double.MaxValue;
        double max = Double.MinValue;

        for (int y = 0; y < Transposed.y; y++)
        {
            for (int x = 0; x < Transposed.x; x++)
            {
                if (Transposed[x, y] > max)
                {
                    max = Transposed[x, y];
                }

                if (Transposed[x, y] < min)
                {
                    min = Transposed[x, y];
                }
            }
        }

        Draw(bitmap, Transposed, min, max);

        ManagedOps.Free(FeatureMap, Transposed);

        return(bitmap);
    }

    // return empty bitmap
    return(new Bitmap(1, 1, PixelFormat.Format24bppRgb));
}
/// <summary>
/// Renders one activation map of a layer as a Pixbuf heat map. Returns a
/// 1x1 pixbuf when the layer or map index is out of range.
/// </summary>
/// <param name="cnn">Trained convolutional network.</param>
/// <param name="layer">Index of the layer to render.</param>
/// <param name="map">Index of the activation map within the layer.</param>
/// <returns>A newly allocated Pixbuf owned by the caller.</returns>
public static Pixbuf Get(ManagedCNN cnn, int layer, int map)
{
    if (layer >= 0 && layer < cnn.Layers.Count && map >= 0 && map < cnn.Layers[layer].Activation.i)
    {
        var Activation = new ManagedArray(cnn.Layers[layer].Activation.x, cnn.Layers[layer].Activation.y, cnn.Layers[layer].Activation.z);
        var Transposed = new ManagedArray(Activation);

        // Pixbuf dimensions are swapped because the drawn data is transposed.
        var pixbuf = new Pixbuf(Colorspace.Rgb, false, 8, Activation.y, Activation.x);

        ManagedOps.Copy4D2D(Activation, cnn.Layers[layer].Activation, 0, map);
        ManagedMatrix.Transpose(Transposed, Activation);

        // Scan for the normalization range.
        var min = Double.MaxValue;
        var max = double.MinValue;

        for (var row = 0; row < Transposed.y; row++)
        {
            for (var col = 0; col < Transposed.x; col++)
            {
                var value = Transposed[col, row];

                if (value > max)
                {
                    max = value;
                }

                if (value < min)
                {
                    min = value;
                }
            }
        }

        Draw(pixbuf, Transposed, min, max);

        ManagedOps.Free(Activation, Transposed);

        return(pixbuf);
    }

    // return empty pixbuf
    return(new Pixbuf(Colorspace.Rgb, false, 8, 1, 1));
}
/// <summary>
/// Renders one activation map of a layer as a Bitmap heat map. Returns a
/// 1x1 bitmap when the layer or map index is out of range.
/// </summary>
/// <param name="cnn">Trained convolutional network.</param>
/// <param name="layer">Index of the layer to render.</param>
/// <param name="map">Index of the activation map within the layer.</param>
/// <returns>A newly allocated Bitmap owned by the caller.</returns>
public static Bitmap Get(ManagedCNN cnn, int layer, int map)
{
    if (layer >= 0 && layer < cnn.Layers.Count && map >= 0 && map < cnn.Layers[layer].Activation.i)
    {
        var Activation = new ManagedArray(cnn.Layers[layer].Activation.x, cnn.Layers[layer].Activation.y, cnn.Layers[layer].Activation.z);
        var Transposed = new ManagedArray(Activation);

        var bitmap = new Bitmap(cnn.Layers[layer].Activation.x, cnn.Layers[layer].Activation.y, PixelFormat.Format24bppRgb);

        ManagedOps.Copy4D2D(Activation, cnn.Layers[layer].Activation, 0, map);
        ManagedMatrix.Transpose(Transposed, Activation);

        // Scan for the normalization range.
        var min = Double.MaxValue;
        var max = Double.MinValue;

        for (var row = 0; row < Transposed.y; row++)
        {
            for (var col = 0; col < Transposed.x; col++)
            {
                var value = Transposed[col, row];

                if (value > max)
                {
                    max = value;
                }

                if (value < min)
                {
                    min = value;
                }
            }
        }

        Draw(bitmap, Transposed, min, max);

        ManagedOps.Free(Activation, Transposed);

        return(bitmap);
    }

    // return empty bitmap
    return(new Bitmap(1, 1, PixelFormat.Format24bppRgb));
}
/// <summary>
/// Renders a 2D array as a Bitmap heat map, optionally transposing it first.
/// The input array is never freed; only the internal transposed copy is.
/// </summary>
/// <param name="layer">Array of values to render.</param>
/// <param name="transpose">When true, draw the transpose of the array.</param>
/// <returns>A newly allocated Bitmap owned by the caller.</returns>
public static Bitmap Get(ManagedArray layer, bool transpose = true)
{
    // Fix: removed leftover Console.WriteLine debug trace of the layer dimensions.
    if (transpose)
    {
        var Transposed = new ManagedArray(layer, false);

        ManagedMatrix.Transpose(Transposed, layer);

        var bitmap = new Bitmap(Transposed.x, Transposed.y, PixelFormat.Format24bppRgb);

        // Get normalization values
        double min = Double.MaxValue;
        double max = Double.MinValue;

        GetNormalization(Transposed, ref min, ref max);

        Draw(bitmap, Transposed, min, max);

        ManagedOps.Free(Transposed);

        return(bitmap);
    }
    else
    {
        var bitmap = new Bitmap(layer.x, layer.y, PixelFormat.Format24bppRgb);

        // Get normalization values
        double min = Double.MaxValue;
        double max = Double.MinValue;

        GetNormalization(layer, ref min, ref max);

        Draw(bitmap, layer, min, max);

        return(bitmap);
    }
}
/// <summary>
/// Parses delimited training data: each line holds the input features
/// followed by the target category. Fills InputData and OutputData, tracks
/// per-feature min/max in NormalizationData, then normalizes the inputs and
/// refreshes the normalization view. Returns false when the text is empty.
/// </summary>
/// <param name="training">Raw delimited training-data text.</param>
/// <returns>True when the training data was parsed and stored.</returns>
protected bool SetupInputData(string training)
{
    var text = training.Trim();

    if (string.IsNullOrEmpty(text))
    {
        return(false);
    }

    var TrainingBuffer = new TextBuffer(new TextTagTable()) { Text = text };

    // One training example per line of input.
    Examples.Value = Convert.ToDouble(TrainingBuffer.LineCount, ci);

    var inpx = Convert.ToInt32(InputLayerNodes.Value, ci);
    var inpy = Convert.ToInt32(Examples.Value, ci);

    ManagedOps.Free(InputData, OutputData, NormalizationData);

    InputData = new ManagedArray(inpx, inpy);
    NormalizationData = new ManagedArray(inpx, 2);
    OutputData = new ManagedArray(1, inpy);

    // Row 0 holds the running minimum, row 1 the running maximum.
    int min = 0;
    int max = 1;

    for (int x = 0; x < inpx; x++)
    {
        NormalizationData[x, min] = double.MaxValue;
        NormalizationData[x, max] = double.MinValue;
    }

    var current = DelimiterBox.Active;
    var delimiter = current >= 0 && current < Delimiters.Count ? Delimiters[current].Character : '\t';

    var inputs = inpx;

    using (var reader = new StringReader(TrainingBuffer.Text))
    {
        for (int y = 0; y < inpy; y++)
        {
            var line = reader.ReadLine();

            if (!string.IsNullOrEmpty(line))
            {
                var tokens = line.Split(delimiter);

                // Require at least one extra token: the category label.
                if (inputs > 0 && tokens.Length > inputs)
                {
                    OutputData[0, y] = SafeConvert.ToDouble(tokens[inputs]);

                    for (int x = 0; x < inpx; x++)
                    {
                        var data = SafeConvert.ToDouble(tokens[x]);

                        // Update the running per-feature min/max.
                        NormalizationData[x, min] = data < NormalizationData[x, min] ? data : NormalizationData[x, min];
                        NormalizationData[x, max] = data > NormalizationData[x, max] ? data : NormalizationData[x, max];

                        InputData[x, y] = data;
                    }
                }
            }
        }
    }

    NormalizeData(InputData, NormalizationData);

    UpdateTextView(Normalization, NormalizationData);

    return(true);
}
/// <summary>
/// Initializes the SVM trainer: copies the training data, stores the
/// hyper-parameters, pre-computes the kernel matrix for the chosen kernel,
/// and maps the target labels into {-1, +1}.
/// </summary>
/// <param name="x">Training inputs, one example per row.</param>
/// <param name="y">Training labels, one per row.</param>
/// <param name="c">Regularization parameter.</param>
/// <param name="kernel">Kernel type used to build the kernel matrix.</param>
/// <param name="param">Kernel parameters (meaning depends on the kernel).</param>
/// <param name="tolerance">Numerical tolerance for the optimizer.</param>
/// <param name="maxpasses">Maximum optimization passes without progress.</param>
/// <param name="category">Label value treated as the positive (+1) class.</param>
public void Setup(ManagedArray x, ManagedArray y, double c, KernelType kernel, ManagedArray param, double tolerance = 0.001, int maxpasses = 5, int category = 1)
{
    ManagedOps.Free(dx, dy);

    dx = new ManagedArray(x);
    dy = new ManagedArray(y);

    ManagedOps.Copy2D(dx, x, 0, 0);
    ManagedOps.Copy2D(dy, y, 0, 0);

    ktype = kernel;

    // Data parameters
    var m = Rows(dx);

    Category = category;
    MaxIterations = maxpasses;
    Tolerance = tolerance;
    C = c;

    // Reset internal variables
    ManagedOps.Free(K, kparam, E, alpha);

    kparam = new ManagedArray(param);
    ManagedOps.Copy2D(kparam, param, 0, 0);

    // Variables
    alpha = new ManagedArray(1, m);
    E = new ManagedArray(1, m);
    b = 0;
    Iterations = 0;

    // Pre-compute the Kernel Matrix since our dataset is small
    // (In practice, optimized SVM packages that handle large datasets
    // gracefully will *not* do this)
    if (kernel == KernelType.LINEAR)
    {
        // Computation for the Linear Kernel
        // This is equivalent to computing the kernel on every pair of examples
        var tinput = ManagedMatrix.Transpose(dx);

        K = ManagedMatrix.Multiply(dx, tinput);

        double slope = kparam.Length() > 0 ? kparam[0] : 1;
        double inter = kparam.Length() > 1 ? kparam[1] : 0;

        ManagedMatrix.Multiply(K, slope);
        ManagedMatrix.Add(K, inter);

        ManagedOps.Free(tinput);
    }
    else if (kernel == KernelType.GAUSSIAN || kernel == KernelType.RADIAL)
    {
        // RBF Kernel
        // This is equivalent to computing the kernel on every pair of examples
        var pX2 = ManagedMatrix.Pow(dx, 2);
        var rX2 = ManagedMatrix.RowSums(pX2);
        var tX2 = ManagedMatrix.Transpose(rX2);
        var trX = ManagedMatrix.Transpose(dx);

        // Pairwise squared distances: ||xi||^2 + ||xj||^2 - 2 * xi . xj
        var tempK = new ManagedArray(m, m);
        var temp1 = new ManagedArray(m, m);
        var temp2 = ManagedMatrix.Multiply(dx, trX);

        ManagedMatrix.Expand(rX2, m, 1, tempK);
        ManagedMatrix.Expand(tX2, 1, m, temp1);
        ManagedMatrix.Multiply(temp2, -2);
        ManagedMatrix.Add(tempK, temp1);
        ManagedMatrix.Add(tempK, temp2);

        double sigma = kparam.Length() > 0 ? kparam[0] : 1;

        var g = Math.Abs(sigma) > 0 ? Math.Exp(-1 / (2 * sigma * sigma)) : 0;

        // Fix: test the kernel argument, not the Type property — Type is only
        // assigned later in Generate(), so it still held a stale/default value
        // here and the RADIAL square root was never applied during setup.
        if (kernel == KernelType.RADIAL)
        {
            ManagedMatrix.Sqrt(tempK);
        }

        K = ManagedMatrix.Pow(g, tempK);

        ManagedOps.Free(pX2, rX2, tX2, trX, tempK, temp1, temp2);
    }
    else
    {
        // Pre-compute the Kernel Matrix
        // The following can be slow due to the lack of vectorization
        K = new ManagedArray(m, m);

        var Xi = new ManagedArray(Cols(dx), 1);
        var Xj = new ManagedArray(Cols(dx), 1);

        for (var i = 0; i < m; i++)
        {
            ManagedOps.Copy2D(Xi, dx, 0, i);

            for (var j = 0; j < m; j++)
            {
                ManagedOps.Copy2D(Xj, dx, 0, j);

                K[j, i] = KernelFunction.Run(kernel, Xi, Xj, kparam);

                // the matrix is symmetric
                K[i, j] = K[j, i];
            }
        }

        ManagedOps.Free(Xi, Xj);
    }

    eta = 0;
    L = 0;
    H = 0;

    // Map 0 (or other categories) to -1
    for (var i = 0; i < Rows(dy); i++)
    {
        dy[i] = (int)dy[i] != Category ? -1 : 1;
    }
}
// SVMPREDICT returns a vector of predictions using a trained SVM model
//(svm_train).
//
// pred = SVMPREDICT(model, X) returns a vector of predictions using a
// trained SVM model (svm_train). X is a mxn matrix where there each
// example is a row. model is a svm model returned from svm_train.
// predictions pred is a m x 1 column of predictions of {0, 1} values.
//
// Converted to R by: SD Separa (2016/03/18)
// Converted to C# by: SD Separa (2018/09/29)
/// <summary>
/// Computes raw decision values for each input example using the trained
/// model. Returns all-zero predictions (sized to the input rows) when the
/// model has not been trained.
/// </summary>
/// <param name="input">Sample matrix, one example per row.</param>
/// <returns>One decision value per example; caller owns the array.</returns>
public ManagedArray Predict(ManagedArray input)
{
    var predictions = new ManagedArray(1, Rows(input));

    if (Trained)
    {
        var x = new ManagedArray(input);

        // A single-column input is treated as one transposed example.
        if (Cols(x) == 1)
        {
            ManagedMatrix.Transpose(x, input);
        }
        else
        {
            ManagedOps.Copy2D(x, input, 0, 0);
        }

        var m = Rows(x);

        predictions.Resize(1, m);

        if (Type == KernelType.LINEAR)
        {
            // Linear kernel: predictions = x * W + B.
            ManagedMatrix.Multiply(predictions, x, W);
            ManagedMatrix.Add(predictions, B);
        }
        else if (Type == KernelType.GAUSSIAN || Type == KernelType.RADIAL)
        {
            // RBF Kernel
            // This is equivalent to computing the kernel on every pair of examples
            var pX1 = ManagedMatrix.Pow(x, 2);
            var pX2 = ManagedMatrix.Pow(ModelX, 2);
            var rX2 = ManagedMatrix.RowSums(pX2);
            var X1 = ManagedMatrix.RowSums(pX1);
            var X2 = ManagedMatrix.Transpose(rX2);
            var tX = ManagedMatrix.Transpose(ModelX);
            var tY = ManagedMatrix.Transpose(ModelY);
            var tA = ManagedMatrix.Transpose(Alpha);

            var rows = Rows(X1);
            var cols = Cols(X2);

            // Pairwise squared distances: ||x||^2 + ||sv||^2 - 2 * x . sv.
            var tempK = new ManagedArray(cols, rows);
            var temp1 = new ManagedArray(cols, rows);
            var temp2 = ManagedMatrix.Multiply(x, tX);

            ManagedMatrix.Multiply(temp2, -2);

            ManagedMatrix.Expand(X1, cols, 1, tempK);
            ManagedMatrix.Expand(X2, 1, rows, temp1);

            ManagedMatrix.Add(tempK, temp1);
            ManagedMatrix.Add(tempK, temp2);

            var sigma = KernelParam.Length() > 0 ? KernelParam[0] : 1;

            // RADIAL uses the distance itself rather than its square.
            if (Type == KernelType.RADIAL)
            {
                ManagedMatrix.Sqrt(tempK);
            }

            var g = Math.Abs(sigma) > 0 ? Math.Exp(-1 / (2 * sigma * sigma)) : 0;

            var Kernel = ManagedMatrix.Pow(g, tempK);

            // Weight each kernel column by the support vector's label and alpha.
            var tempY = new ManagedArray(Cols(tY), rows);
            var tempA = new ManagedArray(Cols(tA), rows);

            ManagedMatrix.Expand(tY, 1, rows, tempY);
            ManagedMatrix.Expand(tA, 1, rows, tempA);

            ManagedMatrix.Product(Kernel, tempY);
            ManagedMatrix.Product(Kernel, tempA);

            var p = ManagedMatrix.RowSums(Kernel);

            ManagedOps.Copy2D(predictions, p, 0, 0);

            ManagedMatrix.Add(predictions, B);

            ManagedOps.Free(pX1, pX2, rX2, X1, X2, tempK, temp1, temp2, tX, tY, tA, tempY, tempA, Kernel, p);
        }
        else
        {
            // Generic kernels: accumulate alpha[j] * y[j] * K(x_i, sv_j) per example.
            var Xi = new ManagedArray(Cols(x), 1);
            var Xj = new ManagedArray(Cols(ModelX), 1);

            for (var i = 0; i < m; i++)
            {
                double prediction = 0;

                ManagedOps.Copy2D(Xi, x, 0, i);

                for (var j = 0; j < Rows(ModelX); j++)
                {
                    ManagedOps.Copy2D(Xj, ModelX, 0, j);

                    prediction += Alpha[j] * ModelY[j] * KernelFunction.Run(Type, Xi, Xj, KernelParam);
                }

                predictions[i] = prediction + B;
            }

            ManagedOps.Free(Xi, Xj);
        }

        ManagedOps.Free(x);
    }

    return(predictions);
}
/// <summary>
/// Draws contour lines of the network's prediction surface (levels 0.6,
/// 0.8, 1) over a scatter plot of the data, plus a bounding box, into the
/// shared ContourGraph pixbuf. Updates the shared plot-range fields
/// (minx/maxx/miny/maxy/deltax/deltay) as a side effect.
/// </summary>
/// <param name="network">Trained network evaluated over the plot grid.</param>
/// <param name="opts">Network options forwarded to Predict/Points.</param>
/// <param name="threshold">Classification threshold for the scatter overlay.</param>
/// <param name="x">Sample matrix, one example per row.</param>
/// <param name="width">Plot width in pixels.</param>
/// <param name="height">Plot height in pixels.</param>
/// <param name="f1">Feature column plotted on the horizontal axis.</param>
/// <param name="f2">Feature column plotted on the vertical axis.</param>
/// <returns>The shared ContourGraph pixbuf (not owned by the caller).</returns>
public static Pixbuf Contour(ManagedDNN network, NeuralNetworkOptions opts, double threshold, ManagedArray x, int width, int height, int f1 = 0, int f2 = 0)
{
    InitializeContour(11, width, height);

    var m = Rows(x);

    var xplot = new double[width];
    var yplot = new double[height];

    var data = new double[height, width];

    minx = double.MaxValue;
    maxx = double.MinValue;

    miny = double.MaxValue;
    maxy = double.MinValue;

    // Clamp the requested feature columns into range.
    f1 = f1 >= 0 && f1 < Cols(x) ? f1 : 0;
    f2 = f2 >= 0 && f2 < Cols(x) ? f2 : 0;

    // Find the data extents of the two plotted features.
    for (var j = 0; j < m; j++)
    {
        minx = Math.Min(x[f1, j], minx);
        maxx = Math.Max(x[f1, j], maxx);

        miny = Math.Min(x[f2, j], miny);
        maxy = Math.Max(x[f2, j], maxy);
    }

    deltax = (maxx - minx) / width;
    deltay = (maxy - miny) / height;

    // Pad the plot range by 8 pixels worth of margin on each side,
    // then recompute the per-pixel step for the padded range.
    minx = minx - 8 * deltax;
    maxx = maxx + 8 * deltax;

    miny = miny - 8 * deltay;
    maxy = maxy + 8 * deltay;

    deltax = (maxx - minx) / width;
    deltay = (maxy - miny) / height;

    // For predict
    for (var i = 0; i < width; i++)
    {
        xplot[i] = minx + i * deltax;
    }

    for (var i = 0; i < height; i++)
    {
        yplot[i] = miny + i * deltay;
    }

    // Evaluate the network over the plot grid, one column of the grid at a time.
    // NOTE(review): xx is allocated with 2 columns but indexed with the
    // clamped f1/f2 — confirm callers never pass feature indices >= 2.
    var xx = new ManagedArray(2, height);

    for (var i = 0; i < width; i++)
    {
        for (var j = 0; j < height; j++)
        {
            xx[f1, j] = xplot[i];
            xx[f2, j] = yplot[j];
        }

        var p = network.Predict(xx, opts);

        for (var j = 0; j < height; j++)
        {
            data[i, j] = p[j];
        }

        ManagedOps.Free(p);
    }

    // Contour levels drawn over the prediction surface.
    var z = new double[] { 0.6, 0.8, 1 };

    Conrec.Contour(data, xplot, yplot, z, ContourLine);

    Points(ContourGraph, network, opts, threshold, x, width, height, f1, f2);

    ManagedOps.Free(xx);

    var border = new Color(128, 128, 128);

    // Plot bounding box
    var cw = ContourGraph.Width - 1;
    var ch = ContourGraph.Height;

    Common.Line(ContourGraph, 0, 1, cw, 1, border);
    Common.Line(ContourGraph, cw, 1, cw, ch, border);
    Common.Line(ContourGraph, 0, ch, cw, ch, border);
    Common.Line(ContourGraph, 0, 1, 0, ch, border);

    return(ContourGraph);
}
/// <summary>
/// Releases all layer buffers: feature maps, activations, biases, and their
/// corresponding deltas.
/// </summary>
public void Free()
{
    ManagedOps.Free(FeatureMap, DeltaFeatureMap);
    ManagedOps.Free(Activation, Delta);
    ManagedOps.Free(Bias, DeltaBias);
}