/// <summary>
/// Trains the logistic regression parameters using batch gradient descent.
/// </summary>
/// <param name="iterations">Maximum number of gradient descent steps</param>
/// <param name="learningRate">Step size applied to each gradient update</param>
/// <param name="lambda">Regularisation coefficient (default 0.1), forwarded to the cost and derivative calculations</param>
/// <param name="costCallback">Optional; invoked with the current cost before each step — return false to stop training early</param>
/// <returns>A trained LogisticRegression model holding the final theta</returns>
public LogisticRegression GradientDescent(int iterations, float learningRate, float lambda = 0.1f, Func <float, bool> costCallback = null) {
    // start with all-zero weights, one per feature column
    var theta = _lap.CreateVector(_feature.ColumnCount, 0f);
    for (var i = 0; i < iterations; i++) {
        if (costCallback != null) {
            var cost = ComputeCost(theta, lambda);
            if (!costCallback(cost)) {
                break;
            }
        }
        // step against the (regularised) gradient; the derivative vector is scaled in place
        using (var d = _Derivative(theta, lambda)) {
            d.Multiply(learningRate);
            var theta2 = theta.Subtract(d);
            theta.Dispose(); // release the previous theta before swapping in the updated one
            theta = theta2;
        }
    }
    // copy the weights out of the (possibly GPU-backed) vector before disposing it
    var ret = new LogisticRegression {
        Theta = theta.Data
    };
    theta.Dispose();
    return(ret);
}
public void TestConstrainInput() {
    // values outside [-1, 1] should be clamped to the range boundaries
    var source = _cpu.CreateVector(new[] { -1.5f, -1f, -0.5f, 0, 0.5f, 1f, 1.5f }).ReshapeAsMatrix(1, 7);
    var expected = _cpu.CreateVector(new[] { -1f, -1f, -0.5f, 0, 0.5f, 1f, 1f }).ReshapeAsMatrix(1, 7);
    var constrain = _factory.GraphAction.Constrain(-1f, 1f);
    _TestAction(constrain, source.AsGraphData(), expected.AsGraphData());
}
public VectorBasedStatistics(ILinearAlgebraProvider lap, int size, float[] mean, float[] m2, int count) {
    _size = size;
    _count = count;
    // restore existing running statistics when supplied, otherwise start from zero vectors
    if (mean != null)
        _mean = lap.CreateVector(mean);
    else
        _mean = lap.CreateVector(size, 0f);
    if (m2 != null)
        _m2 = lap.CreateVector(m2);
    else
        _m2 = lap.CreateVector(size, 0f);
}
public void TestKMeans() {
    // vectorise the simple chinese sample set, then cluster the vectors into two groups
    var stringTableBuilder = new StringTableBuilder();
    var vectorised = NaiveBayesTests.GetSimpleChineseSet(stringTableBuilder)
        .ConvertToWeightedIndexList(false)
        .Vectorise();
    var data = vectorised.ToDictionary(item => _lap.CreateVector(item.Data), item => item.Classification);
    var clusters = data.Keys.ToList().KMeans(_lap, 2);
    // map each cluster's vectors back to their classification labels
    var clusterLabels = clusters.Select(cluster => cluster.Select(v => data[v]).ToArray()).ToList();
}
public void TestMatrixCreation() {
    // a matrix built from row vectors on the GPU should match the CPU equivalent
    var values = new[] {
        Enumerable.Range(0, 10).Select(v => (float)v).ToArray(),
        Enumerable.Range(0, 10).Select(v => (float)v * 2).ToArray(),
        Enumerable.Range(0, 10).Select(v => (float)v * 3).ToArray(),
    };
    var cpuRowList = values.Select(v => _cpu.CreateVector(v)).ToList();
    var cpuMatrix = _cpu.CreateMatrix(cpuRowList);
    var gpuRowList = values.Select(v => _cuda.CreateVector(v)).ToList();
    try {
        using (var gpuMatrix = _cuda.CreateMatrix(gpuRowList)) {
            FloatingPointHelper.AssertEqual(cpuMatrix.AsIndexable(), gpuMatrix.AsIndexable());
        }
    }
    finally {
        // previously the CPU vectors/matrix were never disposed, and the GPU vectors
        // leaked if the assertion threw — release everything deterministically
        gpuRowList.ForEach(v => v.Dispose());
        cpuRowList.ForEach(v => v.Dispose());
        cpuMatrix.Dispose();
    }
}
public void GetNumericRows() {
    // build a small table and verify that GetNumericRows extracts the requested column (index 1 = "val2")
    var builder = BrightWireProvider.CreateDataTableBuilder();
    builder.AddColumn(ColumnType.Float, "val1");
    builder.AddColumn(ColumnType.Double, "val2");
    builder.AddColumn(ColumnType.String, "cls", true);
    builder.Add(0.5f, 1.1, "a");
    builder.Add(0.2f, 1.5, "b");
    builder.Add(0.7f, 0.5, "c");
    builder.Add(0.2f, 0.6, "d");
    var table = builder.Build();
    var rows = table.GetNumericRows(new[] { 1 }).Select(r => _lap.CreateVector(r)).
        Select(r => r.AsIndexable()).ToList();
    // MSTest's Assert.AreEqual takes (expected, actual) — the original had them reversed,
    // which produces misleading failure messages
    Assert.AreEqual(1.1f, rows[0][0]);
    Assert.AreEqual(1.5f, rows[1][0]);
}
/// <summary>
/// Create a vector initialised from an existing float vector
/// </summary>
/// <param name="lap">Linear algebra provider</param>
/// <param name="data">Vector to copy</param>
/// <returns>A new vector whose contents are copied from <paramref name="data"/></returns>
public static IVector CreateVector(this ILinearAlgebraProvider lap, FloatVector data)
{
    // removed dead commented-out code; bulk-assign the data rather than
    // copying element by element through a lambda
    var ret = lap.CreateVector(data.Count);
    ret.Data = data;
    return ret;
}
// normal method removed until GPU provider can properly calculate matrix inverses!
//public LinearRegression Solve()
//{
//    // solve using normal method
//    using (var lambdaMatrix = _lap.CreateIdentityMatrix(_feature.ColumnCount))
//    using (var zero = _lap.CreateVector(1, 0f)) {
//        lambdaMatrix.UpdateColumn(0, zero.AsIndexable(), 0);
//        using (var featureTranspose = _feature.Transpose())
//        using (var pinv = featureTranspose.Multiply(_feature))
//        using (var pinv2 = pinv.Add(lambdaMatrix))
//        using (var pinv3 = pinv2.Inverse())
//        using (var tc = _target.ReshapeAsColumnMatrix())
//        using (var a2 = featureTranspose.Multiply(tc))
//        using (var ret = pinv3.Multiply(a2))
//        using (var theta = ret.Column(0)) {
//            return new LinearRegression {
//                Theta = theta.Data
//            };
//        }
//    }
//}

/// <summary>
/// Trains linear regression parameters using batch gradient descent with L2 regularisation.
/// </summary>
/// <param name="iterations">Maximum number of gradient descent steps</param>
/// <param name="learningRate">Step size applied to each gradient update</param>
/// <param name="lambda">Regularisation coefficient (default 0.1)</param>
/// <param name="costCallback">Optional; invoked with the current cost before each step — return false to stop training early</param>
/// <returns>A trained LinearRegression model holding the final theta</returns>
public LinearRegression GradientDescent(int iterations, float learningRate, float lambda = 0.1f, Func <float, bool> costCallback = null) {
    // shrink factor applied to the weights each step (weight decay)
    var regularisation = 1f - (learningRate * lambda) / _feature.RowCount;
    var theta = _lap.CreateVector(_feature.ColumnCount, 0f);
    // element 0 is the bias term, which is conventionally not regularised
    using (var regularisationVector = _lap.CreateVector(theta.Count, i => i == 0 ? 1f : regularisation)) {
        for (var i = 0; i < iterations; i++) {
            if (costCallback != null) {
                var cost = ComputeCost(theta, lambda);
                if (!costCallback(cost)) {
                    break;
                }
            }
            // error = prediction - target; gradient = error^T * features
            // the using-var declarations are scoped to the loop body, so each
            // intermediate is disposed at the end of every iteration
            using var p = _feature.Multiply(theta);
            using var pc = p.Column(0);
            using var e = pc.Subtract(_target);
            using var e2 = e.ReshapeAsRowMatrix();
            using var d = e2.Multiply(_feature);
            using var delta = d.Row(0);
            delta.Multiply(learningRate);
            // decay the weights (bias excluded via regularisationVector), then subtract the step
            using var temp = theta.PointwiseMultiply(regularisationVector);
            var theta2 = temp.Subtract(delta);
            theta.Dispose(); // release the previous theta before swapping in the updated one
            theta = theta2;
        }
    }
    // copy the weights out of the (possibly GPU-backed) vector before disposing it
    var ret = new LinearRegression {
        Theta = theta.Data
    };
    theta.Dispose();
    return(ret);
}
public KNNClassifier(ILinearAlgebraProvider lap, KNearestNeighbours model, int k, DistanceMetric distanceMetric = DistanceMetric.Euclidean) {
    _k = k;
    _lap = lap;
    _model = model;
    _distanceMetric = distanceMetric;
    // materialise every training instance as a vector on the current provider
    foreach (var item in model.Instance)
        _instance.Add(lap.CreateVector(item.Data));
}
public LogisticRegressionTrainer(ILinearAlgebraProvider lap, IDataTable table) {
    _lap = lap;

    var rowCount = table.RowCount;
    var targetColumn = table.TargetColumnIndex;
    int columnCount = table.ColumnCount;

    // every column except the classification target is treated as a feature
    var featureColumns = Enumerable.Range(0, columnCount)
        .Where(index => index != targetColumn)
        .ToList();
    var data = table.GetNumericColumns(featureColumns);

    // column 0 of the feature matrix is the constant bias term (1);
    // the remaining columns hold the numeric feature data
    _feature = lap.CreateMatrix(rowCount, columnCount, (i, j) => j == 0 ? 1 : data[j - 1][i]);
    _target = lap.CreateVector(table.GetColumn <float>(targetColumn));
}
public Convolutional(ILinearAlgebraProvider lap, ConvolutionalNetwork.Layer layer) {
    _lap = lap;
    // copy the convolution geometry from the serialised layer
    _padding = layer.Padding;
    _stride = layer.Stride;
    _filterWidth = layer.FilterWidth;
    _filterHeight = layer.FilterHeight;
    // rebuild the feed-forward sub-layer from the stored weights, bias and activation
    var weight = lap.CreateMatrix(layer.Data.Weight);
    var bias = lap.CreateVector(layer.Data.Bias);
    _layer = new StandardFeedForward(weight, bias, lap.NN.GetActivation(layer.Data.Activation));
}
public void TensorCreateFromVector() {
    const int DEPTH = 3, ROWS = 4, COLUMNS = 4;

    // round trip on the CPU: tensor -> flattened vector -> tensor should preserve all values
    var matrixList = Enumerable.Range(0, DEPTH)
        .Select(i => _cpu.CreateMatrix(ROWS, COLUMNS, (j, k) => (i + 1) * (j + 1) * (k + 1)))
        .ToList();
    var cpuTensor = _cpu.Create3DTensor(matrixList);
    var cpuVector = cpuTensor.ConvertToVector();
    var cpuTensor2 = cpuVector.ConvertTo3DTensor(ROWS, COLUMNS, DEPTH);
    FloatingPointHelper.AssertEqual(cpuTensor.AsIndexable(), cpuTensor2.AsIndexable());

    // the same conversion on the GPU should match the CPU result
    using (var gpuVector = _cuda.CreateVector(cpuVector.AsIndexable()))
    using (var gpuTensor2 = gpuVector.ConvertTo3DTensor(ROWS, COLUMNS, DEPTH)) {
        FloatingPointHelper.AssertEqual(cpuTensor.AsIndexable(), gpuTensor2.AsIndexable());
    }
}
IEnumerable <Tuple <string, float> > _Classify(IRow row) {
    // encode the row's feature columns into a dense float vector
    var features = _model.FeatureColumn
        .Select(columnIndex => row.GetField <float>(columnIndex))
        .ToArray();

    // measure the distance from this sample to every stored training instance
    using var vector = _lap.CreateVector(features);
    var distances = vector.FindDistances(_instance, _distanceMetric).AsIndexable();

    // pair each distance with its classification label, let the k nearest
    // neighbours vote, and weight each vote by inverse distance
    var labelled = distances.Values.Zip(_model.Classification, (distance, label) => Tuple.Create(label, distance));
    return labelled
        .OrderBy(t => t.Item2)
        .Take(_k)
        .GroupBy(t => t.Item1)
        .Select(g => Tuple.Create(g.Key, g.Sum(t => 1f / t.Item2)));
}
public void TestCosineDistance() {
    // cosine distances computed on the GPU should match the CPU implementation
    var distribution = new Normal(0, 5);

    var vectors = Enumerable.Range(0, 10).Select(i => _cpu.CreateVector(100, j => Convert.ToSingle(distribution.Sample())).AsIndexable()).ToList();
    var compareTo = Enumerable.Range(0, 20).Select(i => _cpu.CreateVector(100, j => Convert.ToSingle(distribution.Sample())).AsIndexable()).ToList();
    var distances = _cpu.CalculateDistances(vectors, compareTo, DistanceMetric.Cosine);

    var gpuVectors = vectors.Select(v => _cuda.CreateVector(v)).ToList();
    var gpuCompareTo = compareTo.Select(v => _cuda.CreateVector(v)).ToList();
    try {
        var gpuDistances = _cuda.CalculateDistances(gpuVectors, gpuCompareTo, DistanceMetric.Cosine);
        FloatingPointHelper.AssertEqual(distances.AsIndexable(), gpuDistances.AsIndexable());
    }
    finally {
        // previously the GPU vectors were never disposed, leaking device memory
        // (and doing so even on assertion failure) — release them deterministically
        gpuVectors.ForEach(v => v.Dispose());
        gpuCompareTo.ForEach(v => v.Dispose());
    }
}
/// <summary>
/// Create a vector in which every element holds the same constant value
/// </summary>
/// <param name="lap"></param>
/// <param name="length">Vector size</param>
/// <param name="value">Constant value</param>
/// <returns></returns>
public static IVector CreateVector(this ILinearAlgebraProvider lap, int length, float value = 0f)
{
    return lap.CreateVector(length, index => value);
}
public float Predict(params float[] vals) {
    // element 0 is the constant bias term (1); the remainder are the supplied values
    var augmented = _lap.CreateVector(vals.Length + 1, index => (index == 0) ? 1 : vals[index - 1]);
    return augmented.DotProduct(_theta);
}
/// <summary>
/// Creates a classifier from this model
/// </summary>
/// <param name="lap">Linear algebra provider</param>
/// <returns>A classifier backed by this model's theta parameters</returns>
public ILogisticRegressionClassifier CreatePredictor(ILinearAlgebraProvider lap)
{
    var theta = lap.CreateVector(Theta.Data);
    return new LogisticRegressionPredictor(lap, theta);
}
public void TestVectorCreation() {
    Load();
    var values = Enumerable.Range(0, 10).Select(v => (float)v).ToList();

    // the CPU vector should reflect the source values at each index
    var a = _cpu.CreateVector(values).AsIndexable();
    Assert.AreEqual(a[0], 0f);
    Assert.AreEqual(a[4], 4f);
    Assert.AreEqual(a[9], 9f);

    // the GPU copy should match the CPU vector element for element
    IIndexableVector gpuResults;
    using (var gpuA = _cuda.CreateVector(values)) {
        gpuResults = gpuA.AsIndexable();
    }
    FloatingPointHelper.AssertEqual(gpuResults, a);
    Cleanup();
}
// Creates a bias vector in which every element is the configured constant bias value
public IVector CreateBias(int size) => _lap.CreateVector(size, _biasValue);
// Creates a bias vector whose elements are each produced by a call to _GetBias
public IVector CreateBias(int size) => _lap.CreateVector(size, index => _GetBias());
/// <summary>
/// Creates a predictor from this model
/// </summary>
/// <param name="lap">The linear algebra provider</param>
/// <returns>A predictor backed by this model's theta parameters</returns>
public ILinearRegressionPredictor CreatePredictor(ILinearAlgebraProvider lap)
{
    var theta = lap.CreateVector(Theta.Data);
    return new RegressionPredictor(lap, theta);
}
/// <summary>
/// Create a vector
/// </summary>
/// <param name="lap"></param>
/// <param name="data">Vector to copy</param>
/// <returns>A new vector whose contents are copied from <paramref name="data"/></returns>
public static IVector CreateVector(this ILinearAlgebraProvider lap, FloatVector data)
{
    // assign the data in bulk rather than invoking a delegate per element —
    // this also matches the other CreateVector(FloatVector) implementation
    var ret = lap.CreateVector(data.Count);
    ret.Data = data;
    return ret;
}
// Creates a zero-initialised bias vector of the requested size
public IVector CreateBias(int size) => _lap.CreateVector(size, 0f);
// Builds a vector from the serialised float array via the numerics provider,
// then wraps the indexable result in this provider's vector type
public IVector CreateVector(FloatArray data)
{
    var indexable = (IIndexableVector)_numerics.CreateVector(data);
    return Create(indexable);
}
/// <summary>
/// Creates a vector based on an enumerable of floats
/// </summary>
/// <param name="lap"></param>
/// <param name="data">The initial values in the vector</param>
/// <returns></returns>
public static IVector CreateVector(this ILinearAlgebraProvider lap, IEnumerable <float> data)
{
    // materialise once so the enumerable is not re-evaluated per element
    var values = data.ToArray();
    return lap.CreateVector(values.Length, index => values[index]);
}
/// <summary>
/// Create a vector
/// </summary>
/// <param name="lap"></param>
/// <param name="data">Indexable vector to copy</param>
/// <returns>A new vector containing a copy of each element of <paramref name="data"/></returns>
public static IVector CreateVector(this ILinearAlgebraProvider lap, IIndexableVector data)
{
    return lap.CreateVector(data.Count, index => data[index]);
}
/// <summary>
/// Create a vector
/// </summary>
/// <param name="lap"></param>
/// <param name="data">List of values</param>
/// <returns>A new vector containing a copy of each element of <paramref name="data"/></returns>
public static IVector CreateVector(this ILinearAlgebraProvider lap, IReadOnlyList <float> data)
{
    return lap.CreateVector(data.Count, index => data[index]);
}
/// <summary>
/// Create a vector
/// </summary>
/// <param name="lap"></param>
/// <param name="data">Array of values</param>
/// <returns>A new vector containing a copy of each element of <paramref name="data"/></returns>
public static IVector CreateVector(this ILinearAlgebraProvider lap, float[] data)
{
    return lap.CreateVector(data.Length, index => data[index]);
}