Example #1
        public IReadOnlyList <IRecurrentOutput> Execute(IReadOnlyList <float[]> inputData)
        {
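            // context[0] carries the current input row and context[1] the recurrent
            // memory; each layer's Activate() reads and updates both in place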
            var context = new List <IDisposableMatrixExecutionLine>();

            context.Add(new DisposableMatrixExecutionLine());
            context.Add(new DisposableMatrixExecutionLine());

            var ret = new List <RecurrentOutput>();

            using (var m2 = _initialMemory.ToRowMatrix()) {
                context[1].Assign(m2);
                foreach (var item in inputData)
                {
                    using (var curr = _lap.Create(item))
                    using (var curr2 = curr.ToRowMatrix()) {
                        context[0].Assign(curr2);

                        foreach (var action in _layer)
                        {
                            action.Activate(context);
                        }
                        var memoryOutput = context[1].Current.AsIndexable().Rows.First();

                        var output = context[0].Current.Row(0).AsIndexable();
                        ret.Add(new RecurrentOutput(output, memoryOutput));
                    }
                }
            }
            return(ret);
        }
Example #2
        public LogisticRegression GradientDescent(int iterations, float learningRate, float lambda = 0.1f, Func <float, bool> costCallback = null)
        {
            var theta = _lap.Create(_feature.ColumnCount, 0f);

            for (var i = 0; i < iterations; i++)
            {
                if (costCallback != null)
                {
                    var cost = ComputeCost(theta, lambda);
                    if (!costCallback(cost))
                    {
                        break;
                    }
                }
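                // one gradient descent step: theta <- theta - learningRate * derivative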
                using (var d = _Derivative(theta, lambda)) {
                    d.Multiply(learningRate);
                    var theta2 = theta.Subtract(d);
                    theta.Dispose();
                    theta = theta2;
                }
            }
            var ret = new LogisticRegression {
                Theta = theta.Data
            };

            theta.Dispose();
            return(ret);
        }
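
A minimal usage sketch of the method above; `trainer` stands in for whatever object exposes GradientDescent, and the early-stopping threshold is illustrative:

        var lastCost = float.MaxValue;
        var model = trainer.GradientDescent(1000, 0.1f, 0.1f, cost => {
            var improved = lastCost - cost > 1e-6f;  // stop once the cost plateaus
            lastCost = cost;
            return improved;                         // returning false ends training early
        });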
Example #3
        public IMiniBatch GetTrainingData(IReadOnlyList <int> rows)
        {
            var input  = _lap.Create(rows.Count, _inputSize, (x, y) => Get(rows[x], y));
            var output = _lap.Create(rows.Count, _outputSize, (x, y) => GetPrediction(rows[x], y));

            return(new MiniBatch(input, output));
        }
Example #4
        public IMiniBatch GetTrainingData(IReadOnlyList <int> rows)
        {
            var batchRow = _table.GetRows(rows).Select(r => _Convert(r)).ToList();
            var input    = _lap.Create(batchRow.Count, _inputSize, (x, y) => batchRow[x].Item1[y]);
            var output   = _lap.Create(batchRow.Count, _outputSize, (x, y) => batchRow[x].Item2[y]);

            return(new MiniBatch(input, output));
        }
Example #5
        public Standard(ILinearAlgebraProvider lap, int inputSize, int outputSize, LayerDescriptor init, IActivationFunction activation, IWeightInitialisation weightInit)
        {
            _descriptor = init;
            _activation = activation;

            // initialise weights and bias
            _bias   = lap.Create(outputSize, x => weightInit.GetBias());
            _weight = lap.Create(inputSize, outputSize, (x, y) => weightInit.GetWeight(inputSize, outputSize, x, y));
        }
Example #6
        public void TestRandomProjection()
        {
            var a           = _lap.Create(256, 256, (x, y) => x * y).AsIndexable();
            var projector   = _lap.CreateRandomProjection(256, 32);
            var projections = projector.Compute(a);

            Assert.IsTrue(projections.ColumnCount == 32);
            Assert.IsTrue(projections.RowCount == 256);
        }
Example #7
        protected void _CreateFilter(IMatrix matrix)
        {
            Debug.Assert(_filter == null);

            // create a row level probability
            //var dropout = Enumerable.Range(0, matrix.ColumnCount).Select(v => _probabilityDistribution.Sample() / _invertedMultiplier).ToArray();

            // create a filter against the dropout probability
            _filter = _lap.Create(matrix.RowCount, matrix.ColumnCount, (x, y) => _probabilityDistribution.Sample() / _invertedMultiplier);
        }
Example #8
        public RegressionTrainer(ILinearAlgebraProvider lap, IDataTable table)
        {
            _lap = lap;
            var numRows          = table.RowCount;
            var numCols          = table.ColumnCount;
            int classColumnIndex = table.TargetColumnIndex;

            var data = table.GetNumericColumns(Enumerable.Range(0, numCols).Where(c => c != classColumnIndex));

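            // column 0 is a bias column of 1s; the remaining columns hold the numeric features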
            _feature = lap.Create(numRows, numCols, (i, j) => j == 0 ? 1 : data[j - 1][i]);
            _target  = lap.Create(table.GetColumn <float>(classColumnIndex));
        }
Example #9
        public ISequentialMiniBatch GetTrainingData(int sequenceLength, IReadOnlyList <int> rows)
        {
            var input     = new IMatrix[sequenceLength];
            var output    = new IMatrix[sequenceLength];
            var dataGroup = _inputData[sequenceLength];

            for (var k = 0; k < sequenceLength; k++)
            {
                input[k]  = _lap.Create(rows.Count, _inputSize, (x, y) => dataGroup[rows[x]].Item1[k].Input[y]);
                output[k] = _lap.Create(rows.Count, _outputSize, (x, y) => dataGroup[rows[x]].Item1[k].Output[y]);
            }
            return(new SequentialMiniBatch(input, output, rows.Select(r => dataGroup[r].Item2).ToArray()));
        }
Example #10
            public InternalLayer(ILinearAlgebraProvider lap, int inputSize, int outputSize, IActivationFunction activation, ConvolutionDescriptor descriptor, bool disableUpdate)
            {
                _inputSize     = inputSize;
                _outputSize    = outputSize;
                _activation    = activation;
                _descriptor    = descriptor;
                _disableUpdate = disableUpdate;

                var weightInit = lap.NN.GetWeightInitialisation(descriptor.WeightInitialisation);

                _bias   = lap.Create(outputSize, x => weightInit.GetBias());
                _weight = lap.Create(inputSize, outputSize, (x, y) => weightInit.GetWeight(inputSize, outputSize, x, y));
            }
Example #11
        StandardFeedForward _ReadFeedForward(NetworkLayer layer)
        {
            var descriptor = LayerDescriptor.CreateFrom(layer);

            var bias = _lap.Create(layer.OutputSize, 0f);

            bias.Data = layer.Bias;

            var weight = _lap.Create(layer.InputSize, layer.OutputSize, 0f);

            weight.Data = layer.Weight;

            return(new StandardFeedForward(weight, bias, _activation[descriptor.Activation]));
        }
Example #12
        public INeuralNetworkRecurrentBackpropagation Execute(List <IMatrix> curr, bool backpropagate)
        {
            var input  = curr[0];
            var memory = curr[1];

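            // standard LSTM cell: a = candidate memory, i = input gate,
            // f = forget gate, o = output gate; new cell state ct = (a .* i) + (f .* memory)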
            var a = Combine(input, memory, _wc.Layer, _uc.Layer, m => _activation.Calculate(m));
            var i = Combine(input, memory, _wi.Layer, _ui.Layer, m => m.SigmoidActivation());
            var f = Combine(input, memory, _wf.Layer, _uf.Layer, m => m.SigmoidActivation());
            var o = Combine(input, memory, _wo.Layer, _uo.Layer, m => m.SigmoidActivation());

            using (var f2 = f.PointwiseMultiply(memory)) {
                var ct = a.PointwiseMultiply(i);
                ct.AddInPlace(f2);
                var cta = _activation.Calculate(ct);

                curr[0] = o.PointwiseMultiply(cta);
                curr[1] = ct;

                if (backpropagate)
                {
                    var ones = _lap.Create(memory.RowCount, memory.ColumnCount, (x, y) => 1f);
                    return(new Backpropagation(_activation, ones, ct, cta, memory, o, a, i, f, input, _uc, _wc, _ui, _wi, _uf, _wf, _uo, _wo));
                }
                //memory.Dispose();
                //input.Dispose();
                a.Dispose();
                i.Dispose();
                f.Dispose();
                o.Dispose();
                cta.Dispose();
                return(null);
            }
        }
Example #13
        public NNMF(ILinearAlgebraProvider lap, IReadOnlyList <IIndexableVector> data, int numClusters, IErrorMetric costFunction = null)
        {
            _lap          = lap;
            _data         = data;
            _numClusters  = numClusters;
            _costFunction = costFunction ?? ErrorMetricType.RMSE.Create();

            // create the main matrix
            var rand = new Random();

            _dataMatrix = _lap.Create(data.Count, data.First().Count, (x, y) => data[x][y]);

            // create the weights and features
            _weights  = _lap.Create(_dataMatrix.RowCount, _numClusters, (x, y) => Convert.ToSingle(rand.NextDouble()));
            _features = _lap.Create(_numClusters, _dataMatrix.ColumnCount, (x, y) => Convert.ToSingle(rand.NextDouble()));
        }
Example #14
        public void TiedAutoEncoder()
        {
            const int DATA_SIZE = 1000, REDUCED_SIZE = 200;

            // create some random data
            var rand         = new Random();
            var trainingData = _lap.NN.CreateTrainingDataProvider(Enumerable.Range(0, 100)
                                                                  .Select(i => _lap.Create(DATA_SIZE, v => Convert.ToSingle(rand.NextDouble())))
                                                                  .Select(v => new TrainingExample(v.Data.Data, v.Data.Data))
                                                                  .ToList()
                                                                  );

            var layerTemplate = new LayerDescriptor(0f)
            {
                Activation   = ActivationType.Relu,
                WeightUpdate = WeightUpdateType.RMSprop
            };

            var firstLayer  = _lap.NN.CreateLayer(DATA_SIZE, REDUCED_SIZE, layerTemplate);
            var secondLayer = _lap.NN.CreateTiedLayer(firstLayer, layerTemplate);
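            // the tied decoder layer reuses the encoder's weight matrix (transposed), as in Example #21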
            var layers      = new[] {
                _lap.NN.CreateTrainer(firstLayer, layerTemplate),
                _lap.NN.CreateTrainer(secondLayer, layerTemplate)
            };
            var errorMetric = ErrorMetricType.RMSE.Create();

            using (var trainer = _lap.NN.CreateBatchTrainer(layers)) {
                var trainingContext = _lap.NN.CreateTrainingContext(errorMetric, 0.03f, 32);
                trainer.Train(trainingData, 2, trainingContext);
            }
        }
Example #15
        internal PerWeightUpdateBase(INeuralNetworkLayerUpdater layerUpdater, ILinearAlgebraProvider lap)
        {
            _layerUpdater = layerUpdater;
            var targetWeight = layerUpdater.Layer.Weight;

            _cache = lap.Create(targetWeight.RowCount, targetWeight.ColumnCount, (x, y) => 0f);
        }
Example #16
 public I3DTensor ExecuteToTensor(I3DTensor tensor)
 {
     using (var output = ExecuteToMatrix(tensor)) {
         // convert the matrix to a tensor
         var sliceList = new List <IMatrix>();
         for (int i = 0, len = output.ColumnCount; i < len; i++)
         {
             using (var vector = output.Column(i)) {
                 var parts       = vector.Split(tensor.ColumnCount);
                 var sliceMatrix = _lap.Create(parts);
                 sliceList.Add(sliceMatrix);
                 foreach (var part in parts)
                 {
                     part.Dispose();
                 }
             }
         }
         var ret = _lap.CreateTensor(sliceList);
         foreach (var slice in sliceList)
         {
             slice.Dispose();
         }
         return(ret);
     }
 }
Example #17
            public IMatrix Execute(IMatrix error, ITrainingContext context, bool calculateOutput, INeuralNetworkUpdateAccumulator updateAccumulator)
            {
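                // scatter each error column back through the saved index table: mapped
                // positions receive their original value, unmapped positions get zero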
                var matrixList = error.AsIndexable().Columns.Select(v => v.ToArray()).ToList();

                var newMatrixList = new List <IMatrix>();

                for (var i = 0; i < matrixList.Count; i++)
                {
                    var matrix = matrixList[i];
                    var table  = _indexPosList[i];

                    newMatrixList.Add(_lap.Create(_rows, _columns, (x, y) => {
                        if (table.TryGetValue(Tuple.Create(x, y), out var newIndex))
                        {
                            var newIndex2 = newIndex.Item1 * _newRows + newIndex.Item2;
                            return(matrix[newIndex2]);
                        }
                        return(0f);
                    }));
                }
                using (var tensor = _lap.CreateTensor(newMatrixList)) {
                    var ret = tensor.ConvertToMatrix();
                    foreach (var item in newMatrixList)
                    {
                        item.Dispose();
                    }
                    return(ret);
                }
            }
Example #18
        public I3DTensor ConvertToTensor(IMatrix matrix)
        {
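            // split each column of the matrix back into equal-width segments and
            // reassemble them as one slice of the output tensor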
            var sliceList = new List <IMatrix>();

            for (int i = 0, len = matrix.ColumnCount; i < len; i++)
            {
                using (var vector = matrix.Column(i)) {
                    var parts = vector.Split(_inputWidth);
                    //var sliceMatrix = _lap.Create(parts).Transpose();
                    var sliceMatrix = _lap.Create(parts);
                    sliceList.Add(sliceMatrix);
                    foreach (var part in parts)
                    {
                        part.Dispose();
                    }
                }
            }
            var ret = _lap.CreateTensor(sliceList);

            foreach (var slice in sliceList)
            {
                slice.Dispose();
            }
            return(ret);
        }
Example #19
        public AdamUpdater(INeuralNetworkLayerUpdater layerUpdater, ILinearAlgebraProvider lap, float decay, float decay2) : base(layerUpdater, lap)
        {
            _decay  = decay;
            _decay2 = decay2;
            var targetWeight = layerUpdater.Layer.Weight;

            _cache2 = lap.Create(targetWeight.RowCount, targetWeight.ColumnCount, (x, y) => 0f);
        }
Example #20
        public IMiniBatch GetTrainingData(IReadOnlyList <int> rows)
        {
            _backpropagation.Clear();

            var rowList    = new List <IVector>();
            var outputList = new List <float[]>();

            foreach (var item in rows)
            {
                var data            = _data[item];
                var tensor          = data.Item1;
                var backpropagation = _isTraining ? new Stack <IConvolutionalLayerBackpropagation>() : null;
                for (int i = 0, len = _layer.Count; i < len - 1; i++)
                {
                    var next = _layer[i].ExecuteToTensor(tensor, backpropagation);
                    if (tensor != data.Item1)
                    {
                        tensor.Dispose();
                    }
                    tensor = next;
                }
                rowList.Add(_layer.Last().ExecuteToVector(tensor, backpropagation));
                if (tensor != data.Item1)
                {
                    tensor.Dispose();
                }

                if (backpropagation != null)
                {
                    _backpropagation.Add(backpropagation);
                }
                outputList.Add(data.Item2);
            }

            var input = _lap.Create(rowList);

            foreach (var item in rowList)
            {
                item.Dispose();
            }

            var output = _lap.Create(rows.Count, _outputSize, (x, y) => outputList[x][y]);

            return(new MiniBatch(input, output));
        }
Example #21
 public TiedLayer(ILinearAlgebraProvider lap, INeuralNetworkLayer layer, IWeightInitialisation weightInit)
 {
     _inputSize       = layer.OutputSize;
     _outputSize      = layer.InputSize;
     _layer           = layer;
     _weight          = layer.Weight;
     _bias            = lap.Create(_outputSize, x => weightInit.GetBias());
     _weightTranspose = _weight.Transpose();
 }
Example #22
 public float[] Predict(IReadOnlyList <IReadOnlyList <float> > input)
 {
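     // prepend a bias column of 1s, multiply by theta, then apply the sigmoid
     // to turn each row's linear score into a probability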
     using (var feature = _lap.Create(input.Count, input[0].Count + 1, (i, j) => j == 0 ? 1 : input[i][j - 1]))
     using (var h0 = feature.Multiply(_theta))
     using (var h1 = h0.Column(0))
     using (var h = h1.Sigmoid())
     using (var h2 = h.AsIndexable()) {
         return(h2.ToArray());
     }
 }
Example #23
        public RandomProjection(ILinearAlgebraProvider lap, int fixedSize, int reducedSize, int s = 3)
        {
            _lap         = lap;
            _fixedSize   = fixedSize;
            _reducedSize = reducedSize;

            var c1           = Math.Sqrt(3);
            var distribution = new Categorical(new[] { 1.0 / (2 * s), 1 - (1.0 / s), 1.0 / (2 * s) });
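            // sparse random projection (Achlioptas): each entry becomes -sqrt(3), 0 or
            // +sqrt(3) with probabilities 1/(2s), 1 - 1/s and 1/(2s)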

            _matrix = _lap.Create(fixedSize, reducedSize, (i, j) => Convert.ToSingle((distribution.Sample() - 1) * c1));
        }
Example #24
        public KNNClassifier(ILinearAlgebraProvider lap, KNearestNeighbours model, int k, DistanceMetric distanceMetric = DistanceMetric.Euclidean)
        {
            _k              = k;
            _lap            = lap;
            _model          = model;
            _distanceMetric = distanceMetric;

            for (int i = 0, len = model.Instance.Length; i < len; i++)
            {
                _instance.Add(lap.Create(model.Instance[i].Data));
            }
        }
Example #25
        // normal method removed until GPU provider can properly calculate matrix inverses!

        //public LinearRegression Solve()
        //{
        //    // solve using normal method
        //    using (var lambdaMatrix = _lap.CreateIdentity(_feature.ColumnCount))
        //    using (var zero = _lap.Create(1, 0f)) {
        //        lambdaMatrix.UpdateColumn(0, zero.AsIndexable(), 0);

        //        using (var featureTranspose = _feature.Transpose())
        //        using (var pinv = featureTranspose.Multiply(_feature))
        //        using (var pinv2 = pinv.Add(lambdaMatrix))
        //        using (var pinv3 = pinv2.Inverse())
        //        using (var tc = _target.ToColumnMatrix())
        //        using (var a2 = featureTranspose.Multiply(tc))
        //        using (var ret = pinv3.Multiply(a2))
        //        using (var theta = ret.Column(0)) {
        //            return new LinearRegression {
        //                Theta = theta.Data
        //            };
        //        }
        //    }
        //}

        public LinearRegression GradientDescent(int iterations, float learningRate, float lambda = 0.1f, Func <float, bool> costCallback = null)
        {
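            // ridge-style shrink factor applied to every weight except the bias term (index 0)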
            var regularisation = 1f - (learningRate * lambda) / _feature.RowCount;
            var theta          = _lap.Create(_feature.ColumnCount, 0f);

            using (var regularisationVector = _lap.Create(theta.Count, i => i == 0 ? 1f : regularisation)) {
                for (var i = 0; i < iterations; i++)
                {
                    if (costCallback != null)
                    {
                        var cost = ComputeCost(theta, lambda);
                        if (!costCallback(cost))
                        {
                            break;
                        }
                    }

                    using (var p = _feature.Multiply(theta))
                    using (var pc = p.Column(0))
                    using (var e = pc.Subtract(_target))
                    using (var e2 = e.ToRowMatrix())
                    using (var d = e2.Multiply(_feature))
                    using (var delta = d.Row(0)) {
                        delta.Multiply(learningRate);
                        using (var temp = theta.PointwiseMultiply(regularisationVector)) {
                            var theta2 = temp.Subtract(delta);
                            theta.Dispose();
                            theta = theta2;
                        }
                    }
                }
            }

            var ret = new LinearRegression {
                Theta = theta.Data
            };

            theta.Dispose();
            return(ret);
        }
Example #26
        public IReadOnlyList <IVector> GetNumericColumns(ILinearAlgebraProvider lap, IEnumerable <int> columns = null)
        {
            var columnTable = (columns ?? Enumerable.Range(0, ColumnCount)).ToDictionary(i => i, i => new float[RowCount]);

            int index = 0;

            _Iterate(row => {
                foreach (var item in columnTable)
                {
                    item.Value[index] = row.GetField <float>(item.Key);
                }
                ++index;
                return(true);
            });

            return(columnTable.OrderBy(kv => kv.Key).Select(kv => lap.Create(kv.Value)).ToList());
        }
Example #27
        public IReadOnlyList <IVector> GetNumericRows(ILinearAlgebraProvider lap, IEnumerable <int> columns = null)
        {
            var columnList = new List <int>(columns ?? Enumerable.Range(0, ColumnCount));

            var ret = new List <IVector>();

            _Iterate(row => {
                int index  = 0;
                var buffer = new float[columnList.Count];
                foreach (var item in columnList)
                {
                    buffer[index++] = row.GetField <float>(item);
                }
                ret.Add(lap.Create(buffer));
                return(true);
            });

            return(ret);
        }
Example #28
        IEnumerable <Tuple <string, float> > _Classify(IRow row)
        {
            // encode the features into a vector
            var featureCount = _model.FeatureColumn.Length;
            var features     = new float[featureCount];

            for (var i = 0; i < featureCount; i++)
            {
                features[i] = row.GetField <float>(_model.FeatureColumn[i]);
            }

            // find the k closest neighbours and score the results based on proximity to rank the classifications
            using (var vector = _lap.Create(features)) {
                var distances = vector.FindDistances(_instance, _distanceMetric).AsIndexable();
                return(distances.Values
                       .Zip(_model.Classification, (s, l) => Tuple.Create(l, s))
                       .OrderBy(d => d.Item2)
                       .Take(_k)
                       .GroupBy(d => d.Item1)
                       .Select(g => Tuple.Create(g.Key, g.Sum(d => 1f / d.Item2)))
                       );
            }
        }
Example #29
        public IReadOnlyList <IReadOnlyList <IVector> > Cluster(IReadOnlyList <IVector> data, int numIterations, float errorThreshold = 0.001f)
        {
            if (data.Count == 0)
            {
                return(new List <IVector[]>());
            }

            // create the main matrix
            var data2 = new List <IIndexableVector>();

            foreach (var item in data)
            {
                data2.Add(item.AsIndexable());
            }
            using (var v = _lap.Create(data.Count, data.First().Count, (x, y) => data2[x][y])) {
                data2.ForEach(d => d.Dispose());

                // create the weights and features
                var rand     = new Random();
                var weights  = _lap.Create(v.RowCount, _numClusters, (x, y) => Convert.ToSingle(rand.NextDouble()));
                var features = _lap.Create(_numClusters, v.ColumnCount, (x, y) => Convert.ToSingle(rand.NextDouble()));

                // iterate
                float lastCost = 0;
                for (int i = 0; i < numIterations; i++)
                {
                    using (var wh = weights.Multiply(features)) {
                        var cost = _DifferenceCost(v, wh);
                        if (i % Math.Max(1, numIterations / 10) == 0)
                        {
                            Console.WriteLine("NNMF cost: " + cost);
                        }
                        if (cost <= errorThreshold)
                        {
                            break;
                        }
                        lastCost = cost;

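                        // multiplicative NMF updates (Lee & Seung), with W = weights,
                        // F = features, V = v:
                        //   F <- F .* (W'V) ./ (W'W F)
                        //   W <- W .* (V F') ./ (W F F')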
                        using (var wT = weights.Transpose())
                        using (var hn = wT.Multiply(v))
                        using (var wTw = wT.Multiply(weights))
                        using (var hd = wTw.Multiply(features))
                        using (var fhn = features.PointwiseMultiply(hn)) {
                            features.Dispose();
                            features = fhn.PointwiseDivide(hd);
                        }

                        using (var fT = features.Transpose())
                        using (var wn = v.Multiply(fT))
                        using (var wf = weights.Multiply(features))
                        using (var wd = wf.Multiply(fT))
                        using (var wwn = weights.PointwiseMultiply(wn)) {
                            weights.Dispose();
                            weights = wwn.PointwiseDivide(wd);
                        }
                    }
                }

                // weights gives cluster membership
                var documentClusters = weights.AsIndexable().Rows.Select((c, i) => Tuple.Create(i, c.MaximumIndex())).ToList();
                weights.Dispose();
                features.Dispose();
                return(documentClusters.GroupBy(d => d.Item2).Select(g => g.Select(d => data[d.Item1]).ToArray()).ToList());
            }
        }
Example #30
 /// <summary>
 /// Converts the image to a matrix
 /// </summary>
 /// <param name="lap">The linear algebra provider used to create the matrix</param>
 /// <returns>A Width x Height matrix containing the image data</returns>
 public IMatrix AsMatrix(ILinearAlgebraProvider lap)
 {
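     // result is a Width x Height matrix: element (i, j) is the pixel at x = i, y = j,
     // assuming Data stores the image row by row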
     return(lap.Create(Width, Height, (i, j) => Data[j * Width + i]));
 }