Example #1
        public static void Save2D4D(string filename, ManagedArray A, int i, int j, char delimiter = ',')
        {
            using (var file = new StreamWriter(filename, false))
            {
                var temp = new ManagedArray(A.x, A.y);

                ManagedOps.Copy4DIJ2D(temp, A, i, j);

                for (int y = 0; y < A.y; y++)
                {
                    for (int x = 0; x < A.x; x++)
                    {
                        file.Write("{0}", temp[x, y].ToString(ci));

                        if (x < A.x - 1)
                        {
                            file.Write(delimiter);
                        }
                    }

                    file.WriteLine();
                }

                ManagedOps.Free(temp);
            }
        }
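For reference, the same delimited layout can be produced for a plain double[,] without the ManagedArray wrapper. A minimal standalone sketch (SaveCsv is an illustrative name, not part of the library; CultureInfo.InvariantCulture stands in for the 'ci' field used above):

        // Standalone sketch mirroring the Save2D4D write loop above
        public static void SaveCsv(string filename, double[,] data, char delimiter = ',')
        {
            using (var file = new System.IO.StreamWriter(filename, false))
            {
                for (int y = 0; y < data.GetLength(0); y++)
                {
                    for (int x = 0; x < data.GetLength(1); x++)
                    {
                        file.Write(data[y, x].ToString(System.Globalization.CultureInfo.InvariantCulture));

                        if (x < data.GetLength(1) - 1)
                        {
                            file.Write(delimiter);
                        }
                    }

                    file.WriteLine();
                }
            }
        }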
Example #2
        public static double[,,,] Convert4DIJ(ManagedArray A)
        {
            var model = new double[A.i, A.j, A.y, A.x];

            var temp = new ManagedArray(A.x, A.y);

            for (var i = 0; i < A.i; i++)
            {
                for (var j = 0; j < A.j; j++)
                {
                    ManagedOps.Copy4DIJ2D(temp, A, i, j);

                    for (var y = 0; y < A.y; y++)
                    {
                        for (var x = 0; x < A.x; x++)
                        {
                            model[i, j, y, x] = temp[x, y];
                        }
                    }
                }
            }

            ManagedOps.Free(temp);

            return(model);
        }
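Convert4DIJ unpacks a ManagedArray into a native double[i, j, y, x] array, and Set (Example #5 below) performs the inverse. A hypothetical round-trip, assuming both helpers live on the same class (ManagedConvert is an illustrative name) and 'A' is a ManagedArray as above:

        // Hypothetical round-trip: ManagedArray -> double[,,,] -> ManagedArray
        var native = ManagedConvert.Convert4DIJ(A); // unpack to double[i, j, y, x]
        var copy   = ManagedConvert.Set(native);    // repack into a new ManagedArray
        ManagedOps.Free(copy);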
Example #3
        // Classify data using trained network parameters and count classification errors
        // 'classification' is reallocated below, so it is passed by ref to make
        // the new array visible to the caller
        public int Classify(ManagedArray test_input, ManagedArray test_output, int classes, int items, int batchsize, ref ManagedArray classification, bool pool = false)
        {
            var errors = 0;

            var tempx     = new ManagedArray(test_input.x, test_input.y, batchsize, false);
            var tempy     = new ManagedArray(batchsize, classes, false);
            var tempclass = new ManagedArray(1, batchsize, false);

            ManagedOps.Free(classification);

            classification = new ManagedArray(1, items, false);

            for (var i = 0; i < items; i += batchsize)
            {
                // generate batch
                ManagedOps.Copy3D(tempx, test_input, 0, 0, i);
                ManagedOps.Copy2D(tempy, test_output, i, 0);

                // classify
                FeedForward(tempx, pool);

                // count classification errors
                errors += Test(tempy, tempclass);

                // save classification
                ManagedOps.Copy2DOffset(classification, tempclass, i, 0);
            }

            ManagedOps.Free(tempx, tempy, tempclass);

            return(errors);
        }
Example #4
        public static void Load2D(string filename, ManagedArray A, char delimiter = ',')
        {
            if (File.Exists(filename))
            {
                var temp = new ManagedArray(A.x, A.y);

                var lines = File.ReadAllLines(filename);

                for (int y = 0; y < A.y; y++)
                {
                    if (y < lines.Length)
                    {
                        var tokens = lines[y].Split(delimiter);

                        for (int x = 0; x < A.x; x++)
                        {
                            temp[x, y] = Convert.ToDouble(tokens[x], ci);
                        }
                    }
                }

                ManagedOps.Copy2D(A, temp, 0, 0);

                ManagedOps.Free(temp);
            }
        }
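A standalone counterpart for plain arrays (LoadCsv is an illustrative name; InvariantCulture again stands in for the library's 'ci' field):

        // Parse delimited text into a double[rows, cols], mirroring Load2D
        public static double[,] LoadCsv(string filename, int rows, int cols, char delimiter = ',')
        {
            var data  = new double[rows, cols];
            var lines = System.IO.File.ReadAllLines(filename);

            for (int y = 0; y < rows && y < lines.Length; y++)
            {
                var tokens = lines[y].Split(delimiter);

                for (int x = 0; x < cols && x < tokens.Length; x++)
                {
                    data[y, x] = System.Convert.ToDouble(tokens[x], System.Globalization.CultureInfo.InvariantCulture);
                }
            }

            return data;
        }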
Example #5
        public static ManagedArray Set(double[,,,] A)
        {
            var ii = A.GetLength(0);
            var jj = A.GetLength(1);
            var yy = A.GetLength(2);
            var xx = A.GetLength(3);

            var model = new ManagedArray(xx, yy, 1, ii, jj);

            var temp = new ManagedArray(xx, yy);

            for (var i = 0; i < ii; i++)
            {
                for (var j = 0; j < jj; j++)
                {
                    for (var y = 0; y < yy; y++)
                    {
                        for (var x = 0; x < xx; x++)
                        {
                            temp[x, y] = A[i, j, y, x];
                        }
                    }

                    ManagedOps.Copy2D4DIJ(model, temp, i, j);
                }
            }

            ManagedOps.Free(temp);

            return(model);
        }
Example #6
        public static void Load2DV2(string filename, ManagedArray A, char delimiter = ',')
        {
            if (File.Exists(filename))
            {
                var temp = new ManagedArray(A.x, A.y);

                using (TextReader reader = File.OpenText(filename))
                {
                    for (int y = 0; y < A.y; y++)
                    {
                        var line = reader.ReadLine();

                        if (line != null)
                        {
                            var tokens = line.Split(delimiter);

                            for (int x = 0; x < A.x; x++)
                            {
                                temp[x, y] = Convert.ToDouble(tokens[x], ci);
                            }
                        }
                    }
                }

                ManagedOps.Copy2D(A, temp, 0, 0);

                ManagedOps.Free(temp);
            }
        }
Example #7
        public ManagedArray Predict(ManagedArray test, NeuralNetworkOptions opts)
        {
            Forward(test);

            var prediction = new ManagedArray(test.y);

            for (var y = 0; y < test.y; y++)
            {
                if (opts.Categories > 1)
                {
                    double maxval = Double.MinValue;

                    for (var x = 0; x < opts.Categories; x++)
                    {
                        double val = Yk[x, y];

                        if (val > maxval)
                        {
                            maxval = val;
                        }
                    }

                    prediction[y] = maxval;
                }
                else
                {
                    prediction[y] = Yk[y];
                }
            }

            // cleanup of arrays allocated in Forward
            ManagedOps.Free(A2, Yk, Z2);

            return(prediction);
        }
Example #8
 public void ClearDeltas()
 {
     for (var layer = 0; layer < Weights.GetLength(0); layer++)
     {
         // cleanup of arrays allocated in BackPropagation
         ManagedOps.Free(Deltas[layer]);
     }
 }
Example #9
        // Backward propagation
        public void BackPropagation(ManagedArray input)
        {
            var last = Weights.GetLength(0) - 1;

            D[0] = ManagedMatrix.Diff(Y, Y_true);

            var current = 1;

            for (var layer = last - 1; layer >= 0; layer--)
            {
                var prev = current - 1;

                var W  = new ManagedArray(Weights[layer + 1].x - 1, Weights[layer + 1].y, false);
                var DZ = ManagedMatrix.DSigm(Z[layer]);

                D[current] = (new ManagedArray(W.x, D[prev].y, false));

                ManagedOps.Copy2D(W, Weights[layer + 1], 1, 0);
                ManagedMatrix.Multiply(D[current], D[prev], W);
                ManagedMatrix.Product(D[current], DZ);

                ManagedOps.Free(W, DZ);

                current++;
            }

            for (var layer = 0; layer < Weights.GetLength(0); layer++)
            {
                var tD = ManagedMatrix.Transpose(D[Weights.GetLength(0) - layer - 1]);

                Deltas[layer] = (new ManagedArray(Weights[layer].x, Weights[layer].y, false));

                ManagedMatrix.Multiply(Deltas[layer], tD, X[layer]);
                ManagedMatrix.Multiply(Deltas[layer], 1.0 / input.y);

                ManagedOps.Free(tD);
            }

            Cost = 0.0;
            L2   = 0.0;

            for (var i = 0; i < Y_true.Length(); i++)
            {
                L2   += 0.5 * (D[0][i] * D[0][i]);
                Cost += (-Y_true[i] * Math.Log(Y[i]) - (1 - Y_true[i]) * Math.Log(1 - Y[i]));
            }

            Cost /= input.y;
            L2   /= input.y;

            // Cleanup
            for (var layer = 0; layer < Weights.GetLength(0); layer++)
            {
                ManagedOps.Free(D[layer], X[layer], Z[layer]);
            }
        }
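The Cost accumulated at the end of BackPropagation is binary cross-entropy averaged over the batch, and L2 is half the sum of squared output errors. The cost loop restated as a standalone helper (CrossEntropy is an illustrative name):

        // cost = (1 / n) * sum of [-t * ln(p) - (1 - t) * ln(1 - p)]
        public static double CrossEntropy(double[] target, double[] predicted, int batchsize)
        {
            var cost = 0.0;

            for (var i = 0; i < target.Length; i++)
            {
                cost += -target[i] * Math.Log(predicted[i]) - (1 - target[i]) * Math.Log(1 - predicted[i]);
            }

            return cost / batchsize;
        }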
Example #10
        public void Setup(ManagedArray input, int classes)
        {
            var random = new Random(Guid.NewGuid().GetHashCode());

            var InputMaps = 1;

            var MapSizeX = input.x;
            var MapSizeY = input.y;

            for (var l = 0; l < Layers.Count; l++)
            {
                if (Layers[l].Type == LayerTypes.Subsampling)
                {
                    MapSizeX = MapSizeX / Layers[l].Scale;
                    MapSizeY = MapSizeY / Layers[l].Scale;
                }

                if (Layers[l].Type == LayerTypes.Convolution)
                {
                    MapSizeX = MapSizeX - Layers[l].KernelSize + 1;
                    MapSizeY = MapSizeY - Layers[l].KernelSize + 1;

                    Layers[l].FeatureMap = new ManagedArray(Layers[l].KernelSize, Layers[l].KernelSize, 1, InputMaps, Layers[l].OutputMaps);

                    var fan_out = Layers[l].OutputMaps * Layers[l].KernelSize * Layers[l].KernelSize;

                    for (var j = 0; j < Layers[l].OutputMaps; j++)
                    {
                        var fan_in = InputMaps * Layers[l].KernelSize * Layers[l].KernelSize;

                        for (var i = 0; i < InputMaps; i++)
                        {
                            var rand = new ManagedArray(Layers[l].KernelSize, Layers[l].KernelSize);
                            Rand(rand, random, fan_in, fan_out);
                            ManagedOps.Copy2D4DIJ(Layers[l].FeatureMap, rand, i, j);
                            ManagedOps.Free(rand);
                        }
                    }

                    Layers[l].Bias = new ManagedArray(Layers[l].OutputMaps);
                    InputMaps      = Layers[l].OutputMaps;
                }
            }

            // 'onum' is the number of labels, which is why it is calculated using size(y, 1). If you have 20 labels, the output of the network will have 20 neurons.
            // 'fvnum' is the number of neurons in the last layer, the layer just before the output layer.
            // 'Bias' holds the biases of the output neurons.
            // 'Weights' holds the weights between the last layer and the output neurons. The last layer is fully connected to the output layer, which is why the weights have size (onum * fvnum).

            var fvnum = MapSizeX * MapSizeY * InputMaps;
            var onum  = classes;

            Bias    = new ManagedArray(1, onum);
            Weights = new ManagedArray(fvnum, onum);
            Rand(Weights, random, fvnum, onum);
        }
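Rand itself is not shown in these examples. One common scheme consistent with its (array, random, fan_in, fan_out) signature is Glorot/Xavier-style uniform initialization; the sketch below is an assumption, not the library's actual code:

        // Hypothetical Rand: uniform samples in [-limit, +limit]
        // with limit = sqrt(6 / (fan_in + fan_out)) (Glorot/Xavier)
        public static void Rand(ManagedArray rand, Random random, int fan_in, int fan_out)
        {
            var limit = Math.Sqrt(6.0 / (fan_in + fan_out));

            for (var i = 0; i < rand.Length(); i++)
            {
                rand[i] = (2.0 * random.NextDouble() - 1.0) * limit;
            }
        }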
Example #11
        public void LoadHiddenLayerWeights(string BaseDirectory, string BaseFileName, int sizex, int sizey)
        {
            var filename = string.Format("{0}/{1}.txt", BaseDirectory, BaseFileName);

            ManagedOps.Free(Wkj);

            Wkj = new ManagedArray(sizex, sizey);

            ManagedFile.Load2DV2(filename, Wkj);
        }
Example #12
        public void LoadNetworkBias(string BaseDirectory, string BaseFileName, int sizeb)
        {
            var filename = string.Format("{0}/{1}.txt", BaseDirectory, BaseFileName);

            ManagedOps.Free(Bias);

            Bias = new ManagedArray(1, sizeb);

            ManagedFile.Load1DY(filename, Bias);
        }
Example #13
        public void LoadNetworkWeights(string BaseDirectory, string BaseFileName, int sizex, int sizey)
        {
            var filename = string.Format("{0}/{1}.txt", BaseDirectory, BaseFileName);

            ManagedOps.Free(Weights);

            Weights = new ManagedArray(sizex, sizey);

            ManagedFile.Load2D(filename, Weights);
        }
Example #14
        public void LoadFeatureMapBias(string BaseDirectory, string BaseFileName, int layer, int sizei)
        {
            var filename = string.Format("{0}/{1}.txt", BaseDirectory, BaseFileName);

            ManagedOps.Free(Layers[layer].Bias);

            Layers[layer].Bias = new ManagedArray(sizei);

            ManagedFile.Load1D(filename, Layers[layer].Bias);
        }
Example #15
        public void Train(ManagedArray input, ManagedArray output, ConvolutionalNeuralNetworkOptions opts)
        {
            var temp_input  = new ManagedArray(input.x, input.y, opts.BatchSize, false);
            var temp_output = new ManagedArray(opts.BatchSize, output.y, false);

            var index_list = new ManagedIntList(opts.Items);

            for (var epoch = 0; epoch < opts.Epochs; epoch++)
            {
                var start = Profiler.now();

                if (opts.Shuffle)
                {
                    ManagedOps.Shuffle(index_list);
                }

                var rLVal = 0.0;

                rL.Clear();

                for (var i = 0; i < opts.Items; i += opts.BatchSize)
                {
                    if (opts.Shuffle)
                    {
                        ManagedOps.Copy3D(temp_input, input, 0, 0, i, index_list);
                        ManagedOps.Copy2D(temp_output, output, i, 0, index_list);
                    }
                    else
                    {
                        ManagedOps.Copy3D(temp_input, input, 0, 0, i);
                        ManagedOps.Copy2D(temp_output, output, i, 0);
                    }

                    FeedForward(temp_input, opts.Pool);
                    BackPropagation(temp_output);
                    ApplyGradients(opts);

                    if (rL.Count == 0)
                    {
                        rL.Add(L);
                    }

                    rLVal = 0.99 * rL[rL.Count - 1] + 0.01 * L;

                    rL.Add(rLVal);
                }

                Console.WriteLine("epoch {0}/{1} elapsed time is {2} ms - Error: {3}", (epoch + 1).ToString("D", ManagedMatrix.ci), opts.Epochs.ToString("D", ManagedMatrix.ci), Profiler.Elapsed(start).ToString("D", ManagedMatrix.ci), rLVal.ToString("0.000000", ManagedMatrix.ci));
            }

            ManagedOps.Free(index_list);

            ManagedOps.Free(temp_input, temp_output);
        }
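The running error rLVal is an exponential moving average of the per-batch loss L. As a standalone helper (illustrative name):

        // smoothed = decay * previous + (1 - decay) * current, with decay = 0.99 in Train above
        public static double SmoothedLoss(double previous, double current, double decay = 0.99)
        {
            return decay * previous + (1.0 - decay) * current;
        }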
Example #16
        public void ApplyGradients(NeuralNetworkOptions opts)
        {
            // dWji = learning_rate * dWji
            // dWkj = learning_rate * dWkj
            // w_ji = w_ji - dWji
            // w_kj = w_kj - dWkj
            ManagedMatrix.Add(Wkj, DeltaWkj, -opts.Alpha);
            ManagedMatrix.Add(Wji, DeltaWji, -opts.Alpha);

            // cleanup of arrays allocated in BackPropagation
            ManagedOps.Free(DeltaWji, DeltaWkj);
        }
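ManagedMatrix.Add(W, dW, -alpha) amounts to a plain gradient-descent step; the element-wise equivalent over flat arrays (illustrative sketch):

        // w[i] = w[i] - alpha * dw[i]
        public static void GradientStep(double[] w, double[] dw, double alpha)
        {
            for (var i = 0; i < w.Length; i++)
            {
                w[i] -= alpha * dw[i];
            }
        }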
Example #17
        // Transforms x into a column vector
        public static void Vector(ManagedArray x)
        {
            var temp = new ManagedArray(x.y, x.x);

            Transpose(temp, x);

            x.Reshape(1, x.Length());

            for (var i = 0; i < x.Length(); i++)
            {
                x[i] = temp[i];
            }

            ManagedOps.Free(temp);
        }
Example #18
        public void Free()
        {
            for (var i = 0; i < Layers.Count; i++)
            {
                ManagedOps.Free(Layers[i]);
            }

            ManagedOps.Free(Weights, WeightsDelta, WeightsTransposed);
            ManagedOps.Free(FeatureVector, FeatureVectorDelta);
            ManagedOps.Free(Output, OutputError, OutputDelta);
            ManagedOps.Free(Bias, BiasDelta);

            rL.Clear();
            Layers.Clear();
        }
Example #19
        // Forward Propagation
        public void Forward(ManagedArray training)
        {
            // add bias column to input layer
            var InputBias = new ManagedArray(1, training.y);

            ManagedOps.Set(InputBias, 1.0);

            // x = cbind(array(1, c(nrow(training_set), 1)), training_set)
            var x = ManagedMatrix.CBind(InputBias, training);

            // compute hidden layer activation

            // z_2 = x %*% t(w_ji)
            var tWji = new ManagedArray(Wji.y, Wji.x);

            ManagedMatrix.Transpose(tWji, Wji);

            Z2 = new ManagedArray(tWji.x, x.y);
            ManagedMatrix.Multiply(Z2, x, tWji);

            // z_j = nnet_sigmoid(z_2)
            var Zj = ManagedMatrix.Sigm(Z2);

            // add bias column to hidden layer output
            var HiddenBias = new ManagedArray(1, Zj.y);

            ManagedOps.Set(HiddenBias, 1.0);

            // a_2 = cbind(array(1, c(nrow(z_j), 1)), z_j)
            A2 = ManagedMatrix.CBind(HiddenBias, Zj);

            // compute output layer

            var tWkj = new ManagedArray(Wkj.y, Wkj.x);

            ManagedMatrix.Transpose(tWkj, Wkj);

            //  y_k = nnet_sigmoid(a_2 %*% t(w_kj))
            var A2Wkj = new ManagedArray(tWkj.x, A2.y);

            ManagedMatrix.Multiply(A2Wkj, A2, tWkj);

            Yk = ManagedMatrix.Sigm(A2Wkj);

            // cleanup
            ManagedOps.Free(A2Wkj, HiddenBias, InputBias);
            ManagedOps.Free(tWkj, tWji, x, Zj);
        }
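Sigm is the logistic activation used throughout these networks, and DSigm (used in BackPropagation) is its derivative s(z) * (1 - s(z)). A scalar sketch; the library's actual Sigm operates on whole ManagedArrays:

        // s(z) = 1 / (1 + exp(-z))
        public static double Sigmoid(double z)
        {
            return 1.0 / (1.0 + Math.Exp(-z));
        }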
Example #20
        public void LoadFeatureMap(string BaseDirectory, string BaseFileName, int layer, int sizex, int sizey, int sizei, int sizej)
        {
            ManagedOps.Free(Layers[layer].FeatureMap);

            Layers[layer].FeatureMap = new ManagedArray(sizex, sizey, 1, sizei, sizej);

            for (var i = 0; i < sizei; i++)
            {
                for (var j = 0; j < sizej; j++)
                {
                    var filename = string.Format("{0}/{1}{2,0:D2}{3,0:D2}.txt", BaseDirectory, BaseFileName, i + 1, j + 1);

                    ManagedFile.Load2D4D(filename, Layers[layer].FeatureMap, i, j);
                }
            }
        }
Example #21
        // Rotate a 2D matrix by 180 degrees (flip along both dimensions)
        public static void Rotate180(ManagedArray dst, ManagedArray src)
        {
            dst.Resize(src.x, src.y);

            var tmp = new ManagedArray(src.x, src.y, false);

            ManagedOps.Copy2D(tmp, src, 0, 0);

            for (int FlipDim = 0; FlipDim < 2; FlipDim++)
            {
                Flip(dst, tmp, FlipDim);

                ManagedOps.Copy2D(tmp, dst, 0, 0);
            }

            ManagedOps.Free(tmp);
        }
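Flipping a matrix along both dimensions is the same as rotating it 180 degrees. A direct standalone version for double[,], doing in one pass what the two Flip passes above do (illustrative, not the library's code):

        public static double[,] Rotate180(double[,] src)
        {
            var rows = src.GetLength(0);
            var cols = src.GetLength(1);

            var dst = new double[rows, cols];

            for (var y = 0; y < rows; y++)
            {
                for (var x = 0; x < cols; x++)
                {
                    // mirror both coordinates
                    dst[rows - 1 - y, cols - 1 - x] = src[y, x];
                }
            }

            return dst;
        }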
Example #22
        // Combine two arrays column-wise
        public static ManagedArray CBind(ManagedArray A, ManagedArray B)
        {
            if (A.y == B.y)
            {
                var resultx = A.x + B.x;
                var resulty = A.y;

                var result = new ManagedArray(resultx, resulty, false);

                ManagedOps.Copy2DOffset(result, A, 0, 0);
                ManagedOps.Copy2DOffset(result, B, A.x, 0);

                return(result);
            }

            return(null);
        }
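CBind is how the Forward implementations above prepend a bias column of ones to a data matrix. A minimal usage sketch ('data' is a hypothetical ManagedArray):

        // Prepend a column of ones: the result has data.x + 1 columns and data.y rows
        var bias = new ManagedArray(1, data.y);
        ManagedOps.Set(bias, 1.0);

        var withBias = ManagedMatrix.CBind(bias, data);

        ManagedOps.Free(bias);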
Example #23
        // Flip a 3D matrix along all three dimensions
        public static void FlipAll(ManagedArray dst, ManagedArray src)
        {
            dst.Resize(src.x, src.y, src.z, false);

            var tmp = new ManagedArray(src.x, src.y, src.z, false);

            ManagedOps.Copy3D(tmp, src, 0, 0, 0);

            for (var FlipDim = 0; FlipDim < 3; FlipDim++)
            {
                Flip(dst, tmp, FlipDim);

                ManagedOps.Copy3D(tmp, dst, 0, 0, 0);
            }

            ManagedOps.Free(tmp);
        }
Example #24
        public void Free()
        {
            ManagedOps.Free(Y);
            ManagedOps.Free(Y_true);

            if (Weights != null)
            {
                for (var layer = 0; layer < Weights.GetLength(0); layer++)
                {
                    ManagedOps.Free(Weights[layer]);
                }
            }

            if (Layers != null && Layers.Count > 0)
            {
                Layers.Clear();
            }
        }
Example #25
        // Forward Propagation
        public void Forward(ManagedArray input)
        {
            // create bias column
            var InputBias = new ManagedArray(1, input.y, false);

            ManagedOps.Set(InputBias, 1.0);

            // Compute input activations
            var last = Weights.GetLength(0) - 1;

            for (var layer = 0; layer < Weights.GetLength(0); layer++)
            {
                var XX = layer == 0 ? ManagedMatrix.CBind(InputBias, input) : ManagedMatrix.CBind(InputBias, Activations[layer - 1]);
                var tW = ManagedMatrix.Transpose(Weights[layer]);
                var ZZ = ManagedMatrix.Multiply(XX, tW);

                X[layer] = XX;
                Z[layer] = ZZ;

                if (layer != last)
                {
                    var SS = ManagedMatrix.Sigm(ZZ);

                    Activations[layer] = SS;
                }
                else
                {
                    ManagedOps.Free(Y);

                    Y = ManagedMatrix.Sigm(ZZ);
                }

                ManagedOps.Free(tW);
            }

            // Cleanup
            for (var layer = 0; layer < Activations.GetLength(0); layer++)
            {
                ManagedOps.Free(Activations[layer]);
            }

            ManagedOps.Free(InputBias);
        }
Example #26
        public ManagedIntList Classify(ManagedArray test, NeuralNetworkOptions opts, double threshold = 0.5)
        {
            Forward(test);

            var classification = new ManagedIntList(test.y);

            for (var y = 0; y < test.y; y++)
            {
                if (opts.Categories > 1)
                {
                    var maxval = double.MinValue;
                    var maxind = 0;

                    for (var x = 0; x < opts.Categories; x++)
                    {
                        var val = Y[x, y];

                        if (val > maxval)
                        {
                            maxval = val;
                            maxind = x;
                        }
                    }

                    classification[y] = maxind + 1;
                }
                else
                {
                    classification[y] = Y[y] > threshold ? 1 : 0;
                }
            }

            // cleanup of arrays allocated in Forward propagation
            ManagedOps.Free(Y);

            for (var layer = 0; layer < Weights.GetLength(0); layer++)
            {
                ManagedOps.Free(X[layer], Z[layer]);
            }

            return(classification);
        }
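The inner loop is an argmax over the category activations, returning 1-based labels to match the encoding used by Labels() below (which indexes with output[y] - 1). As a standalone helper (illustrative name):

        // Returns the 1-based index of the largest value in a row of activations
        public static int ArgMaxLabel(double[] row)
        {
            var maxval = double.MinValue;
            var maxind = 0;

            for (var x = 0; x < row.Length; x++)
            {
                if (row[x] > maxval)
                {
                    maxval = row[x];
                    maxind = x;
                }
            }

            return maxind + 1;
        }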
Example #27
        public ManagedArray Predict(ManagedArray test, NeuralNetworkOptions opts)
        {
            Forward(test);

            var prediction = new ManagedArray(test.y);

            for (var y = 0; y < test.y; y++)
            {
                if (opts.Categories > 1)
                {
                    double maxval = Double.MinValue;

                    for (var x = 0; x < opts.Categories; x++)
                    {
                        double val = Y[x, y];

                        if (val > maxval)
                        {
                            maxval = val;
                        }
                    }

                    prediction[y] = maxval;
                }
                else
                {
                    prediction[y] = Y[y];
                }
            }

            // cleanup of arrays allocated in Forward propagation
            ManagedOps.Free(Y);

            // Cleanup
            for (var layer = 0; layer < Weights.GetLength(0); layer++)
            {
                ManagedOps.Free(X[layer], Z[layer]);
            }

            return(prediction);
        }
Example #28
        public FuncOutput OptimizerCost(double[] X)
        {
            ReshapeWeights(X, Wji, Wkj);

            if (OptimizerInput != null)
            {
                Forward(OptimizerInput);
            }

            if (OptimizerInput != null)
            {
                BackPropagation(OptimizerInput);
            }

            X = ReshapeWeights(DeltaWji, DeltaWkj);

            // cleanup of arrays allocated in BackPropagation
            ManagedOps.Free(DeltaWji, DeltaWkj);

            return(new FuncOutput(Cost, X));
        }
Example #29
        public ManagedIntList Classify(ManagedArray test, NeuralNetworkOptions opts, double threshold = 0.5)
        {
            Forward(test);

            var classification = new ManagedIntList(test.y);

            for (var y = 0; y < test.y; y++)
            {
                if (opts.Categories > 1)
                {
                    var maxval = double.MinValue;
                    var maxind = 0;

                    for (var x = 0; x < opts.Categories; x++)
                    {
                        var val = Yk[x, y];

                        if (val > maxval)
                        {
                            maxval = val;
                            maxind = x;
                        }
                    }

                    classification[y] = maxind + 1;
                }
                else
                {
                    classification[y] = Yk[y] > threshold ? 1 : 0;
                }
            }

            // cleanup of arrays allocated in Forward
            ManagedOps.Free(A2, Yk, Z2);

            return(classification);
        }
Example #30
        ManagedArray Labels(ManagedArray output, NeuralNetworkOptions opts)
        {
            var result     = new ManagedArray(opts.Categories, opts.Items);
            var eye_matrix = ManagedMatrix.Diag(opts.Categories);

            for (var y = 0; y < opts.Items; y++)
            {
                if (opts.Categories > 1)
                {
                    for (var x = 0; x < opts.Categories; x++)
                    {
                        result[x, y] = eye_matrix[x, (int)output[y] - 1];
                    }
                }
                else
                {
                    result[y] = output[y];
                }
            }

            ManagedOps.Free(eye_matrix);

            return(result);
        }