Example #1
    public static Pixbuf Get(ManagedArray layer, bool transpose = true)
    {
        if (transpose)
        {
            var Transposed = new ManagedArray(layer, false);
            ManagedMatrix.Transpose(Transposed, layer);

            var pixbuf = new Pixbuf(Colorspace.Rgb, false, 8, Transposed.x, Transposed.y);

            double min = Double.MaxValue;
            double max = Double.MinValue;

            GetNormalization(Transposed, ref min, ref max);

            Activation.Draw(pixbuf, Transposed, min, max);

            ManagedOps.Free(Transposed);

            return(pixbuf);
        }
        else
        {
            var pixbuf = new Pixbuf(Colorspace.Rgb, false, 8, layer.x, layer.y);

            double min = Double.MaxValue;
            double max = Double.MinValue;

            GetNormalization(layer, ref min, ref max);

            Activation.Draw(pixbuf, layer, min, max);

            return(pixbuf);
        }
    }
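A minimal usage sketch for the visualizer above; the FullyConnected class name is an assumption borrowed from Examples #3 and #4, and `weights` stands for any ManagedArray of trained layer weights:

        // Hypothetical usage; FullyConnected as the enclosing class and the
        // `weights` array are assumptions, not part of the example itself.
        var rendered = FullyConnected.Get(weights);         // transposed view
        var original = FullyConnected.Get(weights, false);  // original orientation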
Example #2
    public static void Pixbuf(Pixbuf Digit, ManagedCNN cnn, ref int digit, ref double[] Probability)
    {
        // Pixbuf data is transposed
        var Transposed = new ManagedArray(28, 28, 1);
        var TestDigit  = new ManagedArray(28, 28, 1);

        var ScaledDigit = Digit.ScaleSimple(28, 28, InterpType.Hyper);

        Convert(ScaledDigit, TestDigit);
        ManagedMatrix.Transpose(Transposed, TestDigit);
        cnn.FeedForward(Transposed);

        digit = 0;
        double max = double.MinValue;

        for (int y = 0; y < cnn.Output.y; y++)
        {
            var val = cnn.Output[0, y];

            Probability[y] = val;

            if (val > max)
            {
                max   = val;
                digit = y;
            }
        }

        ScaledDigit.Dispose();

        ManagedOps.Free(TestDigit, Transposed);
    }
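A usage sketch for the classifier above, assuming a 10-class digit network; the Classify class name and the `drawnDigit` Pixbuf are hypothetical:

        // Hypothetical usage; Classify as the enclosing class, the 10-class
        // output size, and drawnDigit are assumptions.
        int digit = 0;
        var probability = new double[10];

        Classify.Pixbuf(drawnDigit, cnn, ref digit, ref probability);

        Console.WriteLine("Predicted digit: {0} (p = {1:F4})", digit, probability[digit]);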
Example #3
    public static Pixbuf Get(ManagedCNN cnn, int layer, int i, int j)
    {
        if (layer >= 0 && layer < cnn.Layers.Count && cnn.Layers[layer].Type == LayerTypes.Convolution && i >= 0 && i < cnn.Layers[layer].FeatureMap.i && j >= 0 && j < cnn.Layers[layer].FeatureMap.j)
        {
            var FeatureMap = new ManagedArray(cnn.Layers[layer].FeatureMap.x, cnn.Layers[layer].FeatureMap.y, cnn.Layers[layer].FeatureMap.z);
            var Transposed = new ManagedArray(FeatureMap);
            var pixbuf     = new Pixbuf(Colorspace.Rgb, false, 8, FeatureMap.y, FeatureMap.x);

            ManagedOps.Copy4DIJ2D(FeatureMap, cnn.Layers[layer].FeatureMap, i, j);
            ManagedMatrix.Transpose(Transposed, FeatureMap);

            // Get normalization values
            double min = Double.MaxValue;
            double max = Double.MinValue;

            FullyConnected.GetNormalization(Transposed, ref min, ref max);

            Activation.Draw(pixbuf, Transposed, min, max);

            ManagedOps.Free(Transposed);

            return(pixbuf);
        }

        // return empty pixbuf
        return(new Pixbuf(Colorspace.Rgb, false, 8, 1, 1));
    }
Example #4
    public static Pixbuf Get(ManagedCNN cnn, int layer)
    {
        if (layer >= 0 && layer < cnn.Layers.Count && cnn.Layers[layer].Type == LayerTypes.Convolution)
        {
            var Transposed = new ManagedArray(cnn.Layers[layer].Bias);
            ManagedMatrix.Transpose(Transposed, cnn.Layers[layer].Bias);

            var pixbuf = new Pixbuf(Colorspace.Rgb, false, 8, Transposed.x, Transposed.y);

            // Get normalization values
            double min = Double.MaxValue;
            double max = Double.MinValue;

            FullyConnected.GetNormalization(Transposed, ref min, ref max);

            Activation.Draw(pixbuf, Transposed, min, max);

            ManagedOps.Free(Transposed);

            return(pixbuf);
        }

        // return empty pixbuf
        return(new Pixbuf(Colorspace.Rgb, false, 8, 1, 1));
    }
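The two convolution-layer overloads above render different tensors: Get(cnn, layer, i, j) copies out the (i, j) kernel slice of the 4-D FeatureMap, while Get(cnn, layer) renders the layer's Bias. A combined usage sketch; the Convolution class name and the layer index are assumptions:

        // Hypothetical usage; the Convolution class name and layer index 1 are
        // assumptions, not shown in these examples.
        var kernelImage = Convolution.Get(cnn, 1, 0, 0);  // (i, j) = (0, 0) kernel
        var biasImage   = Convolution.Get(cnn, 1);        // bias of the same layer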
Example #5
        public static void Bitmap(Bitmap Digit, ManagedCNN cnn, ref int digit, ref double[] Probability)
        {
            // Bitmap Data is transposed
            var Transposed = new ManagedArray(28, 28, 1);
            var TestDigit  = new ManagedArray(28, 28, 1);

            var ScaledDigit = Resize(Digit, 28, 28, true);

            Convert(ScaledDigit, TestDigit);
            ManagedMatrix.Transpose(Transposed, TestDigit);
            cnn.FeedForward(Transposed);

            digit = 0;
            double max = double.MinValue;

            for (int y = 0; y < cnn.Output.y; y++)
            {
                var val = cnn.Output[0, y];

                Probability[y] = val;

                if (val > max)
                {
                    max   = val;
                    digit = y;
                }
            }

            ScaledDigit.Dispose();
            ManagedOps.Free(TestDigit, Transposed);
        }
Example #6
        public void Generate()
        {
            var m = Rows(dx);
            var n = Cols(dx);

            var idx = 0;

            for (var i = 0; i < m; i++)
            {
                if (Math.Abs(alpha[i]) > 0)
                {
                    idx++;
                }
            }

            ManagedOps.Free(ModelX, ModelY, Alpha, W, KernelParam);

            ModelX      = new ManagedArray(Cols(dx), idx);
            ModelY      = new ManagedArray(1, idx);
            Alpha       = new ManagedArray(1, idx);
            KernelParam = new ManagedArray(kparam);

            var ii = 0;

            for (var i = 0; i < m; i++)
            {
                if (Math.Abs(alpha[i]) > 0)
                {
                    for (int j = 0; j < n; j++)
                    {
                        ModelX[j, ii] = dx[j, i];
                    }

                    ModelY[ii] = dy[i];

                    Alpha[ii] = alpha[i];

                    ii++;
                }
            }

            B      = b;
            Passes = Iterations;
            ManagedOps.Copy2D(KernelParam, kparam, 0, 0);
            Type = ktype;

            var axy = ManagedMatrix.BSXMUL(alpha, dy);
            var tay = ManagedMatrix.Transpose(axy);
            var txx = ManagedMatrix.Multiply(tay, dx);

            W = ManagedMatrix.Transpose(txx);

            Trained = true;

            ManagedOps.Free(dx, dy, K, kparam, E, alpha, axy, tay, txx);
        }
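The W computed above through BSXMUL, Transpose, and Multiply is the linear-kernel weight vector w = sum_i alpha[i] * dy[i] * x_i over the training rows. An equivalent explicit-loop reading aid, written as if it ran inside Generate before the arrays are freed (the dx[column, row] indexing follows the ModelX copy loop above):

            // Reading aid only: w[j] accumulates alpha[i] * dy[i] * dx[j, i]
            // over all training rows i; the vectorized path above computes the
            // same values.
            var w = new double[n];

            for (var j = 0; j < n; j++)
            {
                for (var i = 0; i < m; i++)
                {
                    w[j] += alpha[i] * dy[i] * dx[j, i];
                }
            }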
Example #7
        static double Multiply(ManagedArray x1, ManagedArray x2)
        {
            Vectorize(x1, x2);

            var tx = ManagedMatrix.Transpose(x1);
            var xx = ManagedMatrix.Multiply(tx, x2);

            var x = xx[0];

            ManagedOps.Free(tx, xx);

            return(x);
        }
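This helper computes the dot product of two vectors: Vectorize flattens both arguments, and the transpose-multiply yields a 1x1 matrix whose single element is x1 · x2. The same value as an explicit loop, a sketch that assumes only the ManagedArray Length() and linear indexer already used elsewhere in these examples:

        static double MultiplyNaive(ManagedArray x1, ManagedArray x2)
        {
            // Dot product as an explicit loop; a sketch equivalent to the
            // transpose-and-multiply version above.
            Vectorize(x1, x2);

            double sum = 0;

            for (var i = 0; i < x1.Length(); i++)
            {
                sum += x1[i] * x2[i];
            }

            return(sum);
        }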
Example #8
        public static Bitmap Get(ManagedCNN cnn, int layer, bool transpose = true)
        {
            if (layer >= 0 && layer < cnn.Layers.Count && cnn.Layers[layer].Type == LayerTypes.Convolution)
            {
                if (transpose)
                {
                    var Transposed = new ManagedArray(cnn.Layers[layer].Bias, false);
                    ManagedMatrix.Transpose(Transposed, cnn.Layers[layer].Bias);

                    var bitmap = new Bitmap(Transposed.x, Transposed.y, PixelFormat.Format24bppRgb);

                    // Get normalization values
                    double min = Double.MaxValue;
                    double max = Double.MinValue;

                    GetNormalization(Transposed, ref min, ref max);

                    Draw(bitmap, Transposed, min, max);

                    ManagedOps.Free(Transposed);

                    return(bitmap);
                }
                else
                {
                    var bitmap = new Bitmap(cnn.Layers[layer].Bias.x, cnn.Layers[layer].Bias.y, PixelFormat.Format24bppRgb);

                    // Get normalization values
                    double min = Double.MaxValue;
                    double max = Double.MinValue;

                    GetNormalization(cnn.Layers[layer].Bias, ref min, ref max);

                    Draw(bitmap, cnn.Layers[layer].Bias, min, max);

                    return(bitmap);
                }
            }

            // return empty bitmap
            return(new Bitmap(1, 1, PixelFormat.Format24bppRgb));
        }
Example #9
        public static Bitmap Get(ManagedCNN cnn, int layer, int map)
        {
            if (layer >= 0 && layer < cnn.Layers.Count && map >= 0 && map < cnn.Layers[layer].Activation.i)
            {
                var Activation = new ManagedArray(cnn.Layers[layer].Activation.x, cnn.Layers[layer].Activation.y, cnn.Layers[layer].Activation.z);
                var Transposed = new ManagedArray(Activation);
                var bitmap     = new Bitmap(cnn.Layers[layer].Activation.x, cnn.Layers[layer].Activation.y, PixelFormat.Format24bppRgb);

                ManagedOps.Copy4D2D(Activation, cnn.Layers[layer].Activation, 0, map);
                ManagedMatrix.Transpose(Transposed, Activation);

                // Get normalization values
                double min = Double.MaxValue;
                double max = Double.MinValue;

                for (int y = 0; y < Transposed.y; y++)
                {
                    for (int x = 0; x < Transposed.x; x++)
                    {
                        if (Transposed[x, y] > max)
                        {
                            max = Transposed[x, y];
                        }

                        if (Transposed[x, y] < min)
                        {
                            min = Transposed[x, y];
                        }
                    }
                }

                Draw(bitmap, Transposed, min, max);

                ManagedOps.Free(Activation, Transposed);

                return(bitmap);
            }

            // return empty bitmap
            return(new Bitmap(1, 1, PixelFormat.Format24bppRgb));
        }
Example #10
    public static Pixbuf Get(ManagedCNN cnn, int layer, int map)
    {
        if (layer >= 0 && layer < cnn.Layers.Count && map >= 0 && map < cnn.Layers[layer].Activation.i)
        {
            var Activation = new ManagedArray(cnn.Layers[layer].Activation.x, cnn.Layers[layer].Activation.y, cnn.Layers[layer].Activation.z);
            var Transposed = new ManagedArray(Activation);
            var pixbuf     = new Pixbuf(Colorspace.Rgb, false, 8, Activation.y, Activation.x);

            ManagedOps.Copy4D2D(Activation, cnn.Layers[layer].Activation, 0, map);
            ManagedMatrix.Transpose(Transposed, Activation);

            // Get normalization values
            double min = Double.MaxValue;
            double max = Double.MinValue;

            for (int y = 0; y < Transposed.y; y++)
            {
                for (int x = 0; x < Transposed.x; x++)
                {
                    if (Transposed[x, y] > max)
                    {
                        max = Transposed[x, y];
                    }

                    if (Transposed[x, y] < min)
                    {
                        min = Transposed[x, y];
                    }
                }
            }

            Draw(pixbuf, Transposed, min, max);

            ManagedOps.Free(Activation, Transposed);

            return(pixbuf);
        }

        // return empty pixbuf
        return(new Pixbuf(Colorspace.Rgb, false, 8, 1, 1));
    }
Example #11
        public static Bitmap Get(ManagedCNN cnn, int layer, int i, int j)
        {
            if (layer >= 0 && layer < cnn.Layers.Count && i >= 0 && i < cnn.Layers[layer].FeatureMap.i && j >= 0 && j < cnn.Layers[layer].FeatureMap.j)
            {
                var FeatureMap = new ManagedArray(cnn.Layers[layer].FeatureMap.x, cnn.Layers[layer].FeatureMap.y, cnn.Layers[layer].FeatureMap.z);
                var Transposed = new ManagedArray(FeatureMap);
                var bitmap     = new Bitmap(cnn.Layers[layer].FeatureMap.x, cnn.Layers[layer].FeatureMap.y, PixelFormat.Format24bppRgb);

                ManagedOps.Copy4DIJ2D(FeatureMap, cnn.Layers[layer].FeatureMap, i, j);
                ManagedMatrix.Transpose(Transposed, FeatureMap);

                // Get normalization values
                double min = Double.MaxValue;
                double max = Double.MinValue;

                for (int y = 0; y < Transposed.y; y++)
                {
                    for (int x = 0; x < Transposed.x; x++)
                    {
                        if (Transposed[x, y] > max)
                        {
                            max = Transposed[x, y];
                        }

                        if (Transposed[x, y] < min)
                        {
                            min = Transposed[x, y];
                        }
                    }
                }

                Draw(bitmap, Transposed, min, max);

                ManagedOps.Free(FeatureMap, Transposed);

                return(bitmap);
            }

            // return empty bitmap
            return(new Bitmap(1, 1, PixelFormat.Format24bppRgb));
        }
Example #12
        public static Bitmap Get(ManagedArray layer, bool transpose = true)
        {
            Console.WriteLine("Layer dimensions: {0} {1}", layer.x, layer.y);

            if (transpose)
            {
                var Transposed = new ManagedArray(layer, false);
                ManagedMatrix.Transpose(Transposed, layer);

                var bitmap = new Bitmap(Transposed.x, Transposed.y, PixelFormat.Format24bppRgb);

                // Get normalization values
                double min = Double.MaxValue;
                double max = Double.MinValue;

                GetNormalization(Transposed, ref min, ref max);

                Draw(bitmap, Transposed, min, max);

                ManagedOps.Free(Transposed);

                return(bitmap);
            }
            else
            {
                var bitmap = new Bitmap(layer.x, layer.y, PixelFormat.Format24bppRgb);

                // Get normalization values
                double min = Double.MaxValue;
                double max = Double.MinValue;

                GetNormalization(layer, ref min, ref max);

                Draw(bitmap, layer, min, max);

                return(bitmap);
            }
        }
Example #13
        public void Setup(ManagedArray x, ManagedArray y, double c, KernelType kernel, ManagedArray param, double tolerance = 0.001, int maxpasses = 5, int category = 1)
        {
            ManagedOps.Free(dx, dy);
            dx = new ManagedArray(x);
            dy = new ManagedArray(y);

            ManagedOps.Copy2D(dx, x, 0, 0);
            ManagedOps.Copy2D(dy, y, 0, 0);

            ktype = kernel;

            // Data parameters
            var m = Rows(dx);

            Category      = category;
            MaxIterations = maxpasses;
            Tolerance     = tolerance;
            C             = c;

            // Reset internal variables
            ManagedOps.Free(K, kparam, E, alpha);

            kparam = new ManagedArray(param);
            ManagedOps.Copy2D(kparam, param, 0, 0);

            // Variables
            alpha      = new ManagedArray(1, m);
            E          = new ManagedArray(1, m);
            b          = 0;
            Iterations = 0;

            // Pre-compute the Kernel Matrix since our dataset is small
            // (In practice, optimized SVM packages that handle large datasets
            // gracefully will *not* do this)
            if (kernel == KernelType.LINEAR)
            {
                // Computation for the Linear Kernel
                // This is equivalent to computing the kernel on every pair of examples
                var tinput = ManagedMatrix.Transpose(dx);

                K = ManagedMatrix.Multiply(dx, tinput);

                double slope = kparam.Length() > 0 ? kparam[0] : 1;
                double inter = kparam.Length() > 1 ? kparam[1] : 0;

                ManagedMatrix.Multiply(K, slope);
                ManagedMatrix.Add(K, inter);

                ManagedOps.Free(tinput);
            }
            else if (kernel == KernelType.GAUSSIAN || kernel == KernelType.RADIAL)
            {
                // RBF Kernel
                // This is equivalent to computing the kernel on every pair of examples
                var pX2 = ManagedMatrix.Pow(dx, 2);
                var rX2 = ManagedMatrix.RowSums(pX2);
                var tX2 = ManagedMatrix.Transpose(rX2);
                var trX = ManagedMatrix.Transpose(dx);

                var tempK = new ManagedArray(m, m);
                var temp1 = new ManagedArray(m, m);
                var temp2 = ManagedMatrix.Multiply(dx, trX);

                ManagedMatrix.Expand(rX2, m, 1, tempK);
                ManagedMatrix.Expand(tX2, 1, m, temp1);
                ManagedMatrix.Multiply(temp2, -2);

                ManagedMatrix.Add(tempK, temp1);
                ManagedMatrix.Add(tempK, temp2);

                double sigma = kparam.Length() > 0 ? kparam[0] : 1;

                var g = Math.Abs(sigma) > 0 ? Math.Exp(-1 / (2 * sigma * sigma)) : 0;

                if (kernel == KernelType.RADIAL)
                {
                    ManagedMatrix.Sqrt(tempK);
                }

                K = ManagedMatrix.Pow(g, tempK);

                ManagedOps.Free(pX2, rX2, tX2, trX, tempK, temp1, temp2);
            }
            else
            {
                // Pre-compute the Kernel Matrix
                // The following can be slow due to the lack of vectorization
                K = new ManagedArray(m, m);

                var Xi = new ManagedArray(Cols(dx), 1);
                var Xj = new ManagedArray(Cols(dx), 1);

                for (var i = 0; i < m; i++)
                {
                    ManagedOps.Copy2D(Xi, dx, 0, i);

                    for (var j = 0; j < m; j++)
                    {
                        ManagedOps.Copy2D(Xj, dx, 0, j);

                        K[j, i] = KernelFunction.Run(kernel, Xi, Xj, kparam);

                        // the matrix is symmetric
                        K[i, j] = K[j, i];
                    }
                }

                ManagedOps.Free(Xi, Xj);
            }

            eta = 0;
            L   = 0;
            H   = 0;

            // Map 0 (or other categories) to -1
            for (var i = 0; i < Rows(dy); i++)
            {
                dy[i] = (int)dy[i] != Category ? -1 : 1;
            }
        }
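Setup, Generate, and Predict (Examples #6 and #14) suggest a three-stage workflow: Setup caches the data and pre-computes the kernel matrix, an SMO-style optimization pass (not shown in these examples) fills in alpha and b, Generate keeps the rows with non-zero alpha as support vectors, and Predict scores new inputs. A hypothetical sketch of that workflow; the ManagedSVM class name and the omitted optimization step are assumptions:

            // Hypothetical workflow sketch; the ManagedSVM class name and the
            // optimization step between Setup and Generate are assumptions.
            var svm = new ManagedSVM();

            svm.Setup(trainX, trainY, 1.0, KernelType.GAUSSIAN, kernelParams);

            // ... run the SMO optimization that sets alpha and b (not shown) ...

            svm.Generate();

            var predictions = svm.Predict(testX);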
Example #14
        // SVMPREDICT returns a vector of predictions using a trained SVM model
        // (svm_train).
        //
        // pred = SVMPREDICT(model, X) returns a vector of predictions using a
        // trained SVM model (svm_train). X is an m x n matrix where each
        // example is a row. model is an SVM model returned from svm_train.
        // pred is an m x 1 column of predictions of {0, 1} values.
        //
        // Converted to R by: SD Separa (2016/03/18)
        // Converted to C# by: SD Separa (2018/09/29)
        public ManagedArray Predict(ManagedArray input)
        {
            var predictions = new ManagedArray(1, Rows(input));

            if (Trained)
            {
                var x = new ManagedArray(input);

                if (Cols(x) == 1)
                {
                    ManagedMatrix.Transpose(x, input);
                }
                else
                {
                    ManagedOps.Copy2D(x, input, 0, 0);
                }

                var m = Rows(x);

                predictions.Resize(1, m);

                if (Type == KernelType.LINEAR)
                {
                    ManagedMatrix.Multiply(predictions, x, W);
                    ManagedMatrix.Add(predictions, B);
                }
                else if (Type == KernelType.GAUSSIAN || Type == KernelType.RADIAL)
                {
                    // RBF Kernel
                    // This is equivalent to computing the kernel on every pair of examples
                    var pX1 = ManagedMatrix.Pow(x, 2);
                    var pX2 = ManagedMatrix.Pow(ModelX, 2);
                    var rX2 = ManagedMatrix.RowSums(pX2);

                    var X1 = ManagedMatrix.RowSums(pX1);
                    var X2 = ManagedMatrix.Transpose(rX2);
                    var tX = ManagedMatrix.Transpose(ModelX);
                    var tY = ManagedMatrix.Transpose(ModelY);
                    var tA = ManagedMatrix.Transpose(Alpha);

                    var rows = Rows(X1);
                    var cols = Cols(X2);

                    var tempK = new ManagedArray(cols, rows);
                    var temp1 = new ManagedArray(cols, rows);
                    var temp2 = ManagedMatrix.Multiply(x, tX);

                    ManagedMatrix.Multiply(temp2, -2);

                    ManagedMatrix.Expand(X1, cols, 1, tempK);
                    ManagedMatrix.Expand(X2, 1, rows, temp1);

                    ManagedMatrix.Add(tempK, temp1);
                    ManagedMatrix.Add(tempK, temp2);

                    var sigma = KernelParam.Length() > 0 ? KernelParam[0] : 1;

                    if (Type == KernelType.RADIAL)
                    {
                        ManagedMatrix.Sqrt(tempK);
                    }

                    var g = Math.Abs(sigma) > 0 ? Math.Exp(-1 / (2 * sigma * sigma)) : 0;

                    var Kernel = ManagedMatrix.Pow(g, tempK);

                    var tempY = new ManagedArray(Cols(tY), rows);
                    var tempA = new ManagedArray(Cols(tA), rows);

                    ManagedMatrix.Expand(tY, 1, rows, tempY);
                    ManagedMatrix.Expand(tA, 1, rows, tempA);

                    ManagedMatrix.Product(Kernel, tempY);
                    ManagedMatrix.Product(Kernel, tempA);

                    var p = ManagedMatrix.RowSums(Kernel);

                    ManagedOps.Copy2D(predictions, p, 0, 0);
                    ManagedMatrix.Add(predictions, B);

                    ManagedOps.Free(pX1, pX2, rX2, X1, X2, tempK, temp1, temp2, tX, tY, tA, tempY, tempA, Kernel, p);
                }
                else
                {
                    var Xi = new ManagedArray(Cols(x), 1);
                    var Xj = new ManagedArray(Cols(ModelX), 1);

                    for (var i = 0; i < m; i++)
                    {
                        double prediction = 0;

                        ManagedOps.Copy2D(Xi, x, 0, i);

                        for (var j = 0; j < Rows(ModelX); j++)
                        {
                            ManagedOps.Copy2D(Xj, ModelX, 0, j);

                            prediction += Alpha[j] * ModelY[j] * KernelFunction.Run(Type, Xi, Xj, KernelParam);
                        }

                        predictions[i] = prediction + B;
                    }

                    ManagedOps.Free(Xi, Xj);
                }

                ManagedOps.Free(x);
            }

            return(predictions);
        }
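Note that the header comment promises {0, 1} predictions, but the method as written returns raw decision values (the kernel expansion plus B). A caller wanting hard labels would typically threshold at zero; a sketch, using the 1 x m shape of the returned array:

            // Sketch: thresholding the raw decision values returned by Predict
            // into the {0, 1} labels described in the header comment.
            // predictions is a 1 x m column, so predictions.y is its length.
            var labels = new int[predictions.y];

            for (var i = 0; i < predictions.y; i++)
            {
                labels[i] = predictions[i] >= 0 ? 1 : 0;
            }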