// Classify data using trained network parameters and count classification errors.
//
// test_input     : 3D test data (x, y, item index)
// test_output    : expected outputs, one row per class
// classes        : number of output classes
// items          : total number of test items
// batchsize      : number of items pushed through the network per forward pass
// classification : intended to receive the per-item predicted labels (but see NOTE below)
// pool           : forwarded to FeedForward; selects the pooling variant for subsampling layers
//
// Returns the total number of misclassified items, as accumulated by Test().
//
// NOTE(review): 'classification' is a by-value reference parameter. The Free +
// reassignment below replaces only the local reference — the caller never sees
// the newly allocated array, and the array the caller passed in has already been
// freed (dangling on the caller's side). This parameter likely needs to be 'ref';
// confirm against the call sites before changing the signature.
//
// NOTE(review): the loop assumes 'items' is an exact multiple of 'batchsize';
// a partial final batch would read past the end of test_input — TODO confirm.
        public int Classify(ManagedArray test_input, ManagedArray test_output, int classes, int items, int batchsize, ManagedArray classification, bool pool = false)
        {
            var errors = 0;

            // Reusable per-batch buffers: input slab, expected outputs, predicted labels.
            var tempx     = new ManagedArray(test_input.x, test_input.y, batchsize, false);
            var tempy     = new ManagedArray(batchsize, classes, false);
            var tempclass = new ManagedArray(1, batchsize, false);

            ManagedOps.Free(classification);

            classification = new ManagedArray(1, items, false);

            for (var i = 0; i < items; i += batchsize)
            {
                // generate batch: copy the next 'batchsize' items and their expected outputs
                ManagedOps.Copy3D(tempx, test_input, 0, 0, i);
                ManagedOps.Copy2D(tempy, test_output, i, 0);

                // classify
                FeedForward(tempx, pool);

                // count classification errors (Test also fills tempclass with predicted labels)
                errors += Test(tempy, tempclass);

                // save classification for this batch at offset i
                ManagedOps.Copy2DOffset(classification, tempclass, i, 0);
            }

            ManagedOps.Free(tempx, tempy, tempclass);

            return(errors);
        }
// Example #2
        // Combine two arrays column-wise (horizontal concatenation).
        // A and B must have the same number of rows (y); returns null otherwise.
        public static ManagedArray CBind(ManagedArray A, ManagedArray B)
        {
            // Guard: row counts must agree for a column-wise join.
            if (A.y != B.y)
            {
                return(null);
            }

            // Result is as tall as either input and as wide as both combined.
            var combined = new ManagedArray(A.x + B.x, A.y, false);

            // A occupies the leftmost columns; B starts immediately after A's last column.
            ManagedOps.Copy2DOffset(combined, A, 0, 0);
            ManagedOps.Copy2DOffset(combined, B, A.x, 0);

            return(combined);
        }
        // Compute Forward Transform on 3D Input.
        //
        // Runs one forward pass of the network on 'batch' (a 3D array: x, y, item index).
        // Side effects: stores each layer's activations in Layers[l].Activation, the
        // flattened end-layer feature maps in FeatureVector, and the final sigmoid
        // perceptron outputs in Output.
        //
        // pool: when true, subsampling layers use ManagedOps.Pool3D4D instead of a
        // strided copy — presumably max-pooling vs. plain average downsampling; confirm
        // semantics against ManagedOps.
        public void FeedForward(ManagedArray batch, bool pool = false)
        {
            var n    = Layers.Count;
            var last = n - 1;

            // Number of feature maps feeding the current layer (the input layer has 1).
            var InputMaps = 1;

            // Layer 0's activation is simply a copy of the input batch.
            ManagedOps.Free(Layers[0].Activation);
            Layers[0].Activation = new ManagedArray(batch, false);

            ManagedOps.Copy4D3D(Layers[0].Activation, batch, 0);

            for (var l = 1; l < n; l++)
            {
                var prev = l - 1;

                if (Layers[l].Type == LayerTypes.Convolution)
                {
                    // 'Valid' convolution shrinks each spatial dimension by KernelSize - 1.
                    var zx = Layers[prev].Activation.x - Layers[l].KernelSize + 1;
                    var zy = Layers[prev].Activation.y - Layers[l].KernelSize + 1;
                    var zz = batch.z;

                    ManagedOps.Free(Layers[l].Activation);
                    Layers[l].Activation = new ManagedArray(zx, zy, zz, Layers[l].OutputMaps, 1, false);

                    // Scratch buffers reused across maps: one input-map slab and one kernel.
                    var Activation = new ManagedArray(Layers[prev].Activation.x, Layers[prev].Activation.y, batch.z, false);
                    var FeatureMap = new ManagedArray(Layers[l].KernelSize, Layers[l].KernelSize, false);

                    // create temp output map (accumulator z and per-input-map result ztemp)
                    // NOTE(review): z's zero-initializing constructor is redundant — it is
                    // explicitly reset by Set(z, 0.0) at the top of each j-iteration.
                    var z     = new ManagedArray(zx, zy, zz);
                    var ztemp = new ManagedArray(zx, zy, zz, false);

                    // TODO: the nested loops below could likely be replaced by batched matrix operations.
                    for (var j = 0; j < Layers[l].OutputMaps; j++) // for each output map
                    {
                        // Reset the accumulator for this output map.
                        ManagedOps.Set(z, 0.0);

                        for (var i = 0; i < InputMaps; i++)
                        {
                            // Copy input map i of the previous layer and kernel (i, j) of this layer.
                            ManagedOps.Copy4D3D(Activation, Layers[prev].Activation, i);
                            ManagedOps.Copy4DIJ2D(FeatureMap, Layers[l].FeatureMap, i, j);

                            // Convolve with the corresponding kernel and accumulate into z.
                            ManagedConvolution.Valid(Activation, FeatureMap, ztemp);
                            ManagedMatrix.Add(z, ztemp);
                        }

                        // Add the scalar bias for this output map, pass through the sigmoid
                        // nonlinearity, and store the result as output map j.
                        ManagedMatrix.Add(z, Layers[l].Bias[j]);
                        var sigm = ManagedMatrix.Sigm(z);
                        ManagedOps.Copy3D4D(Layers[l].Activation, sigm, j);

                        ManagedOps.Free(sigm);
                    }

                    ManagedOps.Free(Activation, FeatureMap, z, ztemp);

                    // This layer's output maps feed the next layer.
                    InputMaps = Layers[l].OutputMaps;
                }
                else if (Layers[l].Type == LayerTypes.Subsampling)
                {
                    // downsample

                    // Generate a uniform averaging kernel: every entry is 1 / Scale^2.
                    var scale      = (double)(Layers[l].Scale * Layers[l].Scale);
                    var FeatureMap = new ManagedArray(Layers[l].Scale, Layers[l].Scale, false);
                    ManagedOps.Set(FeatureMap, 1.0 / scale);

                    // NOTE(review): integer division — assumes the previous layer's spatial
                    // dimensions are exact multiples of Scale; confirm network construction
                    // enforces this.
                    ManagedOps.Free(Layers[l].Activation);
                    Layers[l].Activation = new ManagedArray(Layers[prev].Activation.x / Layers[l].Scale, Layers[prev].Activation.y / Layers[l].Scale, batch.z, InputMaps, 1);

                    var Activation = new ManagedArray(Layers[prev].Activation.x, Layers[prev].Activation.y, batch.z, false);
                    var z          = new ManagedArray(Layers[prev].Activation.x - Layers[l].Scale + 1, Layers[prev].Activation.y - Layers[l].Scale + 1, batch.z, false);

                    for (var j = 0; j < InputMaps; j++)
                    {
                        // Copy input map j of the previous layer.
                        ManagedOps.Copy4D3D(Activation, Layers[prev].Activation, j);

                        // Subsample: valid convolution with the averaging kernel...
                        ManagedConvolution.Valid(Activation, FeatureMap, z);

                        // ...then reduce to the target size, either pooled or via a strided
                        // copy (picking every Scale-th element) — exact semantics live in
                        // ManagedOps.Pool3D4D / Copy3D4D.
                        if (pool)
                        {
                            ManagedOps.Pool3D4D(Layers[l].Activation, z, j, Layers[l].Scale);
                        }
                        else
                        {
                            ManagedOps.Copy3D4D(Layers[l].Activation, z, j, Layers[l].Scale);
                        }
                    }

                    ManagedOps.Free(Activation, FeatureMap, z);
                }
            }

            // Flatten the final layer: each of its 'i' feature maps contributes
            // MapSize = x * y values per batch item.
            var MapSize = Layers[last].Activation.x * Layers[last].Activation.y;

            ManagedOps.Free(FeatureVector);
            FeatureVector = new ManagedArray(batch.z, MapSize * Layers[last].Activation.i);

            var temp1D = new ManagedArray(Layers[last].Activation.y, Layers[last].Activation.x, false);
            var temp2D = new ManagedArray(Layers[last].Activation.x, Layers[last].Activation.y, false);

            // Concatenate all end-layer feature maps into one column vector per batch item.
            for (var j = 0; j < Layers[last].Activation.i; j++)
            {
                for (var ii = 0; ii < batch.z; ii++)
                {
                    // Use row-major order when flattening the feature map: transpose,
                    // then reshape the transposed copy into a 1 x MapSize strip.
                    ManagedOps.Copy4D2D(temp2D, Layers[last].Activation, ii, j);
                    ManagedMatrix.Transpose(temp1D, temp2D);
                    temp1D.Reshape(1, MapSize);
                    // Column ii, rows [j * MapSize, (j + 1) * MapSize) of FeatureVector.
                    ManagedOps.Copy2DOffset(FeatureVector, temp1D, ii, j * MapSize);
                }
            }

            // Fully-connected output stage: Weights * FeatureVector + Bias.
            var WeightsFeatureVector = new ManagedArray(FeatureVector.x, Weights.y, false);

            ManagedMatrix.Multiply(WeightsFeatureVector, Weights, FeatureVector);
            // Replicate the bias column across all batch.z items before adding.
            var repmat = new ManagedArray(batch.z, Bias.Length(), false);

            ManagedMatrix.Expand(Bias, batch.z, 1, repmat);
            ManagedMatrix.Add(WeightsFeatureVector, repmat);

            // feedforward into output perceptrons
            ManagedOps.Free(Output);
            Output = ManagedMatrix.Sigm(WeightsFeatureVector);

            ManagedOps.Free(WeightsFeatureVector, repmat, temp1D, temp2D);
        }