Пример #1
0
        // Loads delimited numeric text from a file into A, one matrix row per line,
        // reading at most A.y lines and A.x values per line. Values are parsed with
        // the invariant culture (ci). Missing lines or tokens leave the affected
        // cells at their default value. A's dimensions are unchanged.
        //
        // filename  - path of the text file to read (method is a no-op if absent)
        // A         - destination array whose x/y dimensions bound how much is read
        // delimiter - token separator within each line (default ',')
        public static void Load2DV2(string filename, ManagedArray A, char delimiter = ',')
        {
            if (File.Exists(filename))
            {
                var temp = new ManagedArray(A.x, A.y);

                using (TextReader reader = File.OpenText(filename))
                {
                    for (int y = 0; y < A.y; y++)
                    {
                        var line = reader.ReadLine();

                        if (line != null)
                        {
                            var tokens = line.Split(delimiter);

                            // guard against short/malformed lines so a missing token
                            // does not throw IndexOutOfRangeException
                            var limit = Math.Min(A.x, tokens.Length);

                            for (int x = 0; x < limit; x++)
                            {
                                temp[x, y] = Convert.ToDouble(tokens[x], ci);
                            }
                        }
                    }
                }

                ManagedOps.Copy2D(A, temp, 0, 0);

                ManagedOps.Free(temp);
            }
        }
Пример #2
0
        // Loads delimited numeric text from a file into A, one matrix row per line,
        // reading at most A.y lines and A.x values per line. Values are parsed with
        // the invariant culture (ci). Missing lines or tokens leave the affected
        // cells at their default value. A's dimensions are unchanged.
        //
        // filename  - path of the text file to read (method is a no-op if absent)
        // A         - destination array whose x/y dimensions bound how much is read
        // delimiter - token separator within each line (default ',')
        public static void Load2D(string filename, ManagedArray A, char delimiter = ',')
        {
            if (File.Exists(filename))
            {
                var temp = new ManagedArray(A.x, A.y);

                var lines = File.ReadAllLines(filename);

                for (int y = 0; y < A.y; y++)
                {
                    if (y < lines.Length)
                    {
                        var tokens = lines[y].Split(delimiter);

                        // guard against short/malformed lines so a missing token
                        // does not throw IndexOutOfRangeException
                        var limit = Math.Min(A.x, tokens.Length);

                        for (int x = 0; x < limit; x++)
                        {
                            temp[x, y] = Convert.ToDouble(tokens[x], ci);
                        }
                    }
                }

                ManagedOps.Copy2D(A, temp, 0, 0);

                ManagedOps.Free(temp);
            }
        }
Пример #3
0
        // Classify data using trained network parameters and count classification errors
        //
        // test_input  - input samples; batchsize 2D slices are copied out at a time
        // test_output - expected outputs for the samples
        // classes     - number of output classes (sizes the per-batch target buffer)
        // items       - number of samples to process; presumably a multiple of
        //               batchsize — TODO confirm (a ragged final batch would copy
        //               past the end of the inputs)
        // batchsize   - samples processed per FeedForward call
        // classification - per-sample classification results (see NOTE below)
        // pool        - forwarded to FeedForward
        //
        // Returns the total number of misclassified samples as counted by Test().
        //
        // NOTE(review): 'classification' is received by value, so freeing it and
        // re-assigning the parameter below rebinds only the local variable — the
        // caller keeps its original (now freed) reference and never observes the
        // array filled here. This looks like it needs 'ref' (or the caller must
        // pass a correctly sized array that is not reallocated); verify against
        // call sites before changing the signature.
        public int Classify(ManagedArray test_input, ManagedArray test_output, int classes, int items, int batchsize, ManagedArray classification, bool pool = false)
        {
            var errors = 0;

            // scratch buffers reused across batches
            var tempx     = new ManagedArray(test_input.x, test_input.y, batchsize, false);
            var tempy     = new ManagedArray(batchsize, classes, false);
            var tempclass = new ManagedArray(1, batchsize, false);

            ManagedOps.Free(classification);

            classification = new ManagedArray(1, items, false);

            for (var i = 0; i < items; i += batchsize)
            {
                // generate batch
                ManagedOps.Copy3D(tempx, test_input, 0, 0, i);
                ManagedOps.Copy2D(tempy, test_output, i, 0);

                // classify
                FeedForward(tempx, pool);

                // count classification errors
                errors += Test(tempy, tempclass);

                // save classification
                ManagedOps.Copy2DOffset(classification, tempclass, i, 0);
            }

            ManagedOps.Free(tempx, tempy, tempclass);

            return(errors);
        }
Пример #4
0
        // Backward propagation
        //
        // Computes the per-layer error deltas D[] (output layer first, working
        // backwards), accumulates the per-layer weight gradients into Deltas[],
        // and evaluates the cross-entropy Cost and squared-error L2, both
        // averaged over the batch (input.y). Frees the intermediates (D, X, Z)
        // presumably cached by the forward pass — TODO confirm against FeedForward.
        //
        // input - the batch previously fed forward; only input.y (batch size) is
        //         read here, for averaging
        public void BackPropagation(ManagedArray input)
        {
            var last = Weights.GetLength(0) - 1;

            // output-layer delta: prediction minus target
            D[0] = ManagedMatrix.Diff(Y, Y_true);

            var current = 1;

            // propagate deltas backwards; W is Weights[layer + 1] with its bias
            // column (column 0) stripped off
            for (var layer = last - 1; layer >= 0; layer--)
            {
                var prev = current - 1;

                var W  = new ManagedArray(Weights[layer + 1].x - 1, Weights[layer + 1].y, false);
                var DZ = ManagedMatrix.DSigm(Z[layer]);

                D[current] = (new ManagedArray(W.x, D[prev].y, false));

                ManagedOps.Copy2D(W, Weights[layer + 1], 1, 0);
                ManagedMatrix.Multiply(D[current], D[prev], W);
                // element-wise product with the sigmoid derivative of Z[layer]
                ManagedMatrix.Product(D[current], DZ);

                ManagedOps.Free(W, DZ);

                current++;
            }

            // gradient per layer: (delta^T x activation) / batch size; note that
            // D[] is indexed in reverse (D[0] belongs to the output layer)
            for (var layer = 0; layer < Weights.GetLength(0); layer++)
            {
                var tD = ManagedMatrix.Transpose(D[Weights.GetLength(0) - layer - 1]);

                Deltas[layer] = (new ManagedArray(Weights[layer].x, Weights[layer].y, false));

                ManagedMatrix.Multiply(Deltas[layer], tD, X[layer]);
                ManagedMatrix.Multiply(Deltas[layer], 1.0 / input.y);

                ManagedOps.Free(tD);
            }

            Cost = 0.0;
            L2   = 0.0;

            // cross-entropy cost and 0.5 * squared error accumulated over every
            // output element (D[0] holds Y - Y_true from above)
            for (var i = 0; i < Y_true.Length(); i++)
            {
                L2   += 0.5 * (D[0][i] * D[0][i]);
                Cost += (-Y_true[i] * Math.Log(Y[i]) - (1 - Y_true[i]) * Math.Log(1 - Y[i]));
            }

            // average both metrics over the batch
            Cost /= input.y;
            L2   /= input.y;

            // Cleanup
            for (var layer = 0; layer < Weights.GetLength(0); layer++)
            {
                ManagedOps.Free(D[layer], X[layer], Z[layer]);
            }
        }
Пример #5
0
        // Trains the network for opts.Epochs passes over the data set, running one
        // gradient update per mini-batch of opts.BatchSize samples, optionally
        // shuffling the sample order each epoch. Maintains an exponentially
        // smoothed (0.99 / 0.01) loss history in rL and logs per-epoch timing and
        // the latest smoothed error.
        //
        // input  - training samples (3D, sliced per batch)
        // output - training targets (2D, sliced per batch)
        // opts   - epochs, batch size, item count, shuffle and pooling settings
        public void Train(ManagedArray input, ManagedArray output, ConvolutionalNeuralNetworkOptions opts)
        {
            var batchInput  = new ManagedArray(input.x, input.y, opts.BatchSize, false);
            var batchTarget = new ManagedArray(opts.BatchSize, output.y, false);

            var shuffler = new ManagedIntList(opts.Items);

            for (var epoch = 0; epoch < opts.Epochs; epoch++)
            {
                var epochStart = Profiler.now();

                if (opts.Shuffle)
                {
                    ManagedOps.Shuffle(shuffler);
                }

                rL.Clear();

                var smoothedError = 0.0;

                for (var offset = 0; offset < opts.Items; offset += opts.BatchSize)
                {
                    // assemble the next mini-batch, indirectly through the
                    // shuffled index list when shuffling is enabled
                    if (opts.Shuffle)
                    {
                        ManagedOps.Copy3D(batchInput, input, 0, 0, offset, shuffler);
                        ManagedOps.Copy2D(batchTarget, output, offset, 0, shuffler);
                    }
                    else
                    {
                        ManagedOps.Copy3D(batchInput, input, 0, 0, offset);
                        ManagedOps.Copy2D(batchTarget, output, offset, 0);
                    }

                    FeedForward(batchInput, opts.Pool);
                    BackPropagation(batchTarget);
                    ApplyGradients(opts);

                    // seed the smoothed-loss history with the first batch loss
                    if (rL.Count == 0)
                    {
                        rL.Add(L);
                    }

                    smoothedError = 0.99 * rL[rL.Count - 1] + 0.01 * L;

                    rL.Add(smoothedError);
                }

                Console.WriteLine("epoch {0}/{1} elapsed time is {2} ms - Error: {3}", (epoch + 1).ToString("D", ManagedMatrix.ci), opts.Epochs.ToString("D", ManagedMatrix.ci), Profiler.Elapsed(epochStart).ToString("D", ManagedMatrix.ci), smoothedError.ToString("0.000000", ManagedMatrix.ci));
            }

            ManagedOps.Free(shuffler);

            ManagedOps.Free(batchInput, batchTarget);
        }
Пример #6
0
        // Rotate a 2D matrix
        //
        // Rotates src by 180 degrees into dst by flipping it along both axes
        // (flip dim 0, then flip dim 1). dst is resized to match src.
        public static void Rotate180(ManagedArray dst, ManagedArray src)
        {
            dst.Resize(src.x, src.y);

            var scratch = new ManagedArray(src.x, src.y, false);

            // work on a private copy so src itself is never modified
            ManagedOps.Copy2D(scratch, src, 0, 0);

            // first flip, result fed back into the scratch buffer
            Flip(dst, scratch, 0);
            ManagedOps.Copy2D(scratch, dst, 0, 0);

            // second flip completes the 180-degree rotation
            Flip(dst, scratch, 1);
            ManagedOps.Copy2D(scratch, dst, 0, 0);

            ManagedOps.Free(scratch);
        }
        // Backward propagation
        //
        // Single-hidden-layer backward pass; the original R formulas are quoted in
        // the comments below. Computes the output delta (D3) and hidden delta (D2),
        // the weight gradients DeltaWji / DeltaWkj, and the batch-averaged
        // cross-entropy Cost and squared-error L2. Frees the intermediates
        // (A2, Yk, Z2) allocated by Forward.
        //
        // training - input samples for the current batch; training.y is the batch
        //            size used for averaging
        public void BackPropagation(ManagedArray training)
        {
            // add bias column to input layer
            var InputBias = new ManagedArray(1, training.y);

            ManagedOps.Set(InputBias, 1.0);

            // x = cbind(array(1, c(nrow(training_set), 1)), training_set)
            var x = ManagedMatrix.CBind(InputBias, training);

            // compute intermediate delta values per layer

            // d3 = y_k - y_matrix
            var D3 = ManagedMatrix.Diff(Yk, Y_output);

            //  d2 = d3 %*% w_kj[, 2:ncol(w_kj)] * nnet_dsigmoid(z_2)
            // sWkj is Wkj with its bias column (column 0) stripped off
            var sWkj = new ManagedArray(Wkj.x - 1, Wkj.y);

            ManagedOps.Copy2D(sWkj, Wkj, 1, 0);

            var D2 = new ManagedArray(sWkj.x, D3.y);

            ManagedMatrix.Multiply(D2, D3, sWkj);

            var DZ2 = ManagedMatrix.DSigm(Z2);

            // element-wise product with the sigmoid derivative of Z2
            ManagedMatrix.Product(D2, DZ2);

            // dWji = (t(d2) %*% x)
            // dWkj = (t(d3) % *% a_2)
            var tD2 = new ManagedArray(D2.y, D2.x);
            var tD3 = new ManagedArray(D3.y, D3.x);

            ManagedMatrix.Transpose(tD2, D2);
            ManagedMatrix.Transpose(tD3, D3);

            DeltaWji = new ManagedArray(Wji.x, Wji.y);
            DeltaWkj = new ManagedArray(Wkj.x, Wkj.y);

            ManagedMatrix.Multiply(DeltaWji, tD2, x);
            ManagedMatrix.Multiply(DeltaWkj, tD3, A2);

            // cost = sum(-y_matrix * log(y_k) - (1 - y_matrix) * log(1 - y_k))
            Cost = 0.0;
            L2   = 0.0;

            // also accumulate 0.5 * squared error into L2 in the same pass
            for (var i = 0; i < Y_output.Length(); i++)
            {
                L2   += 0.5 * (D3[i] * D3[i]);
                Cost += (-Y_output[i] * Math.Log(Yk[i]) - (1 - Y_output[i]) * Math.Log(1 - Yk[i]));
            }

            // cost = cost / m
            // dWji = dWji / m
            // dWkj = dWkj / m
            Cost /= training.y;
            L2   /= training.y;

            ManagedMatrix.Multiply(DeltaWji, 1.0 / training.y);
            ManagedMatrix.Multiply(DeltaWkj, 1.0 / training.y);

            // cleanup
            ManagedOps.Free(D2, D3, DZ2, InputBias);
            ManagedOps.Free(sWkj, tD2, tD3, x);

            // cleanup of arrays allocated in Forward
            ManagedOps.Free(A2, Yk, Z2);
        }
Пример #8
0
        // Update Network Weights based on computed errors
        //
        // Full backward pass for the layered network: computes the output error
        // and output delta, maps the fully-connected delta back onto the last
        // layer's feature maps, propagates deltas through the convolution and
        // subsampling layers, then accumulates the feature-map, bias, and
        // fully-connected weight gradients.
        //
        // batch - target outputs for the current mini-batch (compared against
        //         Output element by element)
        //
        // NOTE(review): the loss L below is averaged by batch.x while the
        // gradients are averaged by batchz (= Layers[last].Activation.z) —
        // presumably both equal the mini-batch size; verify against the caller.
        public void BackPropagation(ManagedArray batch)
        {
            var n      = Layers.Count;
            var last   = n - 1;
            var batchz = Layers[last].Activation.z;

            // backprop deltas
            ManagedOps.Free(OutputDelta, OutputError);

            OutputDelta = new ManagedArray(Output, false);
            OutputError = new ManagedArray(Output, false);

            for (var x = 0; x < Output.Length(); x++)
            {
                // error
                OutputError[x] = Output[x] - batch[x];

                // output delta: error times the sigmoid derivative of the output
                OutputDelta[x] = OutputError[x] * (Output[x] * (1 - Output[x]));
            }

            // Loss Function: mean 0.5 * squared error
            L = 0.5 * ManagedMatrix.SquareSum(OutputError) / batch.x;

            ManagedOps.Free(WeightsTransposed, FeatureVectorDelta);

            FeatureVectorDelta = new ManagedArray(FeatureVector, false);
            WeightsTransposed  = new ManagedArray(Weights, false);

            // feature vector delta: W^T * output delta
            ManagedMatrix.Transpose(WeightsTransposed, Weights);
            ManagedMatrix.Multiply(FeatureVectorDelta, WeightsTransposed, OutputDelta);

            // only conv layers have a sigmoid activation to differentiate here
            if (Layers[last].Type == LayerTypes.Convolution)
            {
                for (var x = 0; x < FeatureVectorDelta.Length(); x++)
                {
                    FeatureVectorDelta[x] = FeatureVectorDelta[x] * FeatureVector[x] * (1 - FeatureVector[x]);
                }
            }

            // reshape feature vector deltas into output map style
            var MapSize = Layers[last].Activation.x * Layers[last].Activation.y;
            var temp1D  = new ManagedArray(1, MapSize, false);
            var temp2D  = new ManagedArray(Layers[last].Activation.x, Layers[last].Activation.y, false);

            ManagedOps.Free(Layers[last].Delta);
            Layers[last].Delta = new ManagedArray(Layers[last].Activation, false);

            // one (map j, batch item ii) slice at a time; temp1D is reshaped to 2D
            // for the transpose and restored to a column before the next copy
            for (var j = 0; j < Layers[last].Activation.i; j++)
            {
                for (var ii = 0; ii < batchz; ii++)
                {
                    ManagedOps.Copy2D(temp1D, FeatureVectorDelta, ii, j * MapSize);
                    temp1D.Reshape(Layers[last].Activation.x, Layers[last].Activation.y);
                    ManagedMatrix.Transpose(temp2D, temp1D);
                    ManagedOps.Copy2D4D(Layers[last].Delta, temp2D, ii, j);
                    temp1D.Reshape(1, MapSize);
                }
            }

            ManagedOps.Free(temp1D, temp2D);

            // propagate deltas backwards through the remaining layers
            for (var l = n - 2; l >= 0; l--)
            {
                var next = l + 1;

                if (Layers[l].Type == LayerTypes.Convolution)
                {
                    ManagedOps.Free(Layers[l].Delta);
                    Layers[l].Delta = new ManagedArray(Layers[l].Activation, false);

                    // expand the next (subsampled) layer's delta back to this layer's size
                    var xx = Layers[next].Scale * Layers[next].Activation.x;
                    var yy = Layers[next].Scale * Layers[next].Activation.y;

                    var FeatureMap         = new ManagedArray(Layers[next].Activation.x, Layers[next].Activation.y, false);
                    var FeatureMapExpanded = new ManagedArray(xx, yy, false);
                    var Activation         = new ManagedArray(xx, yy, false);
                    var Delta = new ManagedArray(xx, yy, false);

                    // each expanded cell carries 1 / Scale^2 of the pooled delta
                    var Scale = (1.0 / (Layers[next].Scale * Layers[next].Scale));

                    for (var j = 0; j < Layers[l].Activation.i; j++)
                    {
                        for (var z = 0; z < batchz; z++)
                        {
                            ManagedOps.Copy4D2D(FeatureMap, Layers[next].Delta, z, j);
                            ManagedMatrix.Expand(FeatureMap, Layers[next].Scale, Layers[next].Scale, FeatureMapExpanded);
                            ManagedOps.Copy4D2D(Activation, Layers[l].Activation, z, j);

                            // sigmoid derivative times the up-sampled, scaled delta
                            for (var x = 0; x < Delta.Length(); x++)
                            {
                                Delta[x] = Activation[x] * (1 - Activation[x]) * FeatureMapExpanded[x] * Scale;
                            }

                            ManagedOps.Copy2D4D(Layers[l].Delta, Delta, z, j);
                        }
                    }

                    ManagedOps.Free(FeatureMap, FeatureMapExpanded, Activation, Delta);
                }
                else if (Layers[l].Type == LayerTypes.Subsampling)
                {
                    ManagedOps.Free(Layers[l].Delta);
                    Layers[l].Delta = new ManagedArray(Layers[l].Activation, false);

                    var Delta      = new ManagedArray(Layers[next].Activation.x, Layers[next].Activation.y, batchz);
                    var FeatureMap = new ManagedArray(Layers[next].KernelSize, Layers[next].KernelSize, false);
                    var rot180     = new ManagedArray(Layers[next].KernelSize, Layers[next].KernelSize, false);
                    var z          = new ManagedArray(Layers[l].Activation.x, Layers[l].Activation.y, batchz);
                    var ztemp      = new ManagedArray(Layers[l].Activation.x, Layers[l].Activation.y, batchz, false);

                    for (var i = 0; i < Layers[l].Activation.i; i++)
                    {
                        ManagedOps.Set(z, 0.0);

                        // accumulate the full convolution of each next-layer delta
                        // with the corresponding rotated kernel
                        for (var j = 0; j < Layers[next].Activation.i; j++)
                        {
                            ManagedOps.Copy4DIJ2D(FeatureMap, Layers[next].FeatureMap, i, j);
                            ManagedMatrix.Rotate180(rot180, FeatureMap);

                            ManagedOps.Copy4D3D(Delta, Layers[next].Delta, j);
                            ManagedConvolution.Full(Delta, rot180, ztemp);
                            ManagedMatrix.Add(z, ztemp);
                        }

                        ManagedOps.Copy3D4D(Layers[l].Delta, z, i);
                    }

                    ManagedOps.Free(Delta, FeatureMap, rot180, z, ztemp);
                }
            }

            // calc gradients
            for (var l = 1; l < n; l++)
            {
                var prev = l - 1;

                if (Layers[l].Type == LayerTypes.Convolution)
                {
                    ManagedOps.Free(Layers[l].DeltaFeatureMap, Layers[l].DeltaBias);

                    Layers[l].DeltaFeatureMap = new ManagedArray(Layers[l].FeatureMap, false);
                    Layers[l].DeltaBias       = new ManagedArray(Layers[l].OutputMaps, false);

                    var FeatureMapDelta = new ManagedArray(Layers[l].FeatureMap.x, Layers[l].FeatureMap.y, Layers[l].FeatureMap.z, false);

                    // d[j]
                    var dtemp = new ManagedArray(Layers[l].Activation.x, Layers[l].Activation.y, batchz, false);

                    // a[i] and flipped
                    var atemp = new ManagedArray(Layers[prev].Activation.x, Layers[prev].Activation.y, batchz, false);
                    var ftemp = new ManagedArray(Layers[prev].Activation.x, Layers[prev].Activation.y, batchz, false);

                    for (var j = 0; j < Layers[l].Activation.i; j++)
                    {
                        ManagedOps.Copy4D3D(dtemp, Layers[l].Delta, j);

                        for (var i = 0; i < Layers[prev].Activation.i; i++)
                        {
                            // kernel gradient: valid convolution of the flipped
                            // input activation with the output delta, batch-averaged
                            ManagedOps.Copy4D3D(atemp, Layers[prev].Activation, i);
                            ManagedMatrix.FlipAll(ftemp, atemp);
                            ManagedConvolution.Valid(ftemp, dtemp, FeatureMapDelta);
                            ManagedMatrix.Multiply(FeatureMapDelta, 1.0 / batchz);

                            ManagedOps.Copy2D4DIJ(Layers[l].DeltaFeatureMap, FeatureMapDelta, i, j);
                        }

                        // bias gradient: mean delta over the batch
                        Layers[l].DeltaBias[j] = ManagedMatrix.Sum(dtemp) / batchz;
                    }

                    ManagedOps.Free(FeatureMapDelta, dtemp, atemp, ftemp);
                }
            }

            // fully-connected weight and bias gradients
            var FeatureVectorTransposed = new ManagedArray(FeatureVector, false);

            ManagedMatrix.Transpose(FeatureVectorTransposed, FeatureVector);

            ManagedOps.Free(WeightsDelta, BiasDelta);

            WeightsDelta = new ManagedArray(Weights, false);
            BiasDelta    = new ManagedArray(Bias, false);

            ManagedMatrix.Multiply(WeightsDelta, OutputDelta, FeatureVectorTransposed);
            ManagedMatrix.Multiply(WeightsDelta, 1.0 / batchz);
            ManagedMatrix.Mean(BiasDelta, OutputDelta, 0);

            ManagedOps.Free(FeatureVectorTransposed);
        }