Example #1
        // Backward pass of a batch-normalization layer.
        public NDarray<double> Backward(NDarray<double> accumGrad)
        {
            // Copy of gamma taken before the optimizer update below; it is used
            // for the gradient returned to the previous layer.
            var gamma0 = new NDarray<double>(gamma);

            // Per-feature sums over the batch axis: sum(dY) and sum(dY * xCentered).
            var s0 = ND.SumAxis(accumGrad, 0);
            var s1 = ND.SumAxis(ND.MulNDarray(accumGrad, xCentered), 0);

            if (IsTraining)
            {
                // Gradients with respect to the scale (gamma) and shift (beta) parameters.
                var gGamma = ND.SumAxis(ND.MulNDarray(accumGrad, xNorm), 0);
                var gBeta  = s0;

                gOpt.Update(gamma, gGamma);
                bOpt.Update(beta, gBeta);
            }

            // Closed-form gradient with respect to the layer input (same formula as Example #7):
            // dX = gamma * stdDevInv / N * (N * dY - sum(dY) - xNorm * stdDevInv * sum(dY * xCentered)).
            double batchSize = accumGrad.Shape[0];
            var    m0        = ND.MulNDarray(gamma0, stdDevInv, 1.0 / batchSize, 1);
            var    m1        = ND.MulNDarray(xNorm, ND.MulNDarray(stdDevInv, s1));
            var    diff      = ND.SubNDarray(accumGrad, ND.AddNDarray(s0, m1), batchSize, 1);

            accumGrad = ND.MulNDarray(m0, diff);

            return(accumGrad);
        }
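
For reference, the same batch-normalization backward pass can be written compactly in NumPy (a minimal sketch, not the library's API: it assumes accum_grad has shape (batch, features) and that x_centered, std_dev_inv and gamma were cached during the forward pass, mirroring the fields used above):

import numpy as np

def batchnorm_backward(accum_grad, gamma, x_centered, std_dev_inv):
    # Values cached by the forward pass: x_centered = x - mean, std_dev_inv = 1 / sqrt(var + eps).
    n = accum_grad.shape[0]
    x_norm = x_centered * std_dev_inv

    # Parameter gradients (what the optimizers receive in the IsTraining branch above).
    grad_gamma = np.sum(accum_grad * x_norm, axis=0)
    grad_beta = np.sum(accum_grad, axis=0)

    # Closed-form gradient with respect to the layer input.
    grad_x = (1.0 / n) * gamma * std_dev_inv * (
        n * accum_grad
        - np.sum(accum_grad, axis=0)
        - x_centered * std_dev_inv ** 2 * np.sum(accum_grad * x_centered, axis=0)
    )
    return grad_x, grad_gamma, grad_beta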
Example #2
        // Backward pass of a fully connected (dense) layer.
        public NDarray<U> Backward(NDarray<U> accumGrad)
        {
            // Snapshot the current weights; the gradient returned to the previous
            // layer must be computed with the pre-update weights.
            for (int i = 0; i < wTmp.Count; ++i)
            {
                wTmp.Data[i] = weight.Data[i];
            }

            if (IsTraining)
            {
                // gW = LastInput^T . accumGrad; gB = sum of accumGrad over the batch axis.
                var gW = ND.GemmTAB(LastInput, accumGrad);
                var gB = ND.SumAxis(accumGrad, 0, true);

                wOpt.Update(weight, gW);
                bOpt.Update(biases, gB);
            }

            // Propagate accumGrad . W^T (pre-update weights) back to the previous layer.
            return(ND.GemmATB(accumGrad, wTmp));
        }
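
In NumPy terms, the dense-layer backward pass above reduces to three products (a minimal sketch; last_input stands for the activation cached during the forward pass, and the optimizer updates are omitted):

import numpy as np

def dense_backward(accum_grad, last_input, weight):
    # Gradients of the loss with respect to the layer parameters.
    grad_w = last_input.T @ accum_grad                  # shape: (in_features, out_features)
    grad_b = np.sum(accum_grad, axis=0, keepdims=True)  # shape: (1, out_features)

    # Gradient passed back to the previous layer, using the pre-update weights.
    grad_input = accum_grad @ weight.T
    return grad_input, grad_w, grad_b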
Example #3
        // Backward pass of a dense layer.
        public override NDArray<Type> Backward(NDArray<Type> accumGrad)
        {
            // Snapshot W before the optimizer update; the returned gradient
            // must use the pre-update weights.
            var Wtmp = new NDArray<Type>(W);

            if (trainable)
            {
                // gW = layerInput^T . accumGrad; gw0 is the gradient of the bias terms.
                var gW = NDArray<Type>.Dot(layerInput.T, accumGrad);

                var gw0 = (new NDArray<Type>(accumGrad.Shape)) + NumDN.Sum(accumGrad);

                W  = WOpt.Update(W, gW);
                w0 = w0Opt.Update(w0, gw0);
            }

            // Propagate the gradient to the previous layer using the weights saved before the update.
            var accumGrad0 = NDArray<Type>.Dot(accumGrad, Wtmp.T);

            return(accumGrad0);
        }
Example #4
        /*
         * def backward_pass(self, accum_grad):
         # Reshape accumulated gradient into column shape
         #  accum_grad = accum_grad.transpose(1, 2, 3, 0).reshape(self.n_filters, -1)
         #
         #  if self.trainable:
         # Take dot product between column shaped accum. gradient and column shape
         # layer input to determine the gradient at the layer with respect to layer weights
         #      grad_w = accum_grad.dot(self.X_col.T).reshape(self.W.shape)
         # The gradient with respect to the bias terms is the sum, as in the Dense layer
         #      grad_w0 = np.sum(accum_grad, axis=1, keepdims=True)
         #
         # Update the layer's weights
         #      self.W = self.W_opt.update(self.W, grad_w)
         #      self.w0 = self.w0_opt.update(self.w0, grad_w0)
         #
         # Recalculate the gradient which will be propagated back to the previous layer
         #  accum_grad = self.W_col.T.dot(accum_grad)
         # Reshape from column shape to image shape
         #  accum_grad = column_to_image(accum_grad,
         #                          self.layer_input.shape,
         #                          self.filter_shape,
         #                          stride=self.stride,
         #                          output_shape=self.padding)
         #
         #  return accum_grad
         */
        // Backward pass of a 2D convolution layer; it follows the Python reference above.
        public override NDArray<Type> Backward(NDArray<Type> accumGrad)
        {
            // Reshape the accumulated gradient into column shape: (n_filters, -1).
            var accumGrad0 = accumGrad.transpose(1, 2, 3, 0).ReShape(numFilters, -1);

            if (trainable)
            {
                // gradW = accumGrad0 . X_col^T, reshaped back to the filter shape;
                // gradw0 is the per-filter sum of the gradient (one bias per filter).
                var gradW = NDArray<Type>.Dot(accumGrad0, X_col.T).ReShape(W.Shape);

                var gradw0 = NumDN.Sum(accumGrad0, true, 1);
                W  = WOpt.Update(W, gradW);
                w0 = w0Opt.Update(w0, gradw0);
            }

            // Gradient propagated to the previous layer: W_col^T . accumGrad0,
            // mapped from column shape back to image shape.
            accumGrad0 = NDArray<Type>.Dot(W_col.T, accumGrad0);

            accumGrad0 = col2img(accumGrad0, layerInput.Shape, filterShape, stride, padding);

            return(accumGrad0);
        }
Example #5
        // Backward pass of a dense layer with a fused activation function.
        public NDarray Backward(NDarray accumGrad)
        {
            // Multiply the incoming gradient, in place, by the activation derivative
            // evaluated on the values cached during the forward pass.
            accumGrad.MulFbInplace(lastXact, activation.Deriv);

            // Snapshot the current weights for the gradient returned to the previous layer.
            for (int i = 0; i < wTmp.Count; ++i)
            {
                wTmp.Data[i] = weights.Data[i];
            }

            if (IsTraining)
            {
                // gw = lastX^T . accumGrad; gb = sum of accumGrad over the batch axis.
                var gw = NDarray.GemmTABC(lastX, accumGrad);
                wOptm.Update(weights, gw);

                var gb = accumGrad.SumAxis0();
                bOptm.Update(biases, gb);
            }

            // Propagate accumGrad . W^T (pre-update weights) back to the previous layer.
            return(NDarray.GemmATBC(accumGrad, wTmp));
        }
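
The same flow with the activation derivative folded in, sketched in NumPy (activation_deriv stands for the derivative of the layer's activation and last_x_act for the cached value it is evaluated on; both names are placeholders, not the library's API):

import numpy as np

def dense_activation_backward(accum_grad, last_x, last_x_act, weights, activation_deriv):
    # Fold the activation derivative into the incoming gradient (MulFbInplace above).
    accum_grad = accum_grad * activation_deriv(last_x_act)

    # Parameter gradients, as computed inside the IsTraining branch.
    grad_w = last_x.T @ accum_grad
    grad_b = np.sum(accum_grad, axis=0)

    # Gradient returned to the previous layer, using the pre-update weights.
    return accum_grad @ weights.T, grad_w, grad_b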
Example #6
        // Backward pass of a dense layer with an optional activation layer and optional biases.
        public NDarray<Type> Backward(NDarray<Type> accumGrad)
        {
            // Run the activation layer's backward pass first, if one is attached.
            if (activationLayer != null)
            {
                accumGrad = activationLayer.Backward(accumGrad);
            }

            // Transposed weights, captured before the optimizer update below.
            NDarray<Type> W = weights.T;

            if (IsTraining)
            {
                // gW = LayerInput^T . accumGrad.
                var gW = ND.TensorDot(LayerInput.T, accumGrad);
                weights = weightsOptmz.Update(weights, gW);

                if (useBiases)
                {
                    // gw0 = sum of accumGrad over the batch axis (keepdims).
                    var gw0 = accumGrad.Sum(0, true);
                    biases = biasesOptmz.Update(biases, gw0);
                }
            }

            // Propagate accumGrad . weights^T (pre-update weights) back to the previous layer.
            return(ND.TensorDot<Type>(accumGrad, W));
        }
Example #7
        // Backward pass of a batch-normalization layer.
        public override NDArray<Type> Backward(NDArray<Type> accumGrad)
        {
            // Copy of gamma taken before the optimizer update; used for the input gradient below.
            var gamma0 = new NDArray<Type>(gamma);

            if (trainable)
            {
                // Gradients with respect to the scale (gamma) and shift (beta) parameters.
                var X_norm     = X_centered * stddev_inv;
                var grad_gamma = NumDN.Sum(accumGrad * X_norm, 0);
                var grad_beta  = NumDN.Sum(accumGrad, 0);

                gamma = gOpt.Update(gamma, grad_gamma);
                beta  = bOpt.Update(beta, grad_beta);
            }

            // Closed-form gradient with respect to the layer input:
            // dX = gamma * stddev_inv / N * (N * dY - sum(dY, 0) - X_centered * stddev_inv^2 * sum(dY * X_centered, 0)).
            double batchSize = accumGrad.Shape[0];

            accumGrad = (1.0 / batchSize) * gamma0 * stddev_inv * (
                batchSize * accumGrad
                - NumDN.Sum(accumGrad, 0)
                - X_centered * stddev_inv * stddev_inv * NumDN.Sum(accumGrad * X_centered, 0)
                );

            return(accumGrad);
        }