public void ComputeTwiceGradientShouldYieldTheSameResult()
        {
            // Verifies that Backward() is idempotent for the Tanh layer:
            // running the backward pass twice with the same upstream gradient
            // must yield identical input-activation gradients.
            const int width  = 20;
            const int height = 20;
            const int depth  = 2;

            var layer = new TanhLayer<double>();
            layer.Init(width, height, depth);

            // Forward pass over a random input volume (training mode).
            var inputVolume  = BuilderInstance<double>.Volume.Random(new Shape(width, height, depth));
            var outputVolume = layer.DoForward(inputVolume, true);

            // Upstream gradient with every element set to 1.
            var onesGradient = BuilderInstance<double>.Volume.SameAs(
                new double[outputVolume.Shape.TotalLength].Populate(1.0),
                outputVolume.Shape);

            // First backward pass; snapshot the resulting input gradients.
            layer.Backward(onesGradient);
            var firstRun = ((Volume<double>)layer.InputActivationGradients.Clone()).ToArray();

            // Second backward pass with the identical upstream gradient.
            layer.Backward(onesGradient);
            var secondRun = ((Volume<double>)layer.InputActivationGradients.Clone()).ToArray();

            Assert.IsTrue(firstRun.SequenceEqual(secondRun));
        }
// Example #2
        public dynamic backward(dynamic grid)
        {
            // LSTM backward pass: propagates the upstream gradient `grid`
            // through the hidden-state product, cell tanh and the four gates,
            // accumulates the recurrent weight gradients, and returns the
            // gradient w.r.t. the layer input.

            // Gradient reaching tanh(c) through the hidden-state product.
            var dHidden   = h_next_mul.backwardY(grid);
            var dTanhCell = cell_tl.Backward(dHidden);

            // Per-gate gradients derived from the cell-state gradient.
            var dInputGate  = mulin_gate_mul.backward(dTanhCell);
            var dForgetGate = c_next_mul.backward(dTanhCell);
            var dCellMemory = mulin_gate_mul.backwardY(dTanhCell);

            // The output gate receives the raw upstream gradient.
            var dOutputGate = h_next_mul.backward(grid);

            // Back through the gate activations (sigmoid / tanh).
            var dAi = input_gate_s.Backward(dInputGate);
            var dAf = forget_gate_s.Backward(dForgetGate);
            var dAo = output_gate_s.Backward(dOutputGate);
            var dAc = cell_memory_tl.Backward(dCellMemory);

            // Stack the four gate gradients into one matrix so they line up
            // with the concatenated weight layout.
            var leftPair  = Matrix.cat(dAi, dAf, 1);
            var rightPair = Matrix.cat(dAc, dAo, 1);
            var dGates    = Matrix.cat(leftPair, rightPair, 1);

            // Accumulate weight gradients, then return the input gradient.
            ihweight = convLayerih.backweight(dGates);
            hhweight = convLayerhh.backweight(dGates);
            return convLayerih.backward(dGates);
        }
// Example #3
        public dynamic backward(dynamic grid)
        {
            // GRU backward pass: pushes the upstream gradient `grid` through
            // the output projection, the update/reset gates and the candidate
            // state, accumulates the weight gradients, and returns the
            // gradient w.r.t. the layer input.

            // Back through the hidden-to-output projection.
            var dHidden = convLayerhq.backward(grid);
            hqW = convLayerhq.backweight(grid);

            // Split the hidden-state gradient across the update-gate blend.
            var dBlend     = ZHTL.backward(dHidden);
            var dCandidate = ZHTL.backwardY(dHidden);

            var dUpdateGate = ZHL.backward(dBlend);

            // Candidate state: back through tanh, then the reset-gate product.
            var dResetProduct = THL.Backward(dCandidate);
            // The gradient of addition is the identity, so the reset-gate
            // branch receives dResetProduct unchanged.
            var dResetGate  = RHM.backward(dResetProduct);
            var dPrevHidden = RHM.backwardY(dResetProduct);

            // Back through the sigmoid gate activations.
            var dResetPre  = RSL.Backward(dResetGate);
            var dUpdatePre = ZSL.Backward(dUpdateGate);

            // Concatenate gate gradients to match the stacked weight layout.
            var gatePair = Matrix.cat(dUpdatePre, dResetPre, 1);
            var dGates   = Matrix.cat(gatePair, dPrevHidden, 1);

            // Accumulate weight gradients, then return the input gradient.
            ihweight = convLayerih.backweight(dGates);
            hhweight = convLayerhh.backweight(dGates);
            return convLayerih.backward(dGates);
        }