// ===== Example #1: pooling layer backward pass =====
        public override void Execute() //Task execution
        {
            MyLog.DEBUG.WriteLine("Pooling backward.");

            // Node feeding this pooling layer; deltas are only propagated into real layers.
            MyNode inputNode = Owner.Input.Owner;

            if (inputNode is MyAbstractLayer)
            {
                MyAbstractLayer previousLayer = (MyAbstractLayer)inputNode;

                // Clear the previous layer's delta buffer before the kernel writes into it.
                previousLayer.Delta.Fill(0);

                // Pre-activation input of the previous layer (used for activation derivatives).
                CUdeviceptr prevInputPtr = MyAbstractLayer.DetermineInput(previousLayer);

                // One thread per neuron of this pooling layer.
                m_kernel.SetupExecution(Owner.Neurons);
                m_kernel.Run(
                    (int)previousLayer.ActivationFunction,
                    Owner.Delta,
                    previousLayer.Delta,
                    prevInputPtr,
                    Owner.ActivatedNeurons,
                    Owner.Neurons
                    );
            }
        }
        /// <summary>
        /// Backward pass of a fully connected layer: computes the previous layer's
        /// deltas as f'(prevInput) * (W^T x Delta). Uses a dedicated kernel when the
        /// batch size is 1, and cuBLAS GEMM plus a derivative kernel otherwise.
        /// </summary>
        public override void Execute() //Task execution
        {
            // Node feeding this layer; deltas are only propagated into real layers.
            MyNode node = Owner.Input.Owner;

            if (node is MyAbstractLayer)
            {
                MyAbstractLayer previousLayer = node as MyAbstractLayer;

                // Clear the previous layer's deltas before writing into them.
                // NOTE(review): an earlier comment claimed this reset should be skipped
                // when the following layer is Gaussian (which pre-fills regularization
                // deltas), but the code resets unconditionally — confirm the intent.
                previousLayer.Delta.Fill(0);

                // determine input to previous layer (pre-activation values for derivatives)
                CUdeviceptr prevInputPtr = MyAbstractLayer.DetermineInput(previousLayer);

                if (Owner.ParentNetwork.BatchSize == 1)
                {
                    // cuBLAS tends to be slower when BatchSize is 1, use the kernel instead
                    m_deltaKernel.SetupExecution(previousLayer.Neurons);
                    m_deltaKernel.Run(
                        (int)previousLayer.ActivationFunction,
                        prevInputPtr,
                        previousLayer.Delta,
                        Owner.Delta,
                        Owner.Weights,
                        Owner.ParentNetwork.Dropout,
                        previousLayer.Neurons,
                        Owner.Neurons
                        );
                }
                else
                {
                    // previousLayer.Delta = Transpose(Weights) x Delta
                    // GEMM shape: m = previousLayer.Neurons, n = BatchSize, k = Owner.Neurons;
                    // weights and deltas both use Owner.Neurons as the leading dimension.
                    MyCublasFactory.Instance.Gemm(Operation.Transpose, Operation.NonTranspose,
                                                  previousLayer.Neurons, Owner.ParentNetwork.BatchSize, Owner.Neurons, 1.0f,
                                                  Owner.Weights.GetDevice(Owner), Owner.Neurons,
                                                  Owner.Delta.GetDevice(Owner), Owner.Neurons,
                                                  0.0f, previousLayer.Delta.GetDevice(Owner), previousLayer.Neurons
                                                  );

                    // multiply previousLayer.Delta by activation derivatives of previous layer
                    m_deltaBatchKernel.SetupExecution(previousLayer.Neurons * Owner.ParentNetwork.BatchSize);
                    m_deltaBatchKernel.Run(
                        (int)previousLayer.ActivationFunction,
                        prevInputPtr,
                        previousLayer.Delta,
                        Owner.ParentNetwork.Dropout,
                        previousLayer.Neurons,
                        Owner.ParentNetwork.BatchSize
                        );
                }
            }
        }
// ===== Example #3: convolution layer backward pass =====
        public override void Execute()
        {
            MyLog.DEBUG.WriteLine("Convolution backward.");

            // Node feeding this convolution layer; deltas only flow into real layers.
            MyNode inputNode = Owner.Input.Owner;

            if (inputNode is MyAbstractLayer)
            {
                MyAbstractLayer previousLayer = (MyAbstractLayer)inputNode;

                // Clear the previous layer's delta buffer before the kernel writes into it.
                previousLayer.Delta.Fill(0);

                // Pre-activation input of the previous layer (for activation derivatives).
                CUdeviceptr prevInputPtr = MyAbstractLayer.DetermineInput(previousLayer);

                // Precompute the geometry arguments passed to the kernel.
                int paddedWidth = Owner.InputWidth + Owner.ZeroPadding + Owner.ZeroPadding;
                int paddedHeight = Owner.InputHeight + Owner.ZeroPadding + Owner.ZeroPadding;
                int inputSliceSize = Owner.InputWidth * Owner.InputHeight;   // one input channel, no padding
                int paddedSliceSize = paddedWidth * paddedHeight;            // one input channel, with padding
                int filterSliceSize = Owner.FilterWidth * Owner.FilterHeight;
                int filterSize = filterSliceSize * Owner.InputDepth;         // full filter across all input channels
                int outputSliceSize = Owner.OutputWidth * Owner.OutputHeight;

                // One thread per neuron of the previous layer.
                m_kernel.SetupExecution(previousLayer.Neurons);
                m_kernel.Run(
                    (int)previousLayer.ActivationFunction,
                    Owner.Weights,
                    Owner.Delta,
                    previousLayer.Delta,
                    prevInputPtr,
                    Owner.FilterCount,
                    inputSliceSize,
                    paddedSliceSize,
                    Owner.ZeroPadding,
                    Owner.InputWidth, Owner.InputHeight,
                    Owner.FilterWidth, Owner.FilterHeight,
                    filterSliceSize,
                    filterSize,
                    Owner.OutputWidth, Owner.OutputHeight, outputSliceSize,
                    Owner.HorizontalStride, Owner.VerticalStride,
                    previousLayer.Neurons
                    );
            }
        }
        public override void Execute()
        {
            // Deltas are only propagated when a real layer feeds this one.
            MyNode inputNode = Owner.Input.Owner;

            if (!(inputNode is MyAbstractLayer))
            {
                return;
            }

            MyAbstractLayer previousLayer = (MyAbstractLayer)inputNode;

            // Clear the previous layer's deltas before writing into them.
            previousLayer.Delta.Fill(0);

            // No backpropagation while the layer is driven in generative mode.
            if (Owner.Generate.IsIncomingRised())
            {
                return;
            }

            // First half of the previous layer's delta block holds mean deltas,
            // second half holds sigma deltas.
            CUdeviceptr meanDeltas = previousLayer.Delta.GetDevicePtr(Owner, 0);
            CUdeviceptr sigmaDeltas = previousLayer.Delta.GetDevicePtr(Owner, previousLayer.Delta.Count / 2);

            // Pre-activation input of the previous layer (for activation derivatives).
            CUdeviceptr prevInputPtr = MyAbstractLayer.DetermineInput(previousLayer);

            // Sigmas come either from the constant buffer or from the second half of the input block.
            CUdeviceptr sigmas = Owner.UseSigmaConstant
                ? Owner.SigmaConstants.GetDevicePtr(Owner)
                : Owner.Input.GetDevicePtr(Owner, Owner.Input.Count / 2);

            m_samplingDeltaKernel.Run(
                Convert.ToInt32(Owner.UseSigmaConstant),
                (int)previousLayer.ActivationFunction,
                prevInputPtr,
                sigmas,
                meanDeltas,
                sigmaDeltas,
                Owner.Delta,
                Owner.RandomNormal,
                Owner.Neurons
                );

            // Regularization needs the previous layer's weights to compute gradients.
            // (The `is` check also covers null, so no separate null test is needed.)
            if (Regularize && previousLayer is MyAbstractWeightLayer)
            {
                MyAbstractWeightLayer previousWeightLayer = (MyAbstractWeightLayer)previousLayer;

                // Regularize loss with mean^2 + sigma^2 - log(sigma^2),
                // i.e. pull means towards 0 and sigmas towards 1.
                int weightCount = previousWeightLayer.Weights.Count;
                m_regularizationDeltaKernel.SetConstantVariable<float>("RegularizationCoefficient", RegularizationCoefficient);
                m_regularizationDeltaKernel.SetupExecution(weightCount);
                m_regularizationDeltaKernel.Run(
                    Convert.ToInt32(Owner.UseSigmaConstant),
                    (int)previousLayer.ActivationFunction,
                    prevInputPtr,
                    previousLayer.Input,
                    previousWeightLayer.Weights,
                    previousLayer.Output.Count,
                    meanDeltas,
                    sigmaDeltas
                    );
            }
        }
// ===== Example #5: LSTM layer backward pass =====
        /// <summary>
        /// Backward pass of the LSTM layer. Computes gate and cell deltas according
        /// to the configured learning mode (RTRL or BPTT), then backpropagates the
        /// delta into the previous layer when one feeds this layer's first input.
        /// </summary>
        public override void Execute()
        {
            // No history to backpropagate through on the very first simulation step.
            if (SimulationStep == 0)
            {
                return;
            }

            switch (Owner.LearningTasks)
            {
            case MyLSTMLayer.LearningTasksType.RTRL:
            {
                // propagate delta to output gates
                m_deltaKernel.Run(
                    Owner.CellStateErrors,
                    Owner.OutputGateDeltas,
                    Owner.CellStates,
                    Owner.OutputGateActivations,
                    Owner.OutputGateActivationDerivatives,
                    Owner.Delta,

                    Owner.CellStates.Count,
                    Owner.CellsPerBlock
                    );
                break;
            }

            case MyLSTMLayer.LearningTasksType.BPTT:
            {
                // Compute gate/cell deltas for the current BPTT time step.
                // Time-shifted blocks address adjacent steps of the unrolled
                // sequence (-1 = previous step, +1 = next step).
                m_deltaKernel.Run(
                    Owner.Delta,
                    Owner.CellStates,
                    Owner.CellStates.GetTimeShiftedBlock(-1),
                    Owner.CellStateErrors,
                    Owner.CellStateErrors.GetTimeShiftedBlock(+1),

                    Owner.OutputGateDeltas,
                    Owner.ForgetGateDeltas,
                    Owner.ForgetGateDeltas.GetTimeShiftedBlock(+1),
                    Owner.InputGateDeltas,
                    Owner.InputGateDeltas.GetTimeShiftedBlock(+1),
                    Owner.CellInputDeltas,

                    Owner.CellInputActivations,
                    Owner.CellStateActivations,
                    Owner.OutputGateActivations,
                    Owner.ForgetGateActivations.GetTimeShiftedBlock(+1),
                    Owner.InputGateActivations,

                    Owner.CellInputActivationDerivatives,
                    Owner.CellStateActivationDerivatives,
                    Owner.OutputGateActivationDerivatives,
                    Owner.ForgetGateActivationDerivatives,
                    Owner.InputGateActivationDerivatives,

                    Owner.CellInputWeights,
                    Owner.OutputGateWeights,
                    Owner.ForgetGateWeights,
                    Owner.InputGateWeights,

                    Owner.Input.Count,
                    Owner.CellStates.Count,
                    Owner.CellsPerBlock
                    );

                // Weight gradients for the input/forget/output gates.
                m_gateGradientKernel.Run(
                    Owner.Input,
                    Owner.Output.GetTimeShiftedBlock(-1),
                    Owner.CellStates,

                    Owner.InputGateDeltas,
                    Owner.ForgetGateDeltas,
                    Owner.OutputGateDeltas,

                    Owner.OutputGateWeightGradient,
                    Owner.InputGateWeightGradient,
                    Owner.ForgetGateWeightGradient,

                    Owner.Input.Count,
                    Owner.CellStates.Count,
                    Owner.CellsPerBlock
                    );

                // Weight gradients for the cell input.
                m_cellInputGradientKernel.Run(
                    Owner.Input,
                    Owner.Output.GetTimeShiftedBlock(-1),

                    Owner.CellInputDeltas,
                    Owner.CellInputWeightGradient,

                    Owner.Input.Count,
                    Owner.CellStates.Count,
                    Owner.CellsPerBlock
                    );
                break;
            }
            }

            // Node feeding this layer's first input; deltas only flow into real layers.
            MyNode node = Owner.GetInput(0).Owner;

            if (node is MyAbstractLayer)
            {
                MyAbstractLayer previousLayer = node as MyAbstractLayer;

                CUdeviceptr prevInputPtr = nullCUdeviceptr;

                // reset delta only at the start of the unrolled sequence;
                // presumably later time steps accumulate into it — TODO confirm
                if (Owner.ParentNetwork.TimeStep == 0)
                {
                    previousLayer.Delta.Fill(0);
                }

                // determine input to previous layer (pre-activation values for derivatives)
                prevInputPtr = MyAbstractLayer.DetermineInput(previousLayer);

                switch (Owner.LearningTasks)
                {
                case MyLSTMLayer.LearningTasksType.RTRL:
                {
                    // propagate delta to previous layer (one thread per previous-layer neuron)
                    m_deltaBackKernel.SetupExecution(previousLayer.Neurons);
                    m_deltaBackKernel.Run(
                        (int)previousLayer.ActivationFunction,
                        prevInputPtr,
                        previousLayer.Delta,
                        Owner.CellStateErrors,
                        Owner.PreviousCellStates,
                        Owner.InputGateActivations,
                        Owner.CellInputActivationDerivatives,
                        Owner.InputGateActivationDerivatives,
                        Owner.ForgetGateActivationDerivatives,
                        Owner.CellInputWeights,
                        Owner.InputGateWeights,
                        Owner.ForgetGateWeights,
                        Owner.OutputGateWeights,
                        Owner.OutputGateDeltas,

                        previousLayer.Neurons,
                        Owner.CellStates.Count,
                        Owner.CellsPerBlock
                        );
                    break;
                }

                case MyLSTMLayer.LearningTasksType.BPTT:
                {
                    // propagate delta to previous layer (one thread per previous-layer neuron)
                    m_deltaBackKernel.SetupExecution(previousLayer.Neurons);
                    m_deltaBackKernel.Run(
                        (int)previousLayer.ActivationFunction,
                        prevInputPtr,
                        previousLayer.Delta,

                        Owner.CellInputDeltas,
                        Owner.OutputGateDeltas,
                        Owner.ForgetGateDeltas,
                        Owner.InputGateDeltas,

                        Owner.CellInputWeights,
                        Owner.InputGateWeights,
                        Owner.ForgetGateWeights,
                        Owner.OutputGateWeights,

                        previousLayer.Neurons,
                        Owner.CellStates.Count,
                        Owner.CellsPerBlock
                        );
                    break;
                }
                }
            }
        }