Example #1
        public override void InitMem()
        {
            // Initialise the memory of the forward and backward sub-networks first
            forwardRNN.InitMem();
            backwardRNN.InitMem();

            // Create and initialise the weights from the hidden to the output layer;
            // these are ordinary (non-recurrent) weights
            Hidden2OutputWeight = new Matrix<double>(L2, L1);

            for (int i = 0; i < Hidden2OutputWeight.Height; i++)
            {
                for (int j = 0; j < Hidden2OutputWeight.Width; j++)
                {
                    Hidden2OutputWeight[i][j] = RandInitWeight();
                }
            }

            // Per-weight learning rates for the hidden-to-output weights,
            // allocated with the same shape as the weight matrix itself
            Hidden2OutputWeightLearningRate = new Matrix<double>(L2, L1);
        }
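
The method above depends on several members of its surrounding class. For reference, here is a minimal, self-contained sketch of that context: only the member names actually used in the example (forwardRNN, backwardRNN, L1, L2, Hidden2OutputWeight, Hidden2OutputWeightLearningRate, RandInitWeight) come from the snippet; the Matrix<double> implementation, the RNN base class, the BiRNN constructor, and the RandInitWeight formula are illustrative assumptions, not the library's actual definitions.

// Hypothetical class skeleton for the example above; names not present in the
// snippet are assumptions made for illustration.
using System;

public class Matrix<T>
{
    private readonly T[][] rows;

    public int Height { get; }
    public int Width { get; }

    public Matrix(int height, int width)
    {
        Height = height;
        Width = width;
        rows = new T[height][];
        for (int i = 0; i < height; i++)
        {
            rows[i] = new T[width];
        }
    }

    // Row indexer, so a weight can be addressed as matrix[i][j]
    public T[] this[int i] => rows[i];
}

public abstract class RNN
{
    // L1/L2 appear to be the hidden and output layer sizes, inferred from how
    // Hidden2OutputWeight is allocated above (L2 rows, L1 columns).
    protected int L1;
    protected int L2;

    protected static readonly Random Rand = new Random();

    public abstract void InitMem();

    // Small random value used to initialise a single weight (assumed formula)
    protected double RandInitWeight()
    {
        return (Rand.NextDouble() - 0.5) * 0.2;
    }
}

public class BiRNN : RNN
{
    protected readonly RNN forwardRNN;
    protected readonly RNN backwardRNN;

    protected Matrix<double> Hidden2OutputWeight;
    protected Matrix<double> Hidden2OutputWeightLearningRate;

    public BiRNN(RNN forward, RNN backward)
    {
        forwardRNN = forward;
        backwardRNN = backward;
    }

    public override void InitMem()
    {
        // ... body as shown in the example above ...
    }
}

Allocating Hidden2OutputWeightLearningRate with the same dimensions as Hidden2OutputWeight suggests the training loop keeps a separate learning rate per weight (an AdaGrad-style scheme), though that is an inference from the field's name and shape rather than something shown in this snippet.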