/// <summary>
/// Builds a recursive network: composition weights, a scoring row, and a bias vector.
/// </summary>
/// <param name="inputSize">Dimensionality of each node vector.</param>
/// <param name="learningRate">Step size stored for later training updates.</param>
/// <param name="std">Standard deviation of the random weight initialization.</param>
public RecursiveNeuralNetwork(int inputSize, float learningRate, float std)
        {
            this.learningRate = learningRate;

            // Composition matrix maps the two stacked child vectors (width 2 * inputSize)
            // back down to a single inputSize-dimensional parent vector.
            w      = NumMath.Random(inputSize, 2 * inputSize, std);

            // Scoring row collapses a node vector to one scalar.
            wScore = NumMath.Random(1, inputSize, std);

            // Bias initialized to all ones.
            b      = NumMath.Repeat(inputSize, 1);
        }
        /// <summary>
        /// Recurrent unit without an output projection: input-to-hidden and
        /// hidden-to-hidden weights plus a zero hidden bias.
        /// </summary>
        /// <param name="input">Input vector dimensionality.</param>
        /// <param name="hidden">Hidden state dimensionality.</param>
        /// <param name="learning_rate">Step size stored for later training updates.</param>
        /// <param name="std">Standard deviation of the random weight initialization.</param>
        public RecurrentUnity(int input, int hidden, float learning_rate, float std)
        {
            this.learning_rate = learning_rate;
            input_size  = input;
            hidden_size = hidden;

            // Weight matrices: x -> h and h -> h; hidden bias starts at zero.
            Wxt = NumMath.Random(hidden, input, std);
            Wtt = NumMath.Random(hidden, hidden, std);
            bt  = NumMath.Repeat(hidden, 0);

            // Fresh optimizer state for a fresh set of parameters.
            ResetAdagradParams();
        }
Esempio n. 3
0
        /// <summary>
        /// Stress-tests device (CUDA) matrix multiplication from multiple threads:
        /// multiplies a 1000x1000 random matrix by itself once per iteration in parallel.
        /// </summary>
        /// <param name="args">Unused command-line arguments.</param>
        private static void Main(string[] args)
        {
            //Performance();
            const int Iterations = 200; // was duplicated as a magic number in two places

            ProcessingDevice.Device = DeviceType.CUDA;
            var m0  = NumMath.Random(1000, 1000, 1f);
            var arr = new Array<FloatArray2D>(Iterations);

            // Warm-up multiply before the parallel loop; the original bound this to an
            // unused local `a`. The compute is kept (presumably device/kernel warm-up —
            // TODO confirm) but the dead name is replaced with a discard.
            _ = m0 * m0;

            Parallel.For(0, Iterations, i =>
            {
                Console.WriteLine($"Computing {i}");
                arr[i] = m0 * m0;
            });
        }
Esempio n. 4
0
        /// <summary>
        /// Recursive network with a context path and a hidden scoring layer. Allocates
        /// the weight matrices, biases, and per-parameter accumulators for both the
        /// Adagrad-style (m*) and Adam (Adam_m_*/Adam_v_*) optimizer states.
        /// </summary>
        /// <param name="inputSize">Dimensionality of node/context vectors.</param>
        /// <param name="learningRate">Step size stored for later training updates.</param>
        /// <param name="std">Standard deviation of the random weight initialization.</param>
        /// <param name="hiddenSize">
        /// Width of the hidden scoring layer. Previously hard-coded to 20; the default
        /// preserves the original behavior for existing callers.
        /// </param>
        public RecursiveNeuralNetworkWithContext(int inputSize, float learningRate, float std, int hiddenSize = 20)
        {
            int hSize = hiddenSize;

            // Weights: left/right child composition, context transform, two hidden
            // projections, and a single scoring row (tiny std keeps initial scores ~0).
            wpl = NumMath.Random(inputSize, inputSize, std);
            wpr = NumMath.Random(inputSize, inputSize, std);
            wC  = NumMath.Random(inputSize, inputSize, std);
            wHP = NumMath.Random(hSize, inputSize, std);
            wHC = NumMath.Random(hSize, inputSize, std);
            wS  = NumMath.Random(1, hSize, 1e-10f);

            // Biases start at 1/size (uniform-ish activation at step zero).
            bC = NumMath.Repeat(inputSize, 1f / (float)inputSize);
            bH = NumMath.Repeat(hSize, 1f / (float)hSize);
            bP = NumMath.Repeat(inputSize, 1f / (float)inputSize);

            // Adagrad-style squared-gradient accumulators, one per parameter tensor.
            mwpl = NumMath.Array(inputSize, inputSize);
            mwpr = NumMath.Array(inputSize, inputSize);
            mwC  = NumMath.Array(inputSize, inputSize);
            mwHP = NumMath.Array(hSize, inputSize);
            mwHC = NumMath.Array(hSize, inputSize);
            mwS  = NumMath.Array(1, hSize);

            mbC = NumMath.Array(inputSize);
            mbH = NumMath.Array(hSize);
            mbP = NumMath.Array(inputSize);

            // Adam first-moment (m) accumulators.
            Adam_m_wpl = NumMath.Array(inputSize, inputSize);
            Adam_m_wpr = NumMath.Array(inputSize, inputSize);
            Adam_m_wC  = NumMath.Array(inputSize, inputSize);
            Adam_m_wHP = NumMath.Array(hSize, inputSize);
            Adam_m_wHC = NumMath.Array(hSize, inputSize);
            Adam_m_ws  = NumMath.Array(1, hSize);

            Adam_m_bC = NumMath.Array(inputSize);
            Adam_m_bH = NumMath.Array(hSize);
            Adam_m_bP = NumMath.Array(inputSize);

            // Adam second-moment (v) accumulators.
            Adam_v_wpl = NumMath.Array(inputSize, inputSize);
            Adam_v_wpr = NumMath.Array(inputSize, inputSize);
            Adam_v_wC  = NumMath.Array(inputSize, inputSize);
            Adam_v_wHP = NumMath.Array(hSize, inputSize);
            Adam_v_wHC = NumMath.Array(hSize, inputSize);
            Adam_v_ws  = NumMath.Array(1, hSize);

            Adam_v_bC = NumMath.Array(inputSize);
            Adam_v_bH = NumMath.Array(hSize);
            Adam_v_bP = NumMath.Array(inputSize);

            _learningRate = learningRate;
        }
Esempio n. 5
0
        /// <summary>
        /// Vanilla RNN: input-to-hidden, hidden-to-hidden, and hidden-to-output
        /// weights plus zero biases.
        /// </summary>
        /// <param name="input">Input vector dimensionality.</param>
        /// <param name="output">Output vector dimensionality.</param>
        /// <param name="hidden">Hidden state dimensionality.</param>
        /// <param name="learning_rate">Step size stored for later training updates.</param>
        /// <param name="std">Standard deviation of the random weight initialization.</param>
        public RecurrentNeuralNetwork(int input, int output, int hidden, float learning_rate, float std)
        {
            this.learning_rate = learning_rate;
            input_size  = input;
            output_size = output;
            hidden_size = hidden;

            // Weight matrices: x -> h, h -> h, h -> y.
            Wxt = NumMath.Random(hidden, input, std);
            Wtt = NumMath.Random(hidden, hidden, std);
            Why = NumMath.Random(output, hidden, std);

            // Biases start at zero.
            bt = NumMath.Repeat(hidden, 0);
            by = NumMath.Repeat(output, 0);

            // Fresh optimizer state for a fresh set of parameters.
            ResetAdagradParams();
        }
        /// <summary>
        /// Recursive unit with left/right child weights, a deep transform, and
        /// matching optimizer accumulators.
        /// </summary>
        /// <param name="inputSize">Dimensionality of node vectors.</param>
        /// <param name="learningRate">Step size stored for later training updates.</param>
        /// <param name="std">Standard deviation of the random weight initialization.</param>
        /// <remarks>
        /// FIX: the original assigned <c>wpr</c> (and <c>mwpr</c>) twice in a row,
        /// so the first initialization was discarded. The redundant assignments are
        /// removed. NOTE(review): the duplicate may have been meant for a third
        /// weight matrix that was never declared — confirm against the training code.
        /// </remarks>
        public RecursiveNeuralUnity(int inputSize, float learningRate, float std)
        {
            wpl   = NumMath.Random(inputSize, inputSize, std);
            wpr   = NumMath.Random(inputSize, inputSize, std);
            wDeep = NumMath.Random(inputSize, inputSize, std);
            b     = NumMath.Repeat(inputSize, 1);

            // Accumulators initialized with std = 0 (presumably all zeros — verify
            // NumMath.Random's behavior at zero deviation).
            mwpl   = NumMath.Random(inputSize, inputSize, 0);
            mwpr   = NumMath.Random(inputSize, inputSize, 0);
            mwDeep = NumMath.Random(inputSize, inputSize, 0);
            mb     = NumMath.Repeat(inputSize, 0);

            this.learningRate = learningRate;
        }
Esempio n. 7
0
        /// <summary>
        /// LSTM cell: four gate weight matrices over the concatenated [x; h] vector
        /// and zero gate biases.
        /// </summary>
        /// <param name="input">Input vector dimensionality.</param>
        /// <param name="hidden">Hidden/cell state dimensionality.</param>
        /// <param name="learning_rate">Step size stored for later training updates.</param>
        /// <param name="std">Standard deviation of the random weight initialization.</param>
        public LSTM(int input, int hidden, float learning_rate, float std)
        {
            this.learning_rate = learning_rate;
            input_size  = input;
            hidden_size = hidden;

            // Every gate reads the concatenated [input; hidden] vector.
            int concat = input + hidden;

            // Forget/input/output gate weights are shifted by +0.5; the candidate
            // (cell) weights Wc stay zero-centered. Presumably this biases the gate
            // pre-activations positive early in training — TODO confirm intent.
            Wf = NumMath.Random(hidden, concat, std) + .5f;
            Wi = NumMath.Random(hidden, concat, std) + .5f;
            Wc = NumMath.Random(hidden, concat, std);
            Wo = NumMath.Random(hidden, concat, std) + .5f;

            // All gate biases start at zero.
            Bf = NumMath.Repeat(hidden, 0);
            Bi = NumMath.Repeat(hidden, 0);
            Bc = NumMath.Repeat(hidden, 0);
            Bo = NumMath.Repeat(hidden, 0);

            // Fresh optimizer state for a fresh set of parameters.
            ResetAdagradParams();
        }