Example #1
        public void RNN_Unit_Test_1()
        {
            var input = new Supervised.NeuralNetwork.Neuron()
            {
                ActivationFunction = new Math.Functions.Ident(),
                Input = 1.0
            };

            // hh = 0.00845734

            var gru = new Supervised.NeuralNetwork.Recurrent.RecurrentNeuron()
            {
                ActivationFunction = new Math.Functions.Tanh(),
                MemoryGate = new Math.Functions.Logistic(),
                ResetGate = new Math.Functions.Logistic(),
                H = 0,
                Rb = 0.0,
                Zb = 0.0,
                Rh = 0.00822019,
                Rx = -0.00808389,
                Zh = 0.00486728,
                Zx = -0.0040537
            };

            // Z should equal approx. 0.49898658
            // R should equal approx. 0.49797904

            // htP should equal approx. 0.00406561
            // H should equal approx. 0.00202869

            Supervised.NeuralNetwork.Edge.Create(input, gru, 0.00845734);

            double output = gru.Evaluate();

            Almost.Equal(0.00422846, output, 0.002, "First pass");

            gru.Output = 1.5;

            double output2 = gru.Evaluate();

            Almost.Equal(0.00739980, output2, 0.002, "Second pass");
        }
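
The asserted first-pass value can be recovered by hand from a standard GRU-style recurrence written with this neuron's field names: R = σ(Rx·x + Rh·H + Rb), Z = σ(Zx·x + Zh·H + Zb), htP = tanh(x + R·H·Hh), H ← (1 − Z)·H + Z·htP. This is an assumption about RecurrentNeuron's update rule rather than the library source, and the method name below (RNN_Unit_Test_1_ByHand) is hypothetical. With H = 0 and zero biases both gates sit near 0.5, so the first pass reduces to roughly 0.5 · tanh(0.00845734); the in-test comments quoting 0.49898658 and 0.49797904 appear to have been computed from the raw input 1.0 rather than the edge-weighted input, but either way the gates are approximately 0.5 here. The second pass additionally depends on Hh, which this test leaves at its default, so only the first pass is recomputed.

        public void RNN_Unit_Test_1_ByHand()
        {
            // Hand recomputation under an assumed GRU-style recurrence (not the library source).
            double x = 1.0 * 0.00845734;                        // edge-weighted input
            double H = 0.0;                                     // initial hidden state
            double Zx = -0.0040537, Zh = 0.00486728, Zb = 0.0;  // update-gate weights
            double Rx = -0.00808389, Rh = 0.00822019, Rb = 0.0; // reset-gate weights

            double Z = 1.0 / (1.0 + System.Math.Exp(-(Zx * x + Zh * H + Zb))); // ~0.5 (H = 0, tiny pre-activation)
            double R = 1.0 / (1.0 + System.Math.Exp(-(Rx * x + Rh * H + Rb))); // ~0.5
            double htP = System.Math.Tanh(x + R * H);                          // ~0.0084571 (the R·H·Hh term vanishes since H = 0)
            double Hnew = (1.0 - Z) * H + Z * htP;                             // ~0.0042285

            Almost.Equal(0.00422846, Hnew, 0.002, "First pass (by hand)");
        }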
Example #2
        public void RNN_Unit_Test_1()
        {
            var input = new Supervised.NeuralNetwork.Neuron()
            {
                ActivationFunction = new Math.Functions.Ident(),
                Input = 1.0
            };

            // hh = 0.00845734

            var gru = new Supervised.NeuralNetwork.Recurrent.RecurrentNeuron()
            {
                ActivationFunction = new Math.Functions.Tanh(),
                UpdateGate         = new Math.Functions.Logistic(),
                ResetGate          = new Math.Functions.Logistic(),
                H  = 0,
                Rb = 0.0,
                Zb = 0.0,
                Rh = 0.00822019,
                Rx = -0.00808389,
                Zh = 0.00486728,
                Zx = -0.0040537
            };

            // Z should equal approx. 0.49898658
            // R should equal approx. 0.49797904

            // htP should equal approx. 0.00406561
            // H should equal approx. 0.00202869

            Supervised.NeuralNetwork.Edge.Create(input, gru, 0.00845734);

            double output = gru.Evaluate();

            Almost.Equal(0.00422846, output, 0.002, "First pass");

            gru.Output = 1.5;

            double output2 = gru.Evaluate();

            Almost.Equal(0.00739980, output2, 0.002, "Second pass");
        }
Example #3
        public void RNN_Unit_Test_2()
        {
            var input = new Supervised.NeuralNetwork.Neuron()
            {
                ActivationFunction = new Math.Functions.Ident()
            };

            input.Input = 10.0;

            var gru = new Supervised.NeuralNetwork.Recurrent.RecurrentNeuron()
            {
                ActivationFunction = new Math.Functions.Tanh(),
                UpdateGate         = new Math.Functions.Logistic(),
                ResetGate          = new Math.Functions.Logistic(),
                H  = 0.0543,
                Rb = 1.5,
                Zb = -1.5,
                Rh = -0.00111453,
                Rx = 0.00112138,
                Zh = 0.00899571,
                Zx = 0.00999628,
                Hh = 0.00423760
            };

            Supervised.NeuralNetwork.Edge.Create(input, gru, 1.0);

            double output = gru.Evaluate();

            Almost.Equal(0.24144243, output, 0.00001, "1: Hidden state");
            Almost.Equal(0.81923206, gru.R, 0.00001, "1: Reset value");
            Almost.Equal(0.19788773, gru.Z, 0.00001, "1: Update value");

            input.Input = 20.0;

            double output2 = gru.Evaluate();

            Almost.Equal(0.40416687, output2, 0.00001, "Second pass");
            Almost.Equal(0.82085611, gru.R, 0.00001, "2: Reset value");
            Almost.Equal(0.21451824, gru.Z, 0.00001, "2: Update value");
        }
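
For reference, the six values asserted in this version of RNN_Unit_Test_2 can be reproduced by hand from the same assumed GRU-style recurrence (again an assumption, not the library source; the method name RNN_Unit_Test_2_ByHand is hypothetical). With an edge weight of 1.0 the weighted input equals the raw input, and because x is 10 or larger the candidate state tanh(x + R·H·Hh) saturates to essentially 1, so each step is dominated by the interpolation H ← (1 − Z)·H + Z·1.

        public void RNN_Unit_Test_2_ByHand()
        {
            // Hand recomputation under an assumed GRU-style recurrence (not the library source).
            double H = 0.0543, Rb = 1.5, Zb = -1.5;
            double Rh = -0.00111453, Rx = 0.00112138;
            double Zh = 0.00899571, Zx = 0.00999628;
            double Hh = 0.00423760;

            // First pass: x = 10.0 (edge weight is 1.0, so the weighted input equals the raw input).
            double x = 10.0;
            double R = 1.0 / (1.0 + System.Math.Exp(-(Rx * x + Rh * H + Rb))); // ~0.81923206
            double Z = 1.0 / (1.0 + System.Math.Exp(-(Zx * x + Zh * H + Zb))); // ~0.19788773
            double htP = System.Math.Tanh(x + R * H * Hh);                     // ~1.0 (saturated)
            H = (1.0 - Z) * H + Z * htP;                                       // ~0.24144243

            // Second pass: x = 20.0, carrying the updated hidden state forward.
            x = 20.0;
            R = 1.0 / (1.0 + System.Math.Exp(-(Rx * x + Rh * H + Rb)));        // ~0.82085611
            Z = 1.0 / (1.0 + System.Math.Exp(-(Zx * x + Zh * H + Zb)));        // ~0.21451824
            htP = System.Math.Tanh(x + R * H * Hh);
            H = (1.0 - Z) * H + Z * htP;                                       // ~0.40416687

            Almost.Equal(0.40416687, H, 0.00001, "Second pass (by hand)");
        }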
Example #4
        public void RNN_Unit_Test_2()
        {
            var input = new Supervised.NeuralNetwork.Neuron()
            {
                ActivationFunction = new Math.Functions.Ident(),
                Output             = 10.0
            };

            var gru = new Supervised.NeuralNetwork.Recurrent.RecurrentNeuron()
            {
                ActivationFunction = new Math.Functions.Tanh(),
                MemoryGate         = new Math.Functions.Logistic(),
                ResetGate          = new Math.Functions.Logistic(),
                H  = 0.0543,
                Rb = 1.5,
                Zb = -1.5,
                Rh = -0.00111453,
                Rx = 0.00112138,
                Zh = 0.00899571,
                Zx = 0.00999628,
            };

            Supervised.NeuralNetwork.Edge.Create(input, gru, 1.0);

            double output = gru.Evaluate();

            //Assert.Equal(0.24144242, output, 0.002, "First pass");
            Almost.Equal(0.18775503, output, 0.002, "First pass");

            input.Output = 20.0;

            double output2 = gru.Evaluate();

            //Assert.Equal(0.40416686, output2, 0.002, "Second pass");
            Almost.Equal(0.30399969, output2, 0.002, "Second pass");
        }