Example #1
0
        // Verifies that a small hand-wired network evaluates a formula and terminates.
        // Topology: [0..2] inputs -> [3] additive -> [4] multiplicative output -> [5] termination.
        public static void testCalculation()
        {
            SlimRnn network = new SlimRnn();

            network.neurons.Add(SlimRnnNeuron.makeInputNeuron());                          // [0] input
            network.neurons.Add(SlimRnnNeuron.makeInputNeuron());                          // [1] input
            network.neurons.Add(SlimRnnNeuron.makeInputNeuron());                          // [2] input
            network.neurons.Add(new SlimRnnNeuron(SlimRnnNeuron.EnumType.ADDITIVE));       // [3] hidden
            network.neurons.Add(new SlimRnnNeuron(SlimRnnNeuron.EnumType.MULTIPLICATIVE)); // [4] output
            network.neurons.Add(new SlimRnnNeuron(SlimRnnNeuron.EnumType.ADDITIVE));       // [5] termination

            // single output neuron at index 4
            network.outputNeuronsStartIndex = 4;
            network.numberOfOutputNeurons   = 1;

            // neuron [5] fires termination once its activation exceeds 0.5
            network.terminatingNeuronIndex     = 5;
            network.terminatingNeuronThreshold = 0.5f;

            network.numberOfInputNeurons = 3;

            network.t_lim = 5; // generous bound; the actual run needs fewer steps

            network.world = new TestWorld();

            // wire inputs through the hidden neuron into the output, then output into termination
            network.neurons[0].outNeuronsWithWeights.Add(new SlimRnnNeuronWithWeight(network.neurons[0], network.neurons[3], 0.5f, 0));
            network.neurons[1].outNeuronsWithWeights.Add(new SlimRnnNeuronWithWeight(network.neurons[1], network.neurons[3], 0.81f, 0));
            network.neurons[2].outNeuronsWithWeights.Add(new SlimRnnNeuronWithWeight(network.neurons[2], network.neurons[4], 1.5f, 0));
            network.neurons[3].outNeuronsWithWeights.Add(new SlimRnnNeuronWithWeight(network.neurons[3], network.neurons[4], 0.5f, 0));
            network.neurons[4].outNeuronsWithWeights.Add(new SlimRnnNeuronWithWeight(network.neurons[4], network.neurons[5], 5000.0f, 0)); // huge weight: terminate as soon as the output is set

            network.initializeNeurons();

            IList <SlimRnnNeuronWithWeight> spreadTrace;
            double elapsedTime;
            bool   terminated;

            network.spread(out spreadTrace, out elapsedTime, out terminated);
            Debug.Assert(terminated); // the run must reach the termination neuron
        }
Example #2
0
        // Interactive smoke test for the learning algorithm: builds a tiny network with
        // some disabled ("learnable") connections and runs a single search iteration.
        // Topology: [0..2] inputs, [3] termination/output, [4] output.
        public static void interactiveCheckLearningAlgorithm()
        {
            SlimRnn network = new SlimRnn();

            network.neurons.Add(SlimRnnNeuron.makeInputNeuron());                    // [0] input
            network.neurons.Add(SlimRnnNeuron.makeInputNeuron());                    // [1] input
            network.neurons.Add(SlimRnnNeuron.makeInputNeuron());                    // [2] input
            network.neurons.Add(new SlimRnnNeuron(SlimRnnNeuron.EnumType.ADDITIVE)); // [3] termination/output
            network.neurons.Add(new SlimRnnNeuron(SlimRnnNeuron.EnumType.ADDITIVE)); // [4] output

            // outputs are the two neurons starting at index 3
            network.outputNeuronsStartIndex = 3;
            network.numberOfOutputNeurons   = 2;

            // neuron [3] doubles as the termination neuron
            network.terminatingNeuronIndex     = 3;
            network.terminatingNeuronThreshold = 0.5f;

            network.numberOfInputNeurons = 3;

            network.t_lim = 5; // generous bound; the actual run needs fewer steps

            network.world = new TestWorld();

            // zero-weight connections flagged 'true' are candidates the learner may enable;
            // the 1.0f connections are fixed parts of the initial network
            network.neurons[0].outNeuronsWithWeights.Add(new SlimRnnNeuronWithWeight(network.neurons[0], network.neurons[3], 0.0f, 0, true));
            network.neurons[0].outNeuronsWithWeights.Add(new SlimRnnNeuronWithWeight(network.neurons[0], network.neurons[4], 0.0f, 1, true));
            network.neurons[1].outNeuronsWithWeights.Add(new SlimRnnNeuronWithWeight(network.neurons[1], network.neurons[3], 1.0f, 0));
            network.neurons[1].outNeuronsWithWeights.Add(new SlimRnnNeuronWithWeight(network.neurons[1], network.neurons[4], 0.0f, 1, true));
            network.neurons[2].outNeuronsWithWeights.Add(new SlimRnnNeuronWithWeight(network.neurons[2], network.neurons[3], 1.0f, 0));

            network.initializeNeurons();

            UniversalSlimRnnSearch searcher = new UniversalSlimRnnSearch(network, new AlwaysSuccessfulTester());

            // Minimal weight/probability table for testing only.
            // A fuller table with many small negative weights (-50 .. -0.01) was sketched
            // here previously; its positive side was still TODO, so it is omitted for now.
            searcher.weightWithPropabilityTable = new List <UniversalSlimRnnSearch.WeightWithPropability> {
                new UniversalSlimRnnSearch.WeightWithPropability(1.0f, 0.4),
                new UniversalSlimRnnSearch.WeightWithPropability(-1.0f, 0.4),
                new UniversalSlimRnnSearch.WeightWithPropability(0.5f, 0.1),
                new UniversalSlimRnnSearch.WeightWithPropability(-0.5f, 0.1),
            };

            // run one search iteration and (interactively) inspect the outcome
            bool    solved;
            SlimRnn solution;

            searcher.search(1, true, out solved, out solution);
        }
Example #3
0
        // Builds the initial Solver program: a SlimRnn with a 5x5 retina input, 8 peripheral-vision
        // inputs, one constant-one input, a termination neuron, 8 retina-control outputs, and
        // 50 winner-take-all groups of 4 hidden neurons (alternating additive/multiplicative).
        // Side effects: records neuronIndexConstantOne and neuronIndexOfHiddenUnits on this instance.
        public Solver returnInitialProgram()
        {
            Solver solver = new Solver();

            solver.slimRnn = new SlimRnn();

            SlimRnn network = solver.slimRnn;

            // 5x5 retina input neurons
            for (int retinaI = 0; retinaI < 5 * 5; retinaI++)
            {
                network.neurons.Add(SlimRnnNeuron.makeInputNeuron());
            }

            // 8 neurons for peripheral vision of the retina (3x3 ring minus the center)
            for (int peripheralI = 0; peripheralI < 3 * 3 - 1; peripheralI++)
            {
                network.neurons.Add(SlimRnnNeuron.makeInputNeuron());
            }

            // remember where the constant-one input lives
            neuronIndexConstantOne = (uint)network.neurons.Count;
            network.neurons.Add(SlimRnnNeuron.makeInputNeuron());


            // output neuron used for termination
            uint neuronTerminationIndex = (uint)network.neurons.Count;
            network.neurons.Add(new SlimRnnNeuron(SlimRnnNeuron.EnumType.ADDITIVE));

            // output neurons which control the retina
            uint neuronOutputStartIndex = (uint)network.neurons.Count;
            uint numberOfOutputNeurons  = 8;

            for (int outputI = 0; outputI < numberOfOutputNeurons; outputI++)
            {
                network.neurons.Add(new SlimRnnNeuron(SlimRnnNeuron.EnumType.ADDITIVE));
            }

            network.outputNeuronsStartIndex = neuronOutputStartIndex;
            network.numberOfOutputNeurons   = numberOfOutputNeurons;

            network.terminatingNeuronIndex     = neuronTerminationIndex;
            network.terminatingNeuronThreshold = 0.5f;

            // 25 retina + 8 peripheral + 1 constant neuron
            network.numberOfInputNeurons = (5 * 5) + (9 - 1) + 1 /* constant neuron */;

            network.t_lim = double.MaxValue; // the learning algorithm sets the real time limit


            // add and initialize "hidden" neurons

            neuronIndexOfHiddenUnits = (uint)network.neurons.Count;

            uint numberOfHiddenNeuronsWtaGroups = 50;
            uint numberOfNeuronsInWtaGroup      = 4; // 4 is a good number as chosen by Schmidhuber

            for (uint groupI = 0; groupI < numberOfHiddenNeuronsWtaGroups; groupI++)
            {
                for (int neuronI = 0; neuronI < numberOfNeuronsInWtaGroup; neuronI++)
                {
                    // alternate types within each group: even indices additive, odd multiplicative
                    bool isEvenNeuron = (neuronI % 2) == 0;
                    SlimRnnNeuron.EnumType neuronType = isEvenNeuron ? SlimRnnNeuron.EnumType.ADDITIVE : SlimRnnNeuron.EnumType.MULTIPLICATIVE;

                    SlimRnnNeuron hiddenNeuron = new SlimRnnNeuron(neuronType);
                    hiddenNeuron.winnerTakesAllGroup = groupI;

                    network.neurons.Add(hiddenNeuron);
                }
            }

            network.initializeNeurons();

            // set initial network


            { // wire the central retina sensor (2,2) to the termination neuron
                int retinaX = 0;
                int retinaY = 0;

                int absoluteRetinaIndexX = 2 + retinaX;
                int absoluteRetinaIndexY = 2 + retinaY;

                // row-major index into the 5x5 retina block which starts at neuron 0
                int retinaInputNeuronIndex = 0 + 5 * absoluteRetinaIndexY + absoluteRetinaIndexX;

                network.neurons[retinaInputNeuronIndex].outNeuronsWithWeights.Add(new SlimRnnNeuronWithWeight(network.neurons[retinaInputNeuronIndex], network.neurons[(int)neuronTerminationIndex], 0.55f, 0));
            }
            return(solver);
        }