// Inheritance: BasicRandomizer
        /// <summary>
        /// Randomize the synapse that connects <paramref name="fromLayer"/> to the
        /// layer above it, using the Nguyen-Widrow scaling factor derived from the
        /// target layer's activation-function range.
        /// </summary>
        /// <param name="network">The network whose weights are randomized.</param>
        /// <param name="fromLayer">The starting layer; the target layer is fromLayer + 1.</param>
        private void RandomizeSynapse(BasicNetwork network, int fromLayer)
        {
            int toLayer = fromLayer + 1;
            int toCount = network.GetLayerNeuronCount(toLayer);
            int fromCount = network.GetLayerNeuronCount(fromLayer);
            int fromTotalCount = network.GetLayerTotalNeuronCount(fromLayer);

            // The output range of the target layer's activation function bounds beta.
            IActivationFunction activation = network.GetActivation(toLayer);
            double low = CalculateRange(activation, Double.NegativeInfinity);
            double high = CalculateRange(activation, Double.PositiveInfinity);

            // Nguyen-Widrow beta: 0.7 * h^(1/n), normalized by the activation span.
            double beta = 0.7d * Math.Pow(toCount, 1d / fromCount) / (high - low);

            // A total count larger than the regular count means a bias neuron exists;
            // its weight sits at index fromCount.
            bool hasBias = fromCount != fromTotalCount;

            for (int toNeuron = 0; toNeuron < toCount; toNeuron++)
            {
                if (hasBias)
                {
                    network.SetWeight(fromLayer, fromCount, toNeuron,
                                      RangeRandomizer.Randomize(-beta, beta));
                }

                for (int fromNeuron = 0; fromNeuron < fromCount; fromNeuron++)
                {
                    network.SetWeight(fromLayer, fromNeuron, toNeuron,
                                      RangeRandomizer.Randomize(0, beta));
                }
            }
        }
        /// <summary>
        /// Compares four weight-initialization strategies by evaluating each one
        /// against the same XOR feed-forward network and printing the result.
        /// </summary>
        /// <param name="app">The hosting example interface (not used here).</param>
        public void Execute(IExampleInterface app)
        {
            // One instance of each randomizer under comparison.
            var byRange = new RangeRandomizer(-1, 1);
            var byNguyenWidrow = new NguyenWidrowRandomizer(-1, 1);
            var byFanIn = new FanInRandomizer();
            var byGaussian = new GaussianRandomizer(0, 1);

            var training = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);

            // The same network instance is shared by all four evaluations.
            BasicNetwork network = EncogUtility.SimpleFeedForward(2, 10, 0, 1, true);

            Console.WriteLine("Range random: " + EvaluateRandomizer(byRange, network, training));
            Console.WriteLine("Nguyen-Widrow: " + EvaluateRandomizer(byNguyenWidrow, network, training));
            Console.WriteLine("Fan-In: " + EvaluateRandomizer(byFanIn, network, training));
            Console.WriteLine("Gaussian: " + EvaluateRandomizer(byGaussian, network, training));
        }
// Example 3
        /// <summary>
        /// Used internally to randomize a neuron. Usually called from
        /// randomizeNeuron or stimulateNeuron. Rebuilds the flat weight array,
        /// re-randomizing every weight attached to the target neuron.
        /// </summary>
        ///
        /// <param name="targetLayer">The target layer.</param>
        /// <param name="neuron">The target neuron.</param>
        /// <param name="useRange">True if range randomization should be used;
        /// false selects percent-based distortion.</param>
        /// <param name="low">The low-end of the range (range mode only).</param>
        /// <param name="high">The high-end of the range (range mode only).</param>
        /// <param name="usePercent">True if percent stimulation should be used
        /// (currently not read by this method).</param>
        /// <param name="percent">The percent to stimulate by (distort mode only).</param>
        private void RandomizeNeuron(int targetLayer, int neuron,
                                     bool useRange, double low, double high,
                                     bool usePercent, double percent)
        {
            IRandomizer randomizer = useRange
                ? (IRandomizer) new RangeRandomizer(low, high)
                : new Distort(percent);

            // Fail fast on an invalid layer/neuron combination.
            _network.ValidateNeuron(targetLayer, neuron);

            // Work directly against the flat network's weight array.
            FlatNetwork flat = _network.Structure.Flat;

            // Build a replacement array of the same size as the current weights.
            var rebuilt = new double[flat.Weights.Length];
            int index = 0;

            // Flat weights are written from the last synapse back to the first,
            // so walk the layers in descending order.
            for (int fromLayer = flat.LayerCounts.Length - 2; fromLayer >= 0; fromLayer--)
            {
                int fromNeuronCount = _network.GetLayerTotalNeuronCount(fromLayer);
                int toNeuronCount = _network.GetLayerNeuronCount(fromLayer + 1);
                int toLayer = fromLayer + 1;

                for (int toNeuron = 0; toNeuron < toNeuronCount; toNeuron++)
                {
                    for (int fromNeuron = 0; fromNeuron < fromNeuronCount; fromNeuron++)
                    {
                        // Only weights that touch the target neuron (as source
                        // or destination) are re-randomized.
                        bool touchesTarget =
                            (toLayer == targetLayer && toNeuron == neuron)
                            || (fromLayer == targetLayer && fromNeuron == neuron);

                        double weight = _network.GetWeight(fromLayer,
                                                           fromNeuron, toNeuron);

                        rebuilt[index++] = touchesTarget
                            ? randomizer.Randomize(weight)
                            : weight;
                    }
                }
            }

            // Swap in the rebuilt weight array.
            flat.Weights = rebuilt;
        }
// Example 4
 /// <summary>
 ///     Builds an array of random inputs by feeding values from the .NET
 ///     framework's <see cref="Random"/> through an Encog RangeRandomizer —
 ///     a quick and easy way to produce test data for training networks.
 /// </summary>
 /// <param name="number">The number of random values to generate.</param>
 /// <returns>An array of <paramref name="number"/> randomized values.</returns>
 public static double[] MakeRandomInputs(int number)
 {
     var frameworkRandom = new Random();
     var encogRandomizer = new RangeRandomizer(-1, 1);

     var inputs = new double[number];
     for (int i = 0; i < inputs.Length; i++)
     {
         inputs[i] = encogRandomizer.Randomize(frameworkRandom.NextDouble());
     }
     return inputs;
 }
// Example 5
 /// <summary>
 ///     Decompiled, obfuscated routine. Judging from the calls it makes
 ///     (ValidateNeuron, GetWeight, GetLayerTotalNeuronCount/GetLayerNeuronCount,
 ///     and finally replacing flat.Weights with a rebuilt array), this appears to
 ///     be the obfuscated form of a RandomizeNeuron-style method: it rebuilds the
 ///     flat weight array, re-randomizing the weights attached to one target
 ///     neuron. The obfuscator has flattened the loops into goto-driven control
 ///     flow and inserted opaque predicates (e.g. unsigned comparisons that are
 ///     always true, and "0 != 0" branches that are never taken).
 ///     Parameter mapping inferred from usage — TODO confirm:
 ///       xf8a5a30e3fab4279 = target layer, xad55484e91d7ad5a = target neuron,
 ///       xdef033fa0cd8af9a = use-range flag, xd12d1dba8a023d95 = low,
 ///       x628ea9b89457a2a9 = high, x1fe041039f050e2f = use-percent flag
 ///       (never read as a flag here), x4afa7e85b5b4d006 = percent.
 ///     Left byte-for-byte untouched: the goto graph is too intricate to
 ///     restructure safely without the original source.
 /// </summary>
 private void x107a35a18a760853(int xf8a5a30e3fab4279, int xad55484e91d7ad5a, bool xdef033fa0cd8af9a, double xd12d1dba8a023d95, double x628ea9b89457a2a9, bool x1fe041039f050e2f, double x4afa7e85b5b4d006)
 {
     // Locals hoisted to the top by the decompiler.
     IRandomizer randomizer;
     FlatNetwork flat;
     double[] numArray;          // replacement weight array
     int num;                    // write index into numArray
     int num2;                   // from-layer index (walks downward)
     int layerTotalNeuronCount;  // neurons (incl. bias) in the from-layer
     int layerNeuronCount;       // neurons in the to-layer
     int num5;                   // to-layer index (num2 + 1)
     int num6;                   // to-neuron index
     int num7;                   // from-neuron index
     bool flag;                  // true when the current weight touches the target neuron
     double num8;                // current weight value
     if (!xdef033fa0cd8af9a)
     {
         // Distort mode (use-range flag false).
         randomizer = new Distort(x4afa7e85b5b4d006);
         goto Label_0232;
     }
     goto Label_02A0;
     Label_000B:
     if (num6 < layerNeuronCount)
     {
         num7 = 0;
         // Opaque predicate: an unsigned difference is always >= 0.
         if ((((uint) num2) - ((uint) num5)) >= 0)
         {
             goto Label_0041;
         }
         if ((((uint) num2) + ((uint) num)) <= uint.MaxValue)
         {
             goto Label_0232;
         }
         goto Label_0220;
     }
     num2--;
     Label_001A:
     // Outer loop: walk the layers from last synapse down to the first.
     if (num2 >= 0)
     {
         layerTotalNeuronCount = this._x87a7fc6a72741c2e.GetLayerTotalNeuronCount(num2);
         layerNeuronCount = this._x87a7fc6a72741c2e.GetLayerNeuronCount(num2 + 1);
         if ((((uint) layerTotalNeuronCount) - ((uint) x1fe041039f050e2f)) > uint.MaxValue)
         {
             goto Label_000B;
         }
         goto Label_0283;
     }
     // All layers processed: swap the rebuilt weights into the flat network.
     flat.Weights = numArray;
     return;
     Label_002E:
     if (flag)
     {
         goto Label_0052;
     }
     Label_0032:
     // Store the (possibly re-randomized) weight and advance.
     numArray[num++] = num8;
     num7++;
     Label_0041:
     if (num7 < layerTotalNeuronCount)
     {
         flag = false;
         if ((((uint) xad55484e91d7ad5a) - ((uint) layerNeuronCount)) >= 0)
         {
             if (num5 == xf8a5a30e3fab4279)
             {
                 goto Label_010F;
             }
             if ((((uint) num5) | 0xfffffffe) != 0)
             {
                 goto Label_00AF;
             }
             goto Label_02DE;
         }
         if (((uint) layerTotalNeuronCount) >= 0)
         {
             goto Label_02A0;
         }
         if (((uint) x1fe041039f050e2f) >= 0)
         {
             goto Label_0232;
         }
         goto Label_0175;
     }
     Label_004A:
     num6++;
     goto Label_000B;
     Label_0052:
     // Re-randomize a weight attached to the target neuron.
     num8 = randomizer.Randomize(num8);
     goto Label_0032;
     Label_0063:
     if (num7 == xad55484e91d7ad5a)
     {
         goto Label_00DB;
     }
     Label_006B:
     // Fetch the current weight (fromLayer=num2, fromNeuron=num7, toNeuron=num6).
     num8 = this._x87a7fc6a72741c2e.GetWeight(num2, num7, num6);
     goto Label_002E;
     Label_00AF:
     if (num2 != xf8a5a30e3fab4279)
     {
         goto Label_006B;
     }
     // Dead branch: 0 != 0 is never true.
     if (0 != 0)
     {
         goto Label_002E;
     }
     if (((uint) x4afa7e85b5b4d006) <= uint.MaxValue)
     {
         goto Label_0063;
     }
     if (((uint) num8) < 0)
     {
         goto Label_006B;
     }
     Label_00DB:
     flag = true;
     if (((uint) layerTotalNeuronCount) >= 0)
     {
         if ((((uint) xdef033fa0cd8af9a) | uint.MaxValue) != 0)
         {
             goto Label_006B;
         }
         if (((uint) layerTotalNeuronCount) <= uint.MaxValue)
         {
             goto Label_0063;
         }
         if (((uint) layerTotalNeuronCount) >= 0)
         {
             goto Label_02DE;
         }
         goto Label_02A0;
     }
     Label_010F:
     if (num6 != xad55484e91d7ad5a)
     {
         goto Label_00AF;
     }
     flag = true;
     goto Label_006B;
     Label_0175:
     // Start a new layer: reset the to-layer index and to-neuron counter.
     num5 = num2 + 1;
     if (((uint) x4afa7e85b5b4d006) < 0)
     {
         goto Label_0041;
     }
     num6 = 0;
     goto Label_000B;
     Label_0220:
     // Allocate the replacement weight array and start from the last synapse.
     numArray = new double[flat.Weights.Length];
     num = 0;
     num2 = flat.LayerCounts.Length - 2;
     goto Label_001A;
     Label_0232:
     if ((((uint) num5) + ((uint) xd12d1dba8a023d95)) > uint.MaxValue)
     {
         goto Label_004A;
     }
     Label_024D:
     // Validate the target, then grab the flat network to rebuild.
     this._x87a7fc6a72741c2e.ValidateNeuron(xf8a5a30e3fab4279, xad55484e91d7ad5a);
     flat = this._x87a7fc6a72741c2e.Structure.Flat;
     if ((((uint) layerNeuronCount) - ((uint) xf8a5a30e3fab4279)) >= 0)
     {
         goto Label_0220;
     }
     Label_0283:
     if ((((uint) layerTotalNeuronCount) - ((uint) num8)) <= uint.MaxValue)
     {
         goto Label_0175;
     }
     goto Label_0041;
     Label_02A0:
     // Range mode (use-range flag true).
     randomizer = new RangeRandomizer(xd12d1dba8a023d95, x628ea9b89457a2a9);
     goto Label_024D;
     Label_02DE:
     if (0 == 0)
     {
         goto Label_0052;
     }
     goto Label_000B;
 }
// Example 6
 /// <summary>
 ///     Constructs a Nguyen-Widrow randomizer seeded with the given value.
 ///     The seed is passed to the internal range randomizer and also applied
 ///     to the shared thread-safe random source.
 /// </summary>
 /// <param name="seed">The seed for the random number generators.</param>
 public NguyenWidrowRandomizer(int seed)
 {
     // NOTE(review): RangeRandomizer is constructed elsewhere in this file with
     // (min, max) arguments; this call assumes a single-argument seed overload
     // exists — confirm against the RangeRandomizer declaration.
     this.rangeRandomizer = new RangeRandomizer(seed);
     ThreadSafeRandom.SetSeed(seed);
 }