A randomizer that distorts what is already present in the neural network.
Inheritance: BasicRandomizer
Example #1
0
 /// <summary>
 /// Randomize the weights attached to a single neuron, leaving every other
 /// weight unchanged. This is the obfuscated/decompiled form of
 /// RandomizeNeuron; the goto-spaghetti and opaque predicates (e.g.
 /// "if (0 != 0)", "(uint)x | uint.MaxValue") have been removed and the
 /// original structured control flow restored. The signature is unchanged.
 /// </summary>
 ///
 /// <param name="xf8a5a30e3fab4279">The target layer.</param>
 /// <param name="xad55484e91d7ad5a">The target neuron.</param>
 /// <param name="xdef033fa0cd8af9a">True if range randomization should be used.</param>
 /// <param name="xd12d1dba8a023d95">The low-end of the range.</param>
 /// <param name="x628ea9b89457a2a9">The high-end of the range.</param>
 /// <param name="x1fe041039f050e2f">True if percent stimulation should be used
 /// (unused in the visible code path; kept for signature compatibility).</param>
 /// <param name="x4afa7e85b5b4d006">The percent to stimulate by.</param>
 private void x107a35a18a760853(int xf8a5a30e3fab4279, int xad55484e91d7ad5a, bool xdef033fa0cd8af9a, double xd12d1dba8a023d95, double x628ea9b89457a2a9, bool x1fe041039f050e2f, double x4afa7e85b5b4d006)
 {
     // Pick the randomizer: a range randomizer, or a percentage distortion.
     IRandomizer randomizer = xdef033fa0cd8af9a
         ? (IRandomizer) new RangeRandomizer(xd12d1dba8a023d95, x628ea9b89457a2a9)
         : new Distort(x4afa7e85b5b4d006);

     // Check for errors before touching any weights.
     this._x87a7fc6a72741c2e.ValidateNeuron(xf8a5a30e3fab4279, xad55484e91d7ad5a);

     // Access the flat network and allocate the replacement weight array.
     FlatNetwork flat = this._x87a7fc6a72741c2e.Structure.Flat;
     double[] newWeights = new double[flat.Weights.Length];
     int weightIndex = 0;

     // Walk the layers from output toward input (same order as the original
     // num2-- loop), copying every weight and randomizing only those that
     // touch the target neuron on either end of the connection.
     for (int fromLayer = flat.LayerCounts.Length - 2; fromLayer >= 0; fromLayer--)
     {
         int fromNeuronCount = this._x87a7fc6a72741c2e.GetLayerTotalNeuronCount(fromLayer);
         int toNeuronCount = this._x87a7fc6a72741c2e.GetLayerNeuronCount(fromLayer + 1);
         int toLayer = fromLayer + 1;

         for (int toNeuron = 0; toNeuron < toNeuronCount; toNeuron++)
         {
             for (int fromNeuron = 0; fromNeuron < fromNeuronCount; fromNeuron++)
             {
                 // Randomize when the connection ends at the target neuron
                 // (toLayer/toNeuron match) or starts at it (fromLayer/fromNeuron match).
                 bool randomize =
                     (toLayer == xf8a5a30e3fab4279 && toNeuron == xad55484e91d7ad5a)
                     || (fromLayer == xf8a5a30e3fab4279 && fromNeuron == xad55484e91d7ad5a);

                 double weight = this._x87a7fc6a72741c2e.GetWeight(fromLayer, fromNeuron, toNeuron);
                 if (randomize)
                 {
                     weight = randomizer.Randomize(weight);
                 }

                 newWeights[weightIndex++] = weight;
             }
         }
     }

     // Swap in the rebuilt weights.
     flat.Weights = newWeights;
 }
Example #2
0
        /// <summary>
        /// Internal worker that rebuilds the flat weight array, randomizing
        /// only the weights connected to one specific neuron. Invoked by
        /// randomizeNeuron and stimulateNeuron.
        /// </summary>
        ///
        /// <param name="targetLayer">The target layer.</param>
        /// <param name="neuron">The target neuron.</param>
        /// <param name="useRange">True if range randomization should be used.</param>
        /// <param name="low">The low-end of the range.</param>
        /// <param name="high">The high-end of the range.</param>
        /// <param name="usePercent">True if percent stimulation should be used.</param>
        /// <param name="percent">The percent to stimulate by.</param>
        private void RandomizeNeuron(int targetLayer, int neuron,
                                     bool useRange, double low, double high,
                                     bool usePercent, double percent)
        {
            // Select either range randomization or percentage distortion.
            IRandomizer rnd = useRange
                ? (IRandomizer) new RangeRandomizer(low, high)
                : new Distort(percent);

            // Validate the requested neuron before doing any work.
            _network.ValidateNeuron(targetLayer, neuron);

            // The flat network holds every weight in one linear array; build
            // a replacement array of the same length.
            FlatNetwork flat = _network.Structure.Flat;
            var rebuilt = new double[flat.Weights.Length];
            int idx = 0;

            // Layers are visited back-to-front, matching the flat weight layout.
            for (int fromLayer = flat.LayerCounts.Length - 2; fromLayer >= 0; fromLayer--)
            {
                int fromCount = _network.GetLayerTotalNeuronCount(fromLayer);
                int toCount = _network.GetLayerNeuronCount(fromLayer + 1);
                int toLayer = fromLayer + 1;

                for (int toNeuron = 0; toNeuron < toCount; toNeuron++)
                {
                    for (int fromNeuron = 0; fromNeuron < fromCount; fromNeuron++)
                    {
                        // A weight is randomized when either endpoint of the
                        // connection is the target neuron.
                        bool hit = (toLayer == targetLayer && toNeuron == neuron)
                                   || (fromLayer == targetLayer && fromNeuron == neuron);

                        double w = _network.GetWeight(fromLayer, fromNeuron, toNeuron);
                        if (hit)
                        {
                            w = rnd.Randomize(w);
                        }

                        rebuilt[idx++] = w;
                    }
                }
            }

            // Install the rebuilt weight array.
            flat.Weights = rebuilt;
        }
        /// <summary>
        /// Stimulate the specified neuron by the specified percent. This is used to
        /// randomize the weights and bias values for weak neurons.
        /// </summary>
        /// <param name="percent">The percent to randomize by.</param>
        /// <param name="layer">The layer that the neuron is on.</param>
        /// <param name="neuron">The neuron to randomize.</param>
        public void StimulateNeuron(double percent, ILayer layer,
                int neuron)
        {
            var distort = new Distort(percent);

            // Distort the bias weight for this neuron, when the layer has one.
            if (layer.HasBias)
            {
                double bias = layer.BiasWeights[neuron];
                layer.BiasWeights[neuron] = distort.Randomize(bias);
            }

            // Distort every outbound weight leaving this neuron.
            foreach (ISynapse synapse in layer.Next)
            {
                for (int to = 0; to < synapse.ToNeuronCount; to++)
                {
                    double outbound = synapse.WeightMatrix[neuron, to];
                    synapse.WeightMatrix[neuron, to] = distort.Randomize(outbound);
                }
            }

            // Distort every inbound weight arriving at this neuron.
            ICollection<ISynapse> inbound = this.network.Structure
                    .GetPreviousSynapses(layer);

            foreach (ISynapse synapse in inbound)
            {
                for (int from = 0; from < synapse.FromNeuronCount; from++)
                {
                    double incoming = synapse.WeightMatrix[from, neuron];
                    synapse.WeightMatrix[from, neuron] = distort.Randomize(incoming);
                }
            }
        }