Code example #1
        /// <summary>
        /// Randomize the connections between two layers.
        /// </summary>
        /// <param name="network">The network to randomize.</param>
        /// <param name="fromLayer">The starting layer.</param>
        private void RandomizeSynapse(BasicNetwork network, int fromLayer)
        {
            int toLayer             = fromLayer + 1;
            int toCount             = network.GetLayerNeuronCount(toLayer);
            int fromCount           = network.GetLayerNeuronCount(fromLayer);
            int fromCountTotalCount = network.GetLayerTotalNeuronCount(fromLayer);
            IActivationFunction af  = network.GetActivation(toLayer);
            double low  = CalculateRange(af, Double.NegativeInfinity);
            double high = CalculateRange(af, Double.PositiveInfinity);

            // Nguyen-Widrow scale factor: beta = 0.7 * h^(1/n), where h is the number
            // of "to" neurons and n the number of "from" neurons, divided by the
            // output range (high - low) of the next layer's activation function.
            double b = 0.7d * Math.Pow(toCount, (1d / fromCount)) / (high - low);

            for (int toNeuron = 0; toNeuron < toCount; toNeuron++)
            {
                // A bias neuron is present when the total count exceeds the regular
                // count; its weight sits at index fromCount and is drawn from (-b, b).
                if (fromCount != fromCountTotalCount)
                {
                    double w = RangeRandomizer.Randomize(-b, b);
                    network.SetWeight(fromLayer, fromCount, toNeuron, w);
                }
                // Regular weights are drawn from (0, b).
                for (int fromNeuron = 0; fromNeuron < fromCount; fromNeuron++)
                {
                    double w = RangeRandomizer.Randomize(0, b);
                    network.SetWeight(fromLayer, fromNeuron, toNeuron, w);
                }
            }
        }
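
The same scheme is easier to see on a plain weight matrix. The sketch below is only an illustration, not part of the snippet above: it assumes a single bias weight per output neuron and fixes the activation output range (high - low) at 1.0, where the snippet derives that range from the layer's actual activation function.

        // Illustrative standalone version of the snippet above, operating on a
        // plain double[,] matrix instead of an Encog BasicNetwork. The activation
        // range (high - low) is fixed at 1.0 here purely as an assumption.
        static void NguyenWidrowRandomize(double[,] weights, double[] bias, Random rnd)
        {
            int fromCount = weights.GetLength(0);
            int toCount   = weights.GetLength(1);
            double range  = 1.0; // assumed activation output range (high - low)
            double b      = 0.7 * Math.Pow(toCount, 1.0 / fromCount) / range;

            for (int toNeuron = 0; toNeuron < toCount; toNeuron++)
            {
                // Bias weight uniform in (-b, b), regular weights uniform in (0, b),
                // mirroring the RangeRandomizer calls above.
                bias[toNeuron] = -b + 2.0 * b * rnd.NextDouble();
                for (int fromNeuron = 0; fromNeuron < fromCount; fromNeuron++)
                {
                    weights[fromNeuron, toNeuron] = b * rnd.NextDouble();
                }
            }
        }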
Code example #2
        /// <summary>
        /// Randomize one level of a neural network.
        /// </summary>
        ///
        /// <param name="network">The network to randomize.</param>
        /// <param name="fromLayer">The source layer to randomize.</param>
        public override void Randomize(BasicNetwork network, int fromLayer)
        {
            int fromCount = network.GetLayerTotalNeuronCount(fromLayer);
            int toCount   = network.GetLayerNeuronCount(fromLayer + 1);

            for (int fromNeuron = 0; fromNeuron < fromCount; fromNeuron++)
            {
                for (int toNeuron = 0; toNeuron < toCount; toNeuron++)
                {
                    double v = CalculateValue(toCount);
                    network.SetWeight(fromLayer, fromNeuron, toNeuron, v);
                }
            }
        }
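
A per-layer Randomize like this is normally driven over every layer that has outgoing weights. The driver below is a hypothetical addition, not taken from the snippet; it assumes the usual Encog BasicNetwork.LayerCount property to bound the loop.

        // Hypothetical driver: apply the per-layer overload to every layer with
        // outgoing weights, i.e. every layer except the output layer.
        public void Randomize(BasicNetwork network)
        {
            for (int fromLayer = 0; fromLayer < network.LayerCount - 1; fromLayer++)
            {
                Randomize(network, fromLayer);
            }
        }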
Code example #3
        /// <summary>
        ///     The Xavier initialization works layer by layer.
        /// </summary>
        /// <param name="network">The network.</param>
        /// <param name="fromLayer">The source layer.</param>
        private void RandomizeLayer(BasicNetwork network, int fromLayer)
        {
            var fromCount = network.GetLayerTotalNeuronCount(fromLayer);
            var toCount   = network.Layers[fromLayer + 1].Count;

            for (var fromNeuron = 0; fromNeuron < fromCount; fromNeuron++)
            {
                for (var toNeuron = 0; toNeuron < toCount; toNeuron++)
                {
                    var sigma = Math.Sqrt(2.0 / (fromCount + toCount));
                    var w     = Rnd.NextGaussian() * sigma;
                    network.SetWeight(fromLayer, fromNeuron, toNeuron, w);
                }
            }
        }
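
Because Rnd.NextGaussian is presumably a helper from the surrounding project rather than part of the .NET base class library, a self-contained sketch of the same Xavier (Glorot) initialization on a plain matrix can substitute a Box-Muller transform for the Gaussian draw:

        // Standalone Xavier (Glorot) initialization for a plain double[,] matrix.
        // A Box-Muller transform stands in for the Rnd.NextGaussian helper used above.
        static void XavierRandomize(double[,] weights, Random rnd)
        {
            int fromCount = weights.GetLength(0);
            int toCount   = weights.GetLength(1);
            double sigma  = Math.Sqrt(2.0 / (fromCount + toCount));

            for (int fromNeuron = 0; fromNeuron < fromCount; fromNeuron++)
            {
                for (int toNeuron = 0; toNeuron < toCount; toNeuron++)
                {
                    // Box-Muller: two uniform samples -> one standard normal sample.
                    double u1 = 1.0 - rnd.NextDouble();
                    double u2 = rnd.NextDouble();
                    double gaussian = Math.Sqrt(-2.0 * Math.Log(u1)) * Math.Cos(2.0 * Math.PI * u2);
                    weights[fromNeuron, toNeuron] = gaussian * sigma;
                }
            }
        }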
Code example #4
        /// <summary>
        /// Randomize one level of a neural network.
        /// </summary>
        ///
        /// <param name="network">The network to randomize.</param>
        /// <param name="fromLayer">The source layer to randomize.</param>
        public virtual void Randomize(BasicNetwork network, int fromLayer)
        {
            int fromCount = network.GetLayerTotalNeuronCount(fromLayer);
            int toCount   = network.GetLayerNeuronCount(fromLayer + 1);

            for (int fromNeuron = 0; fromNeuron < fromCount; fromNeuron++)
            {
                for (int toNeuron = 0; toNeuron < toCount; toNeuron++)
                {
                    double v = network.GetWeight(fromLayer, fromNeuron, toNeuron);
                    v = Randomize(v);
                    network.SetWeight(fromLayer, fromNeuron, toNeuron, v);
                }
            }
        }
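
This virtual overload reads every weight, passes it through a per-value Randomize(double), and writes the result back, so a derived randomizer only needs to supply that per-value method. A hypothetical uniform-range version is sketched below; the _min, _max and _rnd fields are assumptions, not shown in the snippet.

        // Hypothetical per-value overload in a derived randomizer; field names are
        // assumptions for illustration.
        public override double Randomize(double d)
        {
            // Ignore the current weight and draw a fresh value uniformly from [_min, _max).
            return _min + _rnd.NextDouble() * (_max - _min);
        }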
Code example #5
        /// <summary>
        /// Randomize one level of a neural network.
        /// </summary>
        ///
        /// <param name="network">The network to randomize.</param>
        /// <param name="fromLayer">The source layer to randomize.</param>
        public override void Randomize(BasicNetwork network, int fromLayer)
        {
            int fromCount = network.GetLayerTotalNeuronCount(fromLayer);
            int toCount   = network.GetLayerNeuronCount(fromLayer + 1);

            for (int toNeuron = 0; toNeuron < toCount; toNeuron++)
            {
                // Euclidean length of the weight vector feeding this "to" neuron.
                double n = 0.0;
                for (int fromNeuron = 0; fromNeuron < fromCount; fromNeuron++)
                {
                    double w = network.GetWeight(fromLayer, fromNeuron, toNeuron);
                    n += w * w;
                }
                n = Math.Sqrt(n);

                // Rescale the incoming weights so the vector has length _beta.
                for (int fromNeuron = 0; fromNeuron < fromCount; fromNeuron++)
                {
                    double w = network.GetWeight(fromLayer, fromNeuron, toNeuron);
                    w = _beta * w / n;
                    network.SetWeight(fromLayer, fromNeuron, toNeuron, w);
                }
                }
            }
        }
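
This rescaling is the second half of a Nguyen-Widrow style scheme: after small random weights are drawn (compare code example #1, where the same 0.7 * h^(1/n) constant appears), each output neuron's incoming weight vector is normalized to Euclidean length beta. A standalone version of just this step on a plain matrix, written as an illustration rather than the library's implementation, might look like:

        // Standalone version of the rescaling above: normalize each column of a
        // plain weight matrix (one column per "to" neuron) to Euclidean length beta.
        static void RescaleColumns(double[,] weights, double beta)
        {
            int fromCount = weights.GetLength(0);
            int toCount   = weights.GetLength(1);

            for (int toNeuron = 0; toNeuron < toCount; toNeuron++)
            {
                double norm = 0.0;
                for (int fromNeuron = 0; fromNeuron < fromCount; fromNeuron++)
                {
                    norm += weights[fromNeuron, toNeuron] * weights[fromNeuron, toNeuron];
                }
                norm = Math.Sqrt(norm);

                for (int fromNeuron = 0; fromNeuron < fromCount; fromNeuron++)
                {
                    weights[fromNeuron, toNeuron] = beta * weights[fromNeuron, toNeuron] / norm;
                }
            }
        }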