Ejemplo n.º 1
0
 /// <summary>
 ///     Construct the backpropagation trainer.
 /// </summary>
 /// <param name="theNetwork">The network whose weights will be trained.</param>
 /// <param name="theTraining">The training data to fit.</param>
 /// <param name="theLearningRate">The learning rate; can be changed while training runs.</param>
 /// <param name="theMomentum">The momentum; can be changed while training runs.</param>
 public BackPropagation(BasicNetwork theNetwork, IList<BasicData> theTraining, double theLearningRate,
     double theMomentum)
 {
     // Wire up the network, data, and caller-supplied hyper-parameters first.
     _network = theNetwork;
     _training = theTraining;
     LearningRate = theLearningRate;
     Momentum = theMomentum;

     // Defaults: 500-element batches, Mersenne-Twister stochastic sampling,
     // Nesterov-style momentum updates.
     BatchSize = 500;
     Stochastic = new MersenneTwisterGenerateRandom();
     NesterovUpdate = true;

     // Cross-entropy gradient calculator, plus one previous-delta slot per weight.
     _gradients = new GradientCalc(_network, new CrossEntropyErrorFunction(), this);
     _lastDelta = new double[_network.Weights.Length];
 }
Ejemplo n.º 2
0
 /// <summary>
 ///     Construct the backpropagation trainer.
 /// </summary>
 /// <param name="theNetwork">The network to train.</param>
 /// <param name="theTraining">The training data to use.</param>
 /// <param name="theLearningRate">The learning rate; may be adjusted as training runs.</param>
 /// <param name="theMomentum">The momentum; may be adjusted as training runs.</param>
 public BackPropagation(BasicNetwork theNetwork, IList <BasicData> theTraining, double theLearningRate,
                        double theMomentum)
 {
     // Default hyper-parameters: mini-batches of 500, stochastic sampling via
     // a Mersenne-Twister source, and Nesterov-style momentum updates.
     BatchSize = 500;
     Stochastic = new MersenneTwisterGenerateRandom();
     NesterovUpdate = true;

     _network = theNetwork;
     _training = theTraining;
     LearningRate = theLearningRate;
     Momentum = theMomentum;

     // Gradients use cross-entropy error; the delta history is sized to the
     // network's flat weight vector.
     _gradients = new GradientCalc(_network, new CrossEntropyErrorFunction(), this);
     _lastDelta = new double[theNetwork.Weights.Length];
 }
Ejemplo n.º 3
0
        /// <summary>
        ///     Construct a RPROP trainer.
        /// </summary>
        /// <param name="theNetwork">The network whose weights will be trained.</param>
        /// <param name="theTraining">The training data.</param>
        public ResilientPropagation(BasicNetwork theNetwork, IList <BasicData> theTraining)
        {
            _network = theNetwork;
            _training = theTraining;
            _gradients = new GradientCalc(_network, new CrossEntropyErrorFunction(), this);

            // One slot per network weight for each RPROP bookkeeping array.
            // NOTE(review): unlike the other ResilientPropagation constructor in
            // this listing, LastError is left at its default here — confirm intended.
            var weightCount = theNetwork.Weights.Length;
            _lastDelta = new double[weightCount];
            _updateValues = new double[weightCount];
            _lastGradients = new double[weightCount];

            // Every update step starts at the standard RPROP initial value.
            var i = 0;
            while (i < _updateValues.Length)
            {
                _updateValues[i++] = DEFAULT_INITIAL_UPDATE;
            }
        }
Ejemplo n.º 4
0
        /// <inheritdoc />
        public override void ComputeGradient(GradientCalc calc)
        {
            // Source width comes from the previous layer; target width is this layer.
            var prev = Owner.GetPreviousLayer(this);
            var fromLayerSize = prev.TotalCount;
            var toLayerSize = Count;

            // Calculate the output for each filter (depth).
            // NOTE(review): every iteration calls the overload with the same
            // constant offsets (0, 0), so the identical span is processed
            // _numFilters * _inDepth times. It looks like the offsets were
            // meant to depend on dOutput/dInput — confirm against the
            // offset-based overload before relying on this.
            for (var dOutput = 0; dOutput < _numFilters; dOutput++)
            {
                for (var dInput = 0; dInput < _inDepth; dInput++)
                {
                    ComputeGradient(calc, 0, 0, fromLayerSize, toLayerSize);
                }
            }
        }
Ejemplo n.º 5
0
        /// <summary>
        ///     Compute gradients for this layer: accumulate weight gradients into
        ///     the calculator and back-propagate deltas to the previous layer.
        /// </summary>
        /// <param name="calc">The gradient calculator that accumulates results.</param>
        /// <param name="inputOffset">Offset, in weight-depth units, into this layer's weight span.</param>
        /// <param name="outputOffset">The output offset (currently unused by this method).</param>
        /// <param name="fromLayerSize">Number of previous-layer neurons to read.</param>
        /// <param name="toLayerSize">Number of this layer's neurons to process.</param>
        public void ComputeGradient(GradientCalc calc, int inputOffset, int outputOffset, int fromLayerSize,
            int toLayerSize)
        {
            var prev = Owner.GetPreviousLayer(this);
            var fromLayerIndex = prev.NeuronIndex;
            var toLayerIndex = NeuronIndex;
            // Fix: removed unused local `outputSize` (read NeuronDepthUnit but was never used).
            var weightSize = WeightDepthUnit;

            // First weight of the requested input slice.
            var index = WeightIndex + weightSize*inputOffset; // this.weightIndex[currentLevel];
            var activation = Activation;

            // handle weights
            // array references are made method local to avoid one indirection
            var layerDelta = calc.LayerDelta;
            var weights = Owner.Weights;
            var layerOutput = Owner.LayerOutput;
            var layerSums = Owner.LayerSums;
            var y = fromLayerIndex;
            for (var yi = 0; yi < fromLayerSize; yi++)
            {
                var output = layerOutput[y];
                double sum = 0;

                // Weights for source neuron yi are strided by fromLayerSize
                // across the target neurons.
                var wi = index + yi;

                for (var xi = 0; xi < toLayerSize; xi++, wi += fromLayerSize)
                {
                    var x = xi + toLayerIndex;

                    // Accumulate the weight gradient only when both endpoints are
                    // active; the delta sum is accumulated unconditionally.
                    if (prev.IsActive(yi) && IsActive(xi))
                        calc.Gradients[wi] += -(output*layerDelta[x]);
                    sum += weights[wi]*layerDelta[x];
                }
                // Previous-layer delta: weighted sum scaled by the activation
                // derivative at that neuron's pre/post activation values.
                layerDelta[y] = sum
                                *activation.DerivativeFunction(layerSums[y], layerOutput[y]);

                y++;
            }
        }
Ejemplo n.º 6
0
 /// <summary>
 ///     Compute the gradients contributed by this layer, accumulating the
 ///     results into the supplied calculator.
 /// </summary>
 /// <param name="calc">The gradient calculator to accumulate into.</param>
 public abstract void ComputeGradient(GradientCalc calc);
Ejemplo n.º 7
0
        /// <summary>
        ///     Construct a RPROP trainer.
        /// </summary>
        /// <param name="theNetwork">The network.</param>
        /// <param name="theTraining">The training data.</param>
        public ResilientPropagation(BasicNetwork theNetwork, IList<BasicData> theTraining)
        {
            // Seed the reported error at 1.0 before the first iteration runs.
            LastError = 1.0;

            _network = theNetwork;
            _training = theTraining;
            _gradients = new GradientCalc(_network, new CrossEntropyErrorFunction(), this);

            // Per-weight RPROP bookkeeping arrays, one slot per network weight.
            var weightCount = theNetwork.Weights.Length;
            _lastDelta = new double[weightCount];
            _updateValues = new double[weightCount];
            _lastGradients = new double[weightCount];

            // All update steps begin at the default initial update value.
            for (var index = 0; index < _updateValues.Length; index++)
            {
                _updateValues[index] = DEFAULT_INITIAL_UPDATE;
            }
        }
Ejemplo n.º 8
0
 /// <inheritdoc />
 public override void ComputeGradient(GradientCalc calc)
 {
     // Delegate to the offset-based overload over the full span of both
     // layers: zero offsets, previous layer's total count as the source
     // width, this layer's count as the target width.
     var previousLayer = Owner.GetPreviousLayer(this);
     ComputeGradient(calc, 0, 0, previousLayer.TotalCount, Count);
 }
Ejemplo n.º 9
0
 /// <summary>
 ///     Gradient computation is not supported by this component.
 /// </summary>
 /// <param name="calc">The gradient calculator (unused).</param>
 /// <exception cref="NotImplementedException">Always thrown.</exception>
 public void ComputeGradient(GradientCalc calc)
 {
     throw new NotImplementedException();
 }