Example #1
0
        /// <summary>
        /// Applies one gradient-descent step (with momentum) to every weight, in place.
        /// Weights are laid out flat in groups of <paramref name="_numNetworks"/>: the g-th
        /// group pairs input g with one weight per network, scaled by that network's gradient.
        /// </summary>
        /// <param name="_weights">Flattened weight array; mutated in place.</param>
        /// <param name="_numNetworks">Number of weights per input group.</param>
        /// <param name="_prevWeightsDelta">Previous step's deltas (momentum memory); mutated in place.</param>
        /// <param name="_learningRate">Learning rate (eta).</param>
        /// <param name="_momentum">Momentum coefficient.</param>
        /// <param name="_gradients">Per-network gradients, indexed 0.._numNetworks-1.</param>
        /// <param name="_inputs">Input values, one per weight group.</param>
        /// <returns>An <see cref="UpdateParams"/> carrying the updated weights and deltas.</returns>
        public UpdateParams UpdateWeights(double[] _weights, int _numNetworks, double[] _prevWeightsDelta, double _learningRate, double _momentum, double[] _gradients, double[] _inputs)
        {
            int flatIndex = 0; // position in the flattened weight array
            int inputIdx  = 0; // which input (weight group) we are on

            while (flatIndex < _weights.Length)
            {
                for (int net = 0; net < _numNetworks; ++net)
                {
                    // new delta = eta * gradient * input
                    double newDelta = _learningRate * _gradients[net] * _inputs[inputIdx];

                    // apply the fresh delta, then the momentum term from the previous pass
                    // (previous delta is 0.0 on the very first pass, which is fine)
                    _weights[flatIndex] += newDelta;
                    _weights[flatIndex] += _momentum * _prevWeightsDelta[flatIndex];

                    // remember this delta for the next call's momentum term
                    _prevWeightsDelta[flatIndex] = newDelta;
                    ++flatIndex;
                }

                ++inputIdx;
            }

            UpdateParams result = new UpdateParams();
            result._weights = _weights;
            result._delta   = _prevWeightsDelta;

            return result;
        }
Example #2
0
        /// <summary>
        /// Applies one gradient-descent step (with momentum) to every bias, in place.
        /// A bias behaves like a weight whose input is the constant 1.0.
        /// </summary>
        /// <param name="_biases">Bias values; mutated in place.</param>
        /// <param name="_prevBiasesDelta">Previous step's deltas (momentum memory); mutated in place.</param>
        /// <param name="_learningRate">Learning rate (eta).</param>
        /// <param name="_momentum">Momentum coefficient.</param>
        /// <param name="_gradients">Per-bias gradients.</param>
        /// <returns>An <see cref="UpdateParams"/> carrying the updated biases and deltas.</returns>
        public UpdateParams UpdateBiases(double[] _biases, double[] _prevBiasesDelta, double _learningRate, double _momentum, double[] _gradients)
        {
            for (int b = 0; b < _biases.Length; ++b)
            {
                // 1.0 is the constant input for any bias; kept explicit for clarity
                double newDelta = _learningRate * _gradients[b] * 1.0;

                // apply the fresh delta, then the momentum term from the previous pass
                _biases[b] += newDelta;
                _biases[b] += _momentum * _prevBiasesDelta[b];

                // remember this delta for the next call's momentum term
                _prevBiasesDelta[b] = newDelta;
            }

            UpdateParams result = new UpdateParams();
            result._weights = _biases;
            result._delta   = _prevBiasesDelta;

            return result;
        }