Code example #1
        /// <summary>
        /// Process a single training pair against one output neuron: run the
        /// network forward, accumulate the squared error for that neuron, seed
        /// the output-layer deltas, back-propagate, and accumulate gradients.
        /// </summary>
        /// <param name="outputNeuron">The output neuron.</param>
        /// <param name="pair">The training pair to process.</param>
        private void Process(int outputNeuron, IMLDataPair pair)
        {
            // Forward pass.
            _flat.Compute(pair.Input, _actual);

            double e = pair.Ideal[outputNeuron] - _actual[outputNeuron];
            _error += e * e;

            // Only the selected output neuron contributes a delta; every
            // other output delta is zeroed for this pass.
            for (int idx = 0; idx < _actual.Length; idx++)
            {
                _layerDelta[idx] = (idx == outputNeuron)
                    ? _flat.ActivationFunctions[0]
                           .DerivativeFunction(_layerSums[idx], _layerOutput[idx])
                    : 0;
            }

            // Propagate backwards through the trainable layers.
            for (int level = _flat.BeginTraining; level < _flat.EndTraining; level++)
            {
                ProcessLevel(level);
            }

            // Accumulate the gradients and the total derivatives.
            for (int w = 0; w < _weights.Length; w++)
            {
                _gradients[w] += e * _derivative[w];
                _totDeriv[w] += _derivative[w];
            }
        }
Code example #2
        /// <summary>
        /// Process one training set element: run the network forward, update
        /// the running error, compute the output-layer deltas (optionally
        /// adding an L1/L2 regularization penalty), and back-propagate
        /// through the trainable layers.
        /// </summary>
        /// <param name="pair">The training pair holding the input, the ideal
        /// output and the significance of this error.</param>
        private void Process(IMLDataPair pair)
        {
            _network.Compute(pair.Input, _actual);

            _errorCalculation.UpdateError(_actual, pair.Ideal, pair.Significance);

            // Calculate error for the output layer.
            _ef.CalculateError(
                _network.ActivationFunctions[0], _layerSums, _layerOutput,
                pair.Ideal, _actual, _layerDelta, _flatSpot[0],
                pair.Significance);

            // Apply regularization, if requested.
            // BUG FIX: the second condition previously re-tested L1, so a
            // network configured with only L2 regularization was silently
            // ignored. It must test L2.
            if (_owner.L1 > EncogFramework.DefaultDoubleEqual ||
                _owner.L2 > EncogFramework.DefaultDoubleEqual)
            {
                double[] lp = new double[2];
                CalculateRegularizationPenalty(lp);

                // The penalty is identical for every output neuron, so
                // compute it once outside the loop.
                double p = (lp[0] * _owner.L1) + (lp[1] * _owner.L2);
                for (int i = 0; i < _actual.Length; i++)
                {
                    _layerDelta[i] += p;
                }
            }

            // Propagate backwards (chain rule from calculus).
            for (int i = _network.BeginTraining; i < _network.EndTraining; i++)
            {
                ProcessLevel(i);
            }
        }
Code example #3
        /// <summary>
        /// Process one training set element: run the network forward, update
        /// the running error, scale the output-layer deltas by the activation
        /// derivative (plus the flat-spot constant) and the pair's
        /// significance, then back-propagate through the trainable layers.
        /// </summary>
        /// <param name="pair">The training pair holding the input, the ideal
        /// output and the significance of this error.</param>
        private void Process(IMLDataPair pair)
        {
            _network.Compute(pair.Input, _actual);

            _errorCalculation.UpdateError(_actual, pair.Ideal, pair.Significance);
            _ef.CalculateError(pair.Ideal, _actual, _layerDelta);

            // The activation function and flat-spot term are loop-invariant;
            // hoist them instead of re-indexing on every iteration.
            var activation = _network.ActivationFunctions[0];
            double flatSpot = _flatSpot[0];
            for (int i = 0; i < _actual.Length; i++)
            {
                _layerDelta[i] = (activation.DerivativeFunction(_layerSums[i], _layerOutput[i]) + flatSpot)
                                 * _layerDelta[i] * pair.Significance;
            }

            for (int i = _network.BeginTraining; i < _network.EndTraining; i++)
            {
                ProcessLevel(i);
            }
        }
Code example #4
        /// <summary>
        /// Process one training set element: forward pass, error update,
        /// output-delta scaling, then backward propagation.
        /// </summary>
        /// <param name="input">The network input.</param>
        /// <param name="ideal">The ideal values.</param>
        /// <param name="s">The significance of this error.</param>
        private void Process(double[] input, double[] ideal, double s)
        {
            // Forward pass.
            _network.Compute(input, _actual);

            // Fold this element into the running error measure and compute
            // the raw output-layer error.
            _errorCalculation.UpdateError(_actual, ideal, s);
            _ef.CalculateError(ideal, _actual, _layerDelta);

            // Scale each output delta by the activation derivative (plus the
            // flat-spot constant) and by this element's significance.
            for (int neuron = 0; neuron < _actual.Length; neuron++)
            {
                double derivative = _network.ActivationFunctions[0]
                    .DerivativeFunction(_layerSums[neuron], _layerOutput[neuron]);
                _layerDelta[neuron] = (derivative + _flatSpot[0]) * _layerDelta[neuron] * s;
            }

            // Propagate backwards through the trainable layers.
            for (int level = _network.BeginTraining; level < _network.EndTraining; level++)
            {
                ProcessLevel(level);
            }
        }
Code example #5
        /// <summary>
        /// Process one training set element: run the flat network forward,
        /// update the error calculation, seed the output-layer deltas
        /// (including any L1/L2 regularization penalty) and back-propagate.
        /// </summary>
        /// <param name="pair">The training pair to process.</param>
        public void Process(IMLDataPair pair)
        {
            _errorCalculation = new ErrorCalculation();

            var actual = new double[_flat.OutputCount];

            // Forward pass.
            _flat.Compute(pair.Input, actual);

            _errorCalculation.UpdateError(actual, pair.Ideal, pair.Significance);

            // Calculate error for the output layer (flat-spot constant of 0).
            _errorFunction.CalculateError(
                _flat.ActivationFunctions[0], _flat.LayerSums, _flat.LayerOutput,
                pair.Ideal, actual, _layerDelta, 0,
                pair.Significance);

            // Apply regularization, if requested.
            bool regularize = L1 > EncogFramework.DefaultDoubleEqual
                              || L2 > EncogFramework.DefaultDoubleEqual;
            if (regularize)
            {
                var lp = new double[2];
                CalculateRegularizationPenalty(lp);
                for (int neuron = 0; neuron < actual.Length; neuron++)
                {
                    _layerDelta[neuron] += (lp[0] * L1) + (lp[1] * L2);
                }
            }

            // Propagate backwards (chain rule from calculus).
            for (int level = _flat.BeginTraining; level < _flat.EndTraining; level++)
            {
                ProcessLevel(level);
            }
        }