Example #1
        /// <summary>
        ///     Process one training set element.
        /// </summary>
        /// <param name="errorCalc">The error calculation to use.</param>
        /// <param name="input">The network input.</param>
        /// <param name="ideal">The ideal values.</param>
        public void Process(IErrorCalculation errorCalc, double[] input, double[] ideal)
        {
            _network.Compute(input, _actual);

            errorCalc.UpdateError(_actual, ideal, 1.0);

            // Calculate error for the output layer.
            var outputLayerIndex = _network.Layers.Count - 1;
            var outputActivation = _network.Layers[outputLayerIndex].Activation;

            _errorFunction.CalculateError(
                outputActivation, _layerSums, _layerOutput,
                ideal, _actual, _layerDelta, 0, 1.0);

            // Apply regularization, if requested.
            if (_owner.L1 > AIFH.DefaultPrecision ||
                _owner.L2 > AIFH.DefaultPrecision)
            {
                var lp = new double[2];
                CalculateRegularizationPenalty(lp);
                for (var i = 0; i < _actual.Length; i++)
                {
                    var p = lp[0] * _owner.L1 + lp[1] * _owner.L2;
                    _layerDelta[i] += p;
                }
            }

            // Propagate backwards (chain rule from calculus).
            for (var i = _network.Layers.Count - 1; i > 0; i--)
            {
                var layer = _network.Layers[i];
                layer.ComputeGradient(this);
            }
        }
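
All of the regularized variants call a CalculateRegularizationPenalty helper that is not shown in these listings. A minimal sketch of what such a helper might compute, assuming the network exposes a flat double[] weight array named Weights (that property name and the averaging over the weight count are illustrative assumptions, not the AIFH API):

        // Hypothetical sketch: fill lp[0] with the L1 term (mean of |w|)
        // and lp[1] with the L2 term (mean of w^2) over all weights.
        // The Weights property is assumed for illustration only.
        private void CalculateRegularizationPenalty(double[] lp)
        {
            double l1 = 0.0;
            double l2 = 0.0;
            foreach (double w in _network.Weights)
            {
                l1 += Math.Abs(w);
                l2 += w * w;
            }
            lp[0] = l1 / _network.Weights.Length;
            lp[1] = l2 / _network.Weights.Length;
        }

Under this sketch, lp[0] carries the L1 (absolute-value) term and lp[1] the L2 (squared) term, which the caller then scales by _owner.L1 and _owner.L2 respectively.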
Example #2
        /// <summary>
        /// Process one training set element.
        /// </summary>
        /// <param name="pair">The training pair to process.</param>
        private void Process(IMLDataPair pair)
        {
            _network.Compute(pair.Input, _actual);

            _errorCalculation.UpdateError(_actual, pair.Ideal, pair.Significance);

            // Calculate error for the output layer.
            _ef.CalculateError(
                _network.ActivationFunctions[0], _layerSums, _layerOutput,
                pair.Ideal, _actual, _layerDelta, _flatSpot[0],
                pair.Significance);

            // Apply regularization, if requested.
            if (_owner.L1 > EncogFramework.DefaultDoubleEqual ||
                _owner.L2 > EncogFramework.DefaultDoubleEqual)
            {
                double[] lp = new double[2];
                CalculateRegularizationPenalty(lp);
                for (int i = 0; i < _actual.Length; i++)
                {
                    double p = (lp[0] * _owner.L1) + (lp[1] * _owner.L2);
                    _layerDelta[i] += p;
                }
            }

            // Propagate backwards (chain rule from calculus).
            for (int i = _network.BeginTraining; i < _network.EndTraining; i++)
            {
                ProcessLevel(i);
            }
        }
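
The _flatSpot[0] argument passed to CalculateError implements Fahlman's flat-spot fix: a small constant is added to the activation derivative so the gradient does not vanish where a sigmoid saturates. A minimal illustration for a sigmoid output (this helper is a sketch of the idea, not part of the Encog API):

        // Illustrative only: the sigmoid derivative written in terms of
        // the output value, plus a flat-spot constant (commonly 0.1) so
        // the gradient never collapses to zero in saturated regions.
        private static double SigmoidDerivativeWithFlatSpot(double output, double flatSpot)
        {
            return output * (1.0 - output) + flatSpot;
        }

Example #3 below applies exactly this derivative-plus-flat-spot product inline instead of delegating it to CalculateError.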
Example #3
        /// <summary>
        /// Process one training set element.
        /// </summary>
        /// <param name="pair">The training pair to process.</param>
        private void Process(IMLDataPair pair)
        {
            _network.Compute(pair.Input, _actual);

            _errorCalculation.UpdateError(_actual, pair.Ideal, pair.Significance);
            _ef.CalculateError(pair.Ideal, _actual, _layerDelta);

            // Scale the raw error by the activation derivative plus the
            // flat-spot constant to obtain the output-layer deltas.
            for (int i = 0; i < _actual.Length; i++)
            {
                _layerDelta[i] = (_network.ActivationFunctions[0]
                                  .DerivativeFunction(_layerSums[i], _layerOutput[i]) + _flatSpot[0])
                                 * _layerDelta[i] * pair.Significance;
            }

            // Propagate backwards (chain rule from calculus).
            for (int i = _network.BeginTraining; i < _network.EndTraining; i++)
            {
                ProcessLevel(i);
            }
        }
        /// <summary>
        /// Process one training set element.
        /// </summary>
        /// <param name="input">The network input.</param>
        /// <param name="ideal">The ideal values.</param>
        /// <param name="s">The significance of this error.</param>
        private void Process(double[] input, double[] ideal, double s)
        {
            _network.Compute(input, _actual);

            _errorCalculation.UpdateError(_actual, ideal, s);
            _ef.CalculateError(ideal, _actual, _layerDelta);

            // Scale the raw error by the activation derivative plus the
            // flat-spot constant to obtain the output-layer deltas.
            for (int i = 0; i < _actual.Length; i++)
            {
                _layerDelta[i] = (_network.ActivationFunctions[0]
                                  .DerivativeFunction(_layerSums[i], _layerOutput[i]) + _flatSpot[0])
                                 * _layerDelta[i] * s;
            }

            // Propagate backwards (chain rule from calculus).
            for (int i = _network.BeginTraining; i < _network.EndTraining; i++)
            {
                ProcessLevel(i);
            }
        }
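        /// <summary>
        /// Process one training set element against the flat network.
        /// </summary>
        /// <param name="pair">The training pair to process.</param>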
        public void Process(IMLDataPair pair)
        {
            _errorCalculation = new ErrorCalculation();

            double[] actual = new double[_flat.OutputCount];

            _flat.Compute(pair.Input, actual);

            _errorCalculation.UpdateError(actual, pair.Ideal, pair.Significance);

            // Calculate error for the output layer.
            _errorFunction.CalculateError(
                _flat.ActivationFunctions[0], _flat.LayerSums, _flat.LayerOutput,
                pair.Ideal, actual, _layerDelta, 0,
                pair.Significance);

            // Apply regularization, if requested.
            if (L1 > EncogFramework.DefaultDoubleEqual ||
                L2 > EncogFramework.DefaultDoubleEqual)
            {
                double[] lp = new double[2];
                CalculateRegularizationPenalty(lp);
                for (int i = 0; i < actual.Length; i++)
                {
                    double p = (lp[0] * L1) + (lp[1] * L2);
                    _layerDelta[i] += p;
                }
            }

            // Propagate backwards (chain rule from calculus).
            for (int i = _flat.BeginTraining; i < _flat.EndTraining; i++)
            {
                ProcessLevel(i);
            }
        }
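
For context, each Process variant handles a single training element, so an outer loop invokes it once per record. A minimal calling sketch, assuming Encog's IMLDataSet can be enumerated as IMLDataPair items (the ProcessAll wrapper is hypothetical; in Encog a gradient worker's Run method performs this loop over its assigned record range):

        // Hypothetical driver: accumulate error and per-weight gradients
        // over every element of the training set.
        public void ProcessAll(IMLDataSet trainingSet)
        {
            foreach (IMLDataPair pair in trainingSet)
            {
                Process(pair);
            }
        }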