Example #1
        /// <summary>
        /// Calculate the current error.
        /// </summary>
        public void CalcError()
        {
            BackpropagationLayer next = this.backpropagation
                                        .GetBackpropagationLayer(this.layer.Next);

            for (int i = 0; i < this.layer.Next.NeuronCount; i++)
            {
                for (int j = 0; j < this.layer.NeuronCount; j++)
                {
                    AccumulateMatrixDelta(j, i, next.GetErrorDelta(i)
                                          * this.layer.GetFire(j));
                    SetError(j, GetError(j) + this.layer.LayerMatrix[j, i]
                             * next.GetErrorDelta(i));
                }
                AccumulateThresholdDelta(i, next.GetErrorDelta(i));
            }

            if (this.layer.IsHidden())
            {
                // hidden layer deltas
                for (int i = 0; i < this.layer.NeuronCount; i++)
                {
                    SetErrorDelta(i, BoundNumbers.Bound(CalculateDelta(i)));
                }
            }
        }
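For reference, the loops above perform the usual backpropagation bookkeeping (a summary of the code, not taken from the library's docs). Writing delta_i for the error delta of neuron i in the next layer, o_j for the fire value of neuron j in this layer, W_ji for the connecting weight, and theta_i for the threshold:

\[
\Delta W_{ji} \mathrel{+}= \delta_i\,o_j,\qquad
e_j \mathrel{+}= W_{ji}\,\delta_i,\qquad
\Delta\theta_i \mathrel{+}= \delta_i .
\]

For hidden layers, each delta is then recomputed from the accumulated error and clamped via BoundNumbers.Bound; the delta formula itself lives in CalculateDelta, which is not shown on this page.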
Example #2
        /// <summary>
        /// An activation function for a neural network.
        /// </summary>
        /// <param name="input">The input to the function.</param>
        /// <returns>The output from the function.</returns>
        public double ActivationFunction(double input)
        {
            //return (BoundNumbers.Exp(input * 2.0) - 1) / (BoundNumbers.Exp(input * 2.0 + 1));

            //Correction suggested by https://github.com/felipetavares
            return((BoundNumbers.Exp(input) - BoundNumbers.Exp(-input)) / (BoundNumbers.Exp(input) + BoundNumbers.Exp(-input)));
        }
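The commented-out line preserves the earlier bug: the closing parenthesis of the second Exp call was misplaced, so the denominator evaluated e^(2x+1) instead of e^(2x)+1. A quick stand-alone sanity check of the corrected form against Math.Tanh (a sketch that substitutes Math.Exp for the library's BoundNumbers.Exp) might look like:

using System;

static class TanhCheck
{
    static void Main()
    {
        foreach (double x in new[] { -2.0, -0.5, 0.0, 0.5, 2.0 })
        {
            // Corrected form: (e^x - e^-x) / (e^x + e^-x)
            double t = (Math.Exp(x) - Math.Exp(-x)) / (Math.Exp(x) + Math.Exp(-x));
            Console.WriteLine($"x={x}: {t} vs Math.Tanh={Math.Tanh(x)}");
        }
    }
}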
Example #3
 public void CalculateError(double[] ideal)
 {
     for (var i = 0; i < _layer.NeuronCount; i++)
     {
         SetError(i, ideal[i] - _layer.Fire[i]);
         SetErrorDelta(i, BoundNumbers.Bound(CalculateDelta(i)));
     }
 }
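Together with the nearly identical Example #4 below, this computes the standard output-layer error: with t_i the ideal (target) output and o_i the neuron's fire value,

\[
e_i = t_i - o_i ,
\]

after which the delta is derived from the error and clamped via BoundNumbers.Bound (CalculateDelta itself is not shown on this page).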
Example #4
 /// <summary>
 /// Calculate the error for the given ideal values.
 /// </summary>
 /// <param name="ideal">Ideal output values.</param>
 public void CalcError(double[] ideal)
 {
     // layer errors and deltas for output layer
     for (int i = 0; i < this.layer.NeuronCount; i++)
     {
         SetError(i, ideal[i] - this.layer.GetFire(i));
         SetErrorDelta(i, BoundNumbers.Bound(CalculateDelta(i)));
     }
 }
Example #5
        public void CalculateError()
        {
            var next = _parent.GetBackpropagationLayer(_layer.Next);

            for (var i = 0; i < _layer.Next.NeuronCount; i++)
            {
                for (var j = 0; j < _layer.NeuronCount; j++)
                {
                    _accumulatedMatrixDelta.Add(j, i, next.GetErrorDelta(i) * _layer.Fire[j]);
                    SetError(j, GetError(j) + _layer.LayerMatrix[j, i] * next.GetErrorDelta(i));
                }
                AccumulateThresholdDelta(i, next.GetErrorDelta(i));
            }

            if (_layer.IsHiddenLayer)
            {
                for (var i = 0; i < _layer.NeuronCount; i++)                 // hidden layer deltas
                {
                    SetErrorDelta(i, BoundNumbers.Bound(CalculateDelta(i)));
                }
            }
        }
Example #6
 public override double Activate(double input)
 {
     return(1.0 / (1 + BoundNumbers.Exp(-1.0 * input)));
 }
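As a side note, the logistic function used above has the convenient derivative f'(x) = f(x)(1 - f(x)), which is what backpropagation needs when computing deltas. A minimal self-contained sketch (using Math.Exp rather than the library's BoundNumbers.Exp):

using System;

static class SigmoidDemo
{
    // Logistic activation: maps any real input into (0, 1).
    static double Sigmoid(double x) => 1.0 / (1.0 + Math.Exp(-x));

    // Derivative expressed through the activation value itself.
    static double SigmoidDerivative(double x)
    {
        double f = Sigmoid(x);
        return f * (1.0 - f);
    }

    static void Main()
    {
        Console.WriteLine(Sigmoid(0.0));            // 0.5
        Console.WriteLine(SigmoidDerivative(0.0));  // 0.25
    }
}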
Example #7
        /// <summary>
        /// Perform one iteration.
        /// </summary>
        ///
        public override void Iteration()
        {
            if (_mustInit)
            {
                Init();
            }

            base.PreIteration();

            RollIteration();
            int numWeights = _weights.Length;

            // Storage space for previous iteration values.

            if (_restart)
            {
                // First time through, set initial values for SCG parameters.
                _lambda  = FirstLambda;
                _lambda2 = 0;
                _k       = 1;
                _success = true;
                _restart = false;
            }

            // If an error reduction is possible, calculate 2nd order info.
            if (_success)
            {
                // If the search direction is small, stop.
                _magP = EngineArray.VectorProduct(_p, _p);

                double sigma = FirstSigma
                               / Math.Sqrt(_magP);

                // In order to compute the new step, we need a new gradient.
                // First, save off the old data.
                EngineArray.ArrayCopy(Gradients, _oldGradient);
                EngineArray.ArrayCopy(_weights, _oldWeights);
                _oldError = Error;

                // Now we move to the new point in weight space.
                for (int i = 0; i < numWeights; ++i)
                {
                    _weights[i] += sigma * _p[i];
                }

                EngineArray.ArrayCopy(_weights, Network.Flat.Weights);

                // And compute the new gradient.
                CalculateGradients();

                // Now we have the new gradient, and we continue the step
                // computation.
                _delta = 0;
                for (int i = 0; i < numWeights; ++i)
                {
                    double step = (Gradients[i] - _oldGradient[i])
                                  / sigma;
                    _delta += _p[i] * step;
                }
            }

            // Scale delta.
            _delta += (_lambda - _lambda2) * _magP;

            // If delta <= 0, make Hessian positive definite.
            if (_delta <= 0)
            {
                _lambda2 = 2 * (_lambda - _delta / _magP);
                _delta   = _lambda * _magP - _delta;
                _lambda  = _lambda2;
            }

            // Calculate step size.
            double mu    = EngineArray.VectorProduct(_p, _r);
            double alpha = mu / _delta;

            // Calculate the comparison parameter.
            // We must compute a new gradient, but this time we do not
            // want to keep the old values. They were useful only for
            // approximating the Hessian.
            for (int i = 0; i < numWeights; ++i)
            {
                _weights[i] = _oldWeights[i] + alpha * _p[i];
            }

            EngineArray.ArrayCopy(_weights, Network.Flat.Weights);

            CalculateGradients();

            double gdelta = 2 * _delta * (_oldError - Error)
                            / (mu * mu);

            // If gdelta >= 0, a successful reduction in error is possible.
            if (gdelta >= 0)
            {
                // Product of r(k+1) by r(k)
                double rsum = 0;

                // Now r = r(k+1).
                for (int i = 0; i < numWeights; ++i)
                {
                    double tmp = -Gradients[i];
                    rsum += tmp * _r[i];
                    _r[i] = tmp;
                }
                _lambda2 = 0;
                _success = true;

                // Do we need to restart?
                if (_k >= numWeights)
                {
                    _restart = true;
                    EngineArray.ArrayCopy(_r, _p);
                }
                else
                {
                    // Compute new conjugate direction.
                    double beta = (EngineArray.VectorProduct(_r, _r) - rsum)
                                  / mu;

                    // Update direction vector.
                    for (int i = 0; i < numWeights; ++i)
                    {
                        _p[i] = _r[i] + beta * _p[i];
                    }

                    _restart = false;
                }

                if (gdelta >= 0.75D)
                {
                    _lambda *= 0.25D;
                }
            }
            else
            {
                // A reduction in error was not possible.
                // under_tolerance = false;

                // Go back to w(k) since w(k) + alpha*p(k) is not better.
                EngineArray.ArrayCopy(_oldWeights, _weights);
                Error    = _oldError;
                _lambda2 = _lambda;
                _success = false;
            }

            if (gdelta < 0.25D)
            {
                _lambda += _delta * (1 - gdelta) / _magP;
            }

            _lambda = BoundNumbers.Bound(_lambda);

            ++_k;

            EngineArray.ArrayCopy(_weights, Network.Flat.Weights);

            base.PostIteration();
        }
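For orientation, the body above follows Moller's scaled conjugate gradient (SCG) step; summarizing in the code's own quantities (p the search direction, r the negative gradient, lambda and lambda2 the scaling parameters):

\[
\sigma_k = \frac{\sigma_1}{\lVert p_k\rVert},\qquad
\delta_k = p_k^{\mathsf T}\,\frac{\nabla E(w_k+\sigma_k p_k)-\nabla E(w_k)}{\sigma_k}
          +(\lambda_k-\bar\lambda_k)\lVert p_k\rVert^2,
\]
\[
\mu_k = p_k^{\mathsf T} r_k,\qquad
\alpha_k = \frac{\mu_k}{\delta_k},\qquad
\Delta_k = \frac{2\,\delta_k\,\bigl(E(w_k)-E(w_k+\alpha_k p_k)\bigr)}{\mu_k^{2}} .
\]

A step is accepted when the comparison parameter Delta_k (gdelta in the code) is non-negative; lambda is quartered when Delta_k >= 0.75 and increased when Delta_k < 0.25, exactly as in the branches above.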
Example #8
 /// <summary>
 /// An activation function for a neural network.
 /// </summary>
 /// <param name="d">The input to the function.</param>
 /// <returns>The output from the function.</returns>
 public double ActivationFunction(double d)
 {
     return(1.0 / (1 + BoundNumbers.Exp(-1.0 * d)));
 }
Example #9
 public override double Activate(double input)
 {
     return((BoundNumbers.Exp(input * 2.0) - 1.0) / (BoundNumbers.Exp(input * 2.0) + 1.0));
 }
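This is the same hyperbolic tangent as in Example #2, algebraically rearranged so only one exponential needs to be evaluated:

\[
\tanh(x)=\frac{e^{2x}-1}{e^{2x}+1}=\frac{e^{x}-e^{-x}}{e^{x}+e^{-x}} .
\]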
Example #10
 /// <summary>
 /// Set the error for the specified neuron.
 /// </summary>
 /// <param name="index">The specified neuron.</param>
 /// <param name="e">The error value.</param>
 public void SetError(int index, double e)
 {
     this.error[index] = BoundNumbers.Bound(e);
 }
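BoundNumbers itself does not appear on this page. Based on how Bound and Exp are used in these examples, a plausible minimal reconstruction might look like the following; the 1.0E20 clamp limits are assumptions, not values taken from the source:

using System;

// Hypothetical reconstruction of the BoundNumbers helper used throughout
// these examples; the clamp limits are assumed, not taken from this page.
public static class BoundNumbers
{
    public const double TooSmall = -1.0E20; // assumed lower clamp
    public const double TooBig = 1.0E20;    // assumed upper clamp

    // Clamp a value into a numerically safe range so that a runaway
    // error or weight cannot propagate infinities through training.
    public static double Bound(double d)
    {
        if (d < TooSmall) return TooSmall;
        if (d > TooBig) return TooBig;
        return d;
    }

    // An exp whose result is bounded, so later arithmetic cannot overflow.
    public static double Exp(double d)
    {
        return Bound(Math.Exp(d));
    }
}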
Example #11
        /// <summary>
        /// Perform one iteration.
        /// </summary>
        ///
        public override void Iteration()
        {
            if (shouldInit)
            {
                Init();
            }

            int numWeights = this.weights.Length;

            // Storage space for previous iteration values.

            if (this.restart)
            {
                // First time through, set initial values for SCG parameters.
                this.lambda  = TrainFlatNetworkSCG.FIRST_LAMBDA;
                this.lambda2 = 0;
                this.k       = 1;
                this.success = true;
                this.restart = false;
            }

            // If an error reduction is possible, calculate 2nd order info.
            if (this.success)
            {
                // If the search direction is small, stop.
                this.magP = EngineArray.VectorProduct(this.p, this.p);

                double sigma = TrainFlatNetworkSCG.FIRST_SIGMA
                               / Math.Sqrt(this.magP);

                // In order to compute the new step, we need a new gradient.
                // First, save off the old data.
                EngineArray.ArrayCopy(this.gradients, this.oldGradient);
                EngineArray.ArrayCopy(this.weights, this.oldWeights);
                this.oldError = Error;

                // Now we move to the new point in weight space.
                for (int i = 0; i < numWeights; ++i)
                {
                    this.weights[i] += sigma * this.p[i];
                }

                EngineArray.ArrayCopy(this.weights, this.network.Weights);

                // And compute the new gradient.
                CalculateGradients();

                // Now we have the new gradient, and we continue the step
                // computation.
                this.delta = 0;
                for (int i_0 = 0; i_0 < numWeights; ++i_0)
                {
                    double step = (this.gradients[i_0] - this.oldGradient[i_0])
                                  / sigma;
                    this.delta += this.p[i_0] * step;
                }
            }

            // Scale delta.
            this.delta += (this.lambda - this.lambda2) * this.magP;

            // If delta <= 0, make Hessian positive definite.
            if (this.delta <= 0)
            {
                this.lambda2 = 2 * (this.lambda - this.delta / this.magP);
                this.delta   = this.lambda * this.magP - this.delta;
                this.lambda  = this.lambda2;
            }

            // Calculate step size.
            double mu    = EngineArray.VectorProduct(this.p, this.r);
            double alpha = mu / this.delta;

            // Calculate the comparison parameter.
            // We must compute a new gradient, but this time we do not
            // want to keep the old values. They were useful only for
            // approximating the Hessian.
            for (int i_1 = 0; i_1 < numWeights; ++i_1)
            {
                this.weights[i_1] = this.oldWeights[i_1] + alpha * this.p[i_1];
            }

            EngineArray.ArrayCopy(this.weights, this.network.Weights);

            CalculateGradients();

            double gdelta = 2 * this.delta * (this.oldError - Error)
                            / (mu * mu);

            // If gdelta >= 0, a successful reduction in error is possible.
            if (gdelta >= 0)
            {
                // Product of r(k+1) by r(k)
                double rsum = 0;

                // Now r = r(k+1).
                for (int i_2 = 0; i_2 < numWeights; ++i_2)
                {
                    double tmp = -this.gradients[i_2];
                    rsum       += tmp * this.r[i_2];
                    this.r[i_2] = tmp;
                }
                this.lambda2 = 0;
                this.success = true;

                // Do we need to restart?
                if (this.k >= numWeights)
                {
                    this.restart = true;
                    EngineArray.ArrayCopy(this.r, this.p);
                }
                else
                {
                    // Compute new conjugate direction.
                    double beta = (EngineArray.VectorProduct(this.r, this.r) - rsum)
                                  / mu;

                    // Update direction vector.
                    for (int i_3 = 0; i_3 < numWeights; ++i_3)
                    {
                        this.p[i_3] = this.r[i_3] + beta * this.p[i_3];
                    }

                    this.restart = false;
                }

                if (gdelta >= 0.75D)
                {
                    this.lambda *= 0.25D;
                }
            }
            else
            {
                // A reduction in error was not possible.
                // under_tolerance = false;

                // Go back to w(k) since w(k) + alpha*p(k) is not better.
                EngineArray.ArrayCopy(this.oldWeights, this.weights);
                this.currentError = this.oldError;
                this.lambda2      = this.lambda;
                this.success      = false;
            }

            if (gdelta < 0.25D)
            {
                this.lambda += this.delta * (1 - gdelta) / this.magP;
            }

            this.lambda = BoundNumbers.Bound(this.lambda);

            ++this.k;

            EngineArray.ArrayCopy(this.weights, this.network.Weights);
        }
Example #12
        /// <summary>
        /// An activation function for a neural network.
        /// </summary>
        /// <param name="d">The input to the function.</param>
        /// <returns>The output from the function.</returns>
        public double ActivationFunction(double d)
        {
            double result = (BoundNumbers.Exp(d * 2.0) - 1.0) / (BoundNumbers.Exp(d * 2.0) + 1.0);

            return(result);
        }