/// <summary>
        /// Connect layers from a BasicNetwork, copying its weights into the
        /// freeform connections. Used internally only.
        /// </summary>
        /// <param name="network">The BasicNetwork to copy weights from.</param>
        /// <param name="fromLayerIdx">The from layer index.</param>
        /// <param name="source">The from layer.</param>
        /// <param name="target">The target layer.</param>
        private void ConnectLayersFromBasic(BasicNetwork network,
                                            int fromLayerIdx, IFreeformLayer source, IFreeformLayer target)
        {
            for (int targetNeuronIdx = 0; targetNeuronIdx < target.Count; targetNeuronIdx++)
            {
                IFreeformNeuron targetNeuron = target.Neurons[targetNeuronIdx];

                // Neurons with no input summation (i.e. bias neurons) take no
                // inbound connections; skip the entire inner loop for them
                // instead of re-testing once per source neuron.
                if (targetNeuron.InputSummation == null)
                {
                    continue;
                }

                for (int sourceNeuronIdx = 0; sourceNeuronIdx < source.Count; sourceNeuronIdx++)
                {
                    IFreeformNeuron sourceNeuron = source.Neurons[sourceNeuronIdx];

                    // Create the connection and register it on both endpoints.
                    IFreeformConnection connection = _connectionFactory
                                                     .Factor(sourceNeuron, targetNeuron);
                    sourceNeuron.AddOutput(connection);
                    targetNeuron.AddInput(connection);

                    // Copy the matching weight from the BasicNetwork.
                    connection.Weight = network.GetWeight(fromLayerIdx,
                                                          sourceNeuronIdx, targetNeuronIdx);
                }
            }
        }
// ----- Example #2 -----
        /// <inheritdoc/>
        protected override void LearnConnection(IFreeformConnection connection)
        {
            // Blend the current gradient (scaled by the learning rate) with
            // the previous weight delta (scaled by momentum).
            double previousDelta = connection.GetTempTraining(1);
            double delta = _learningRate * connection.GetTempTraining(0)
                           + _momentum * previousDelta;

            // Remember this delta for the next iteration's momentum term,
            // then apply it to the weight.
            connection.SetTempTraining(1, delta);
            connection.Weight += delta;
        }
        /// <summary>
        /// Connect two layers, creating a bias neuron on the source layer
        /// when a bias activation is requested.
        /// </summary>
        /// <param name="source">The source layer.</param>
        /// <param name="target">The target layer.</param>
        /// <param name="theActivationFunction">The activation function to use.</param>
        /// <param name="biasActivation">The bias activation to use.</param>
        /// <param name="isRecurrent">True, if this is a recurrent connection.</param>
        public void ConnectLayers(IFreeformLayer source,
                                  IFreeformLayer target,
                                  IActivationFunction theActivationFunction,
                                  double biasActivation, bool isRecurrent)
        {
            // Add a bias neuron to the source layer, if requested.
            if (biasActivation > EncogFramework.DefaultDoubleEqual)
            {
                // A layer may hold at most one bias neuron.
                if (source.HasBias)
                {
                    throw new FreeformNetworkError(
                              "The source layer already has a bias neuron, you cannot create a second.");
                }
                IFreeformNeuron bias = _neuronFactory.FactorRegular(null);
                bias.Activation = biasActivation;
                bias.IsBias     = true;
                source.Add(bias);
            }

            // Wire every source neuron into every target neuron.
            foreach (IFreeformNeuron toNeuron in target.Neurons)
            {
                // Lazily create the target's input summation; never replace
                // one that already exists.
                if (toNeuron.InputSummation == null)
                {
                    toNeuron.InputSummation =
                        _summationFactory.Factor(theActivationFunction);
                }

                foreach (IFreeformNeuron fromNeuron in source.Neurons)
                {
                    IFreeformConnection link =
                        _connectionFactory.Factor(fromNeuron, toNeuron);
                    fromNeuron.AddOutput(link);
                    toNeuron.AddInput(link);
                }
            }
        }
// ----- Example #4 -----
 /// <inheritdoc/>
 public void Add(IFreeformConnection connection) => _inputs.Add(connection);
        /// <inheritdoc />
        protected override void LearnConnection(IFreeformConnection connection)
        {
            double gradient     = connection.GetTempTraining(TempGradient);
            double lastGradient = connection.GetTempTraining(TempLastGradient);

            // Sign of the product tells whether the gradient kept its
            // direction since the previous iteration.
            int signChange = EncogMath.Sign(gradient * lastGradient);

            double weightDelta = 0;

            if (signChange > 0)
            {
                // Same direction: grow the step so training converges
                // faster, clamped to the configured maximum step.
                double step = Math.Min(
                    connection.GetTempTraining(TempUpdate) * RPROPConst.PositiveEta,
                    _maxStep);
                weightDelta = EncogMath.Sign(gradient) * step;
                connection.SetTempTraining(TempUpdate, step);
                connection.SetTempTraining(TempLastGradient, gradient);
            }
            else if (signChange < 0)
            {
                // Direction flipped: the previous step was too large.
                // Shrink the step and revert the last weight change.
                double step = Math.Max(
                    connection.GetTempTraining(TempUpdate) * RPROPConst.NegativeEta,
                    RPROPConst.DeltaMin);
                connection.SetTempTraining(TempUpdate, step);
                weightDelta = -connection.GetTempTraining(TempLastWeightDelta);
                // Zero the stored gradient so the next iteration makes no
                // adjustment.
                connection.SetTempTraining(TempLastGradient, 0);
            }
            else
            {
                // No sign information: step with the unchanged update value.
                weightDelta = EncogMath.Sign(gradient)
                              * connection.GetTempTraining(TempUpdate);
                connection.SetTempTraining(TempLastGradient, gradient);
            }

            // Apply the weight change, if any, and remember it.
            connection.Weight += weightDelta;
            connection.SetTempTraining(TempLastWeightDelta, weightDelta);
        }
        /// <inheritdoc />
        protected override void LearnConnection(IFreeformConnection connection)
        {
            double grad     = connection.GetTempTraining(TempGradient);
            double prevGrad = connection.GetTempTraining(TempLastGradient);
            double update   = connection.GetTempTraining(TempUpdate);

            // Did the gradient keep (positive), flip (negative) or lose
            // (zero) its sign since the last iteration?
            int direction = EncogMath.Sign(grad * prevGrad);

            double weightChange = 0;

            if (direction > 0)
            {
                // Sign retained: accelerate by growing the update value,
                // never exceeding the maximum step.
                update = Math.Min(update * RPROPConst.PositiveEta, _maxStep);
                weightChange = EncogMath.Sign(grad) * update;
                connection.SetTempTraining(TempUpdate, update);
                connection.SetTempTraining(TempLastGradient, grad);
            }
            else if (direction < 0)
            {
                // Sign flipped: the last delta was too big. Shrink the
                // update value and back out the previous weight change.
                update = Math.Max(update * RPROPConst.NegativeEta,
                                  RPROPConst.DeltaMin);
                connection.SetTempTraining(TempUpdate, update);
                weightChange = -connection.GetTempTraining(TempLastWeightDelta);
                // Forget the gradient so the next iteration skips the
                // adjustment entirely.
                connection.SetTempTraining(TempLastGradient, 0);
            }
            else
            {
                // direction == 0: keep the update value as-is.
                weightChange = EncogMath.Sign(grad) * update;
                connection.SetTempTraining(TempLastGradient, grad);
            }

            // Apply the weight change, if any, and record it for next time.
            connection.Weight += weightChange;
            connection.SetTempTraining(TempLastWeightDelta, weightChange);
        }
 /// <inheritdoc/>
 public void AddOutput(IFreeformConnection connection) => _outputConnections.Add(connection);
 /// <inheritdoc/>
 public void AddInput(IFreeformConnection connection) => InputSummation.Add(connection);
 /// <inheritdoc/>
 public void Add(IFreeformConnection connection) => _inputs.Add(connection);
// ----- Example #10 -----
 /// <inheritdoc/>
 public void AddOutput(IFreeformConnection connection) => _outputConnections.Add(connection);
// ----- Example #11 -----
 /// <inheritdoc/>
 public void AddInput(IFreeformConnection connection) => InputSummation.Add(connection);
 /// <summary>
 /// Learn for a single connection. Implementations apply their training
 /// rule (e.g. backpropagation with momentum, or RPROP) to adjust the
 /// connection's weight.
 /// </summary>
 /// <param name="connection">The connection to learn from.</param>
 protected abstract void LearnConnection(IFreeformConnection connection);
 /// <inheritdoc/>
 protected override void LearnConnection(IFreeformConnection connection)
 {
     // delta = learningRate * gradient + momentum * previousDelta
     double delta = (connection.GetTempTraining(0) * _learningRate)
             + (_momentum * connection.GetTempTraining(1));

     // Store the delta for the next momentum term and apply it.
     connection.SetTempTraining(1, delta);
     connection.Weight += delta;
 }
 /// <summary>
 /// Learn for a single connection. Implementations apply their training
 /// rule (e.g. backpropagation with momentum, or RPROP) to adjust the
 /// connection's weight.
 /// </summary>
 /// <param name="connection">The connection to learn from.</param>
 protected abstract void LearnConnection(IFreeformConnection connection);