/// <summary>
        /// Connect two layers.
        /// </summary>
        /// <param name="source">The source layer.</param>
        /// <param name="target">The target layer.</param>
        /// <param name="theActivationFunction">The activation function to use.</param>
        /// <param name="biasActivation">The bias activation to use.</param>
        /// <param name="isRecurrent">True, if this is a recurrent connection.</param>
        public void ConnectLayers(IFreeformLayer source,
                                  IFreeformLayer target,
                                  IActivationFunction theActivationFunction,
                                  double biasActivation, bool isRecurrent)
        {
            // create bias, if requested
            if (biasActivation > EncogFramework.DefaultDoubleEqual)
            {
                // does the source already have a bias?
                if (source.HasBias)
                {
                    throw new FreeformNetworkError(
                              "The source layer already has a bias neuron, you cannot create a second.");
                }
                IFreeformNeuron biasNeuron = _neuronFactory
                                             .FactorRegular(null);
                biasNeuron.Activation = biasActivation;
                biasNeuron.IsBias     = true;
                source.Add(biasNeuron);
            }

            // create connections
            foreach (IFreeformNeuron targetNeuron in target.Neurons)
            {
                // create the summation for the target
                IInputSummation summation = targetNeuron.InputSummation;

                // do not create a second input summation
                if (summation == null)
                {
                    summation = _summationFactory.Factor(theActivationFunction);
                    targetNeuron.InputSummation = summation;
                }

                // connect the source neurons to the target neuron
                foreach (IFreeformNeuron sourceNeuron in source.Neurons)
                {
                    IFreeformConnection connection = _connectionFactory
                                                     .Factor(sourceNeuron, targetNeuron);
                    sourceNeuron.AddOutput(connection);
                    targetNeuron.AddInput(connection);
                }
            }
        }
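For orientation, here is a minimal usage sketch of ConnectLayers. It assumes the layer-creation helpers CreateInputLayer, CreateLayer and CreateOutputLayer plus ActivationSigmoid from the standard Encog 3.x C# API; those names come from the wider library, not from the snippet above, so treat them as assumptions.

// Usage sketch: build a small 2-3-1 freeform network by hand.
// Namespaces assumed from Encog 3.x for .NET; adjust to your version.
using Encog.Engine.Network.Activation;
using Encog.Neural.Freeform;

var network = new FreeformNetwork();

// Layer-creation helpers are assumed to exist on FreeformNetwork.
IFreeformLayer input  = network.CreateInputLayer(2);
IFreeformLayer hidden = network.CreateLayer(3);
IFreeformLayer output = network.CreateOutputLayer(1);

// A bias activation of 1.0 makes ConnectLayers add a bias neuron to the
// source layer; false requests an ordinary (non-recurrent) connection.
network.ConnectLayers(input, hidden, new ActivationSigmoid(), 1.0, false);
network.ConnectLayers(hidden, output, new ActivationSigmoid(), 1.0, false);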
 /// <inheritdoc/>
 public IFreeformNeuron FactorRegular(IInputSummation o)
 {
     return new BasicFreeformNeuron(o);
 }
 /// <summary>
 /// Construct a basic freeform neuron.
 /// </summary>
 /// <param name="theInputSummation">The input summation to use.</param>
 public BasicFreeformNeuron(IInputSummation theInputSummation)
 {
     InputSummation = theInputSummation;
 }
        /// <summary>
        /// Create a freeform network from a basic network.
        /// </summary>
        /// <param name="network">The basic network to use.</param>
        public FreeformNetwork(BasicNetwork network)
        {
            if (network.LayerCount < 2)
            {
                throw new FreeformNetworkError(
                          "The BasicNetwork must have at least two layers to be converted.");
            }

            // handle each layer
            IFreeformLayer previousLayer = null;

            for (int currentLayerIndex = 0;
                 currentLayerIndex < network.LayerCount;
                 currentLayerIndex++)
            {
                // create the layer
                IFreeformLayer currentLayer = _layerFactory.Factor();

                // Is this the input layer?
                if (_inputLayer == null)
                {
                    _inputLayer = currentLayer;
                }

                // Add the neurons for this layer
                for (int i = 0; i < network.GetLayerNeuronCount(currentLayerIndex); i++)
                {
                    // obtain the summation object.
                    IInputSummation summation = null;

                    if (previousLayer != null)
                    {
                        summation = _summationFactory.Factor(
                            network.GetActivation(currentLayerIndex));
                    }

                    // add the new neuron
                    currentLayer.Add(_neuronFactory.FactorRegular(summation));
                }

                // Fully connect this layer to previous
                if (previousLayer != null)
                {
                    ConnectLayersFromBasic(network, currentLayerIndex - 1,
                                           previousLayer, currentLayer);
                }

                // Add the bias neuron
                // The bias is added after connections so it has no inputs
                if (network.IsLayerBiased(currentLayerIndex))
                {
                    IFreeformNeuron biasNeuron = _neuronFactory
                                                 .FactorRegular(null);
                    biasNeuron.IsBias     = true;
                    biasNeuron.Activation = network
                                            .GetLayerBiasActivation(currentLayerIndex);
                    currentLayer.Add(biasNeuron);
                }

                // update previous layer
                previousLayer = currentLayer;
            }

            // finally, set the output layer.
            _outputLayer = previousLayer;
        }
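The constructor above walks a conventional, layered BasicNetwork and rebuilds it as a freeform network: one freeform layer per basic layer, neurons with the matching activation function, bias neurons with the original bias activation, and connections created via ConnectLayersFromBasic. A minimal conversion sketch, assuming the standard Encog 3.x C# classes BasicNetwork, BasicLayer and ActivationSigmoid:

// Conversion sketch: build a simple 2-3-1 BasicNetwork, then wrap it.
// Namespaces assumed from Encog 3.x for .NET; adjust to your version.
using Encog.Engine.Network.Activation;
using Encog.Neural.Freeform;
using Encog.Neural.Networks;
using Encog.Neural.Networks.Layers;

var basic = new BasicNetwork();
basic.AddLayer(new BasicLayer(null, true, 2));                     // input layer with bias
basic.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));  // hidden layer
basic.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1)); // output layer
basic.Structure.FinalizeStructure();
basic.Reset(); // randomize the weights

// The constructor copies layers, activations and biases; ConnectLayersFromBasic
// (not shown above) is expected to carry the weights across.
var freeform = new FreeformNetwork(basic);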