/// <summary>
/// Create a context connection, such as those used by Jordan/Elman networks.
/// </summary>
/// <param name="source">The source layer.</param>
/// <param name="target">The target layer.</param>
/// <returns>The newly created context layer.</returns>
public IFreeformLayer CreateContext(IFreeformLayer source, IFreeformLayer target)
{
    const double biasActivation = 0.0;

    if (source.Neurons[0].Outputs.Count < 1)
    {
        throw new FreeformNetworkError(
            "A layer cannot have a context layer connected if there are no other outbound connections from the source layer. Please connect the source layer somewhere else first.");
    }

    IActivationFunction activationFunction = source.Neurons[0].InputSummation.ActivationFunction;

    // first create the context layer
    IFreeformLayer result = _layerFactory.Factor();

    for (int i = 0; i < source.Count; i++)
    {
        IFreeformNeuron neuron = source.Neurons[i];
        if (neuron.IsBias)
        {
            IFreeformNeuron biasNeuron = _neuronFactory.FactorRegular(null);
            biasNeuron.IsBias = true;
            biasNeuron.Activation = neuron.Activation;
            result.Add(biasNeuron);
        }
        else
        {
            IFreeformNeuron contextNeuron = _neuronFactory.FactorContext(neuron);
            result.Add(contextNeuron);
        }
    }

    // now connect the context layer to the target layer
    ConnectLayers(result, target, activationFunction, biasActivation, false);

    return result;
}
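// Illustrative usage sketch (not part of the original source): creating an
// Elman-style context loop by feeding a hidden layer's state back into itself.
// The variables "network" (a FreeformNetwork) and "hiddenLayer" (an already
// connected IFreeformLayer) are assumed to exist elsewhere.
//
//     IFreeformLayer contextLayer = network.CreateContext(hiddenLayer, hiddenLayer);
//
// Note that CreateContext throws if hiddenLayer has no outbound connections yet,
// so the hidden layer must be wired to a downstream layer first.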
/// <summary>
/// Create a hidden layer.
/// </summary>
/// <param name="neuronCount">The neuron count.</param>
/// <returns>The newly created layer.</returns>
public IFreeformLayer CreateLayer(int neuronCount)
{
    if (neuronCount < 1)
    {
        throw new FreeformNetworkError(
            "Layer must have at least one neuron.");
    }

    IFreeformLayer result = _layerFactory.Factor();

    // Add the neurons for this layer
    for (int i = 0; i < neuronCount; i++)
    {
        result.Add(_neuronFactory.FactorRegular(null));
    }

    return result;
}
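// Illustrative usage sketch (not part of the original source): building a small
// network from freshly created layers. The variable "network" is assumed to be a
// FreeformNetwork, and ActivationTANH is the standard Encog activation class.
//
//     IFreeformLayer hidden = network.CreateLayer(4);
//     IFreeformLayer output = network.CreateLayer(1);
//     network.ConnectLayers(hidden, output, new ActivationTANH(), 1.0, false);
//
// CreateLayer only allocates the neurons; ConnectLayers (below) wires the layers
// together and attaches the activation function and optional bias neuron.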
/// <summary>
/// Connect two layers.
/// </summary>
/// <param name="source">The source layer.</param>
/// <param name="target">The target layer.</param>
/// <param name="theActivationFunction">The activation function to use.</param>
/// <param name="biasActivation">The bias activation to use.</param>
/// <param name="isRecurrent">True, if this is a recurrent connection.</param>
public void ConnectLayers(IFreeformLayer source, IFreeformLayer target,
    IActivationFunction theActivationFunction, double biasActivation,
    bool isRecurrent)
{
    // create bias, if requested
    if (biasActivation > EncogFramework.DefaultDoubleEqual)
    {
        // does the source already have a bias?
        if (source.HasBias)
        {
            throw new FreeformNetworkError(
                "The source layer already has a bias neuron, you cannot create a second.");
        }

        IFreeformNeuron biasNeuron = _neuronFactory.FactorRegular(null);
        biasNeuron.Activation = biasActivation;
        biasNeuron.IsBias = true;
        source.Add(biasNeuron);
    }

    // create connections
    foreach (IFreeformNeuron targetNeuron in target.Neurons)
    {
        // obtain the summation for the target;
        // do not create a second input summation if one already exists
        IInputSummation summation = targetNeuron.InputSummation;
        if (summation == null)
        {
            summation = _summationFactory.Factor(theActivationFunction);
            targetNeuron.InputSummation = summation;
        }

        // connect the source neurons to the target neuron
        foreach (IFreeformNeuron sourceNeuron in source.Neurons)
        {
            IFreeformConnection connection = _connectionFactory.Factor(sourceNeuron, targetNeuron);
            sourceNeuron.AddOutput(connection);
            targetNeuron.AddInput(connection);
        }
    }
}
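// Illustrative usage sketch (not part of the original source): fully connecting
// an input layer to a hidden layer with a sigmoid activation and a bias neuron.
// The variables "network", "inputLayer" and "hiddenLayer" are assumed to exist.
//
//     network.ConnectLayers(inputLayer, hiddenLayer, new ActivationSigmoid(), 1.0, false);
//
// Passing a biasActivation of 0.0 (as CreateContext does above) skips the bias
// neuron, because the value is compared against EncogFramework.DefaultDoubleEqual.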
/// <summary>
/// Create a freeform network from a basic network.
/// </summary>
/// <param name="network">The basic network to use.</param>
public FreeformNetwork(BasicNetwork network)
{
    if (network.LayerCount < 2)
    {
        throw new FreeformNetworkError(
            "The BasicNetwork must have at least two layers to be converted.");
    }

    // handle each layer
    IFreeformLayer previousLayer = null;
    for (int currentLayerIndex = 0; currentLayerIndex < network.LayerCount; currentLayerIndex++)
    {
        // create the layer
        IFreeformLayer currentLayer = _layerFactory.Factor();

        // Is this the input layer?
        if (_inputLayer == null)
        {
            _inputLayer = currentLayer;
        }

        // Add the neurons for this layer
        for (int i = 0; i < network.GetLayerNeuronCount(currentLayerIndex); i++)
        {
            // obtain the summation object.
            IInputSummation summation = null;
            if (previousLayer != null)
            {
                summation = _summationFactory.Factor(network.GetActivation(currentLayerIndex));
            }

            // add the new neuron
            currentLayer.Add(_neuronFactory.FactorRegular(summation));
        }

        // Fully connect this layer to the previous layer
        if (previousLayer != null)
        {
            ConnectLayersFromBasic(network, currentLayerIndex - 1, previousLayer, currentLayer);
        }

        // Add the bias neuron.
        // The bias is added after connections so it has no inputs.
        if (network.IsLayerBiased(currentLayerIndex))
        {
            IFreeformNeuron biasNeuron = _neuronFactory.FactorRegular(null);
            biasNeuron.IsBias = true;
            biasNeuron.Activation = network.GetLayerBiasActivation(currentLayerIndex);
            currentLayer.Add(biasNeuron);
        }

        // update previous layer
        previousLayer = currentLayer;
    }

    // finally, set the output layer.
    _outputLayer = previousLayer;
}
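// Illustrative usage sketch (not part of the original source): converting a
// conventional Encog BasicNetwork into a FreeformNetwork. The BasicNetwork setup
// below (BasicLayer, ActivationSigmoid, FinalizeStructure, Reset) follows the
// usual Encog pattern and is only an assumed example configuration.
//
//     var basic = new BasicNetwork();
//     basic.AddLayer(new BasicLayer(null, true, 2));
//     basic.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
//     basic.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
//     basic.Structure.FinalizeStructure();
//     basic.Reset();
//
//     var freeform = new FreeformNetwork(basic);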