/// <summary>
/// Connect layers from a BasicNetwork. Used internally only.
/// </summary>
/// <param name="network">The BasicNetwork.</param>
/// <param name="fromLayerIdx">The from layer index.</param>
/// <param name="source">The from layer.</param>
/// <param name="target">The target.</param>
private void ConnectLayersFromBasic(BasicNetwork network, int fromLayerIdx,
    IFreeformLayer source, IFreeformLayer target)
{
    for (int targetIdx = 0; targetIdx < target.Count; targetIdx++)
    {
        IFreeformNeuron targetNeuron = target.Neurons[targetIdx];

        // Neurons with no input summation (i.e. bias neurons) receive no
        // inbound connections at all.
        if (targetNeuron.InputSummation == null)
        {
            continue;
        }

        for (int sourceIdx = 0; sourceIdx < source.Count; sourceIdx++)
        {
            IFreeformNeuron sourceNeuron = source.Neurons[sourceIdx];

            // Wire source -> target and register the link on both ends.
            IFreeformConnection connection =
                _connectionFactory.Factor(sourceNeuron, targetNeuron);
            sourceNeuron.AddOutput(connection);
            targetNeuron.AddInput(connection);

            // Copy the corresponding weight out of the BasicNetwork.
            connection.Weight =
                network.GetWeight(fromLayerIdx, sourceIdx, targetIdx);
        }
    }
}
/// <inheritdoc/>
/// <remarks>
/// Counts the network's weights by walking the connection graph backwards
/// from the output layer, incrementing once per inbound connection seen.
/// NOTE(review): the visited check guards enqueueing, not popping, so a
/// neuron reachable from several targets may sit in the queue more than
/// once and have its inbound connections counted repeatedly. This walk
/// appears to be deliberately kept in lockstep with EncodeToArray (same
/// traversal, same order) -- confirm before changing either one.
/// </remarks>
public int EncodedArrayLength()
{
    int result = 0;
    var visited = new HashSet<IFreeformNeuron>();

    // first copy outputs to queue; the walk then proceeds backwards
    // toward the inputs
    IList<IFreeformNeuron> queue = _outputLayer.Neurons.ToList();

    while (queue.Count > 0)
    {
        // pop a neuron off the queue
        IFreeformNeuron neuron = queue[0];
        queue.RemoveAt(0);
        visited.Add(neuron);

        // Count every inbound connection, then queue any source neurons
        // not yet visited. Bias/input neurons have no InputSummation and
        // contribute nothing here.
        if (neuron.InputSummation != null)
        {
            foreach (IFreeformConnection connection in neuron
                .InputSummation.List)
            {
                result++;
                IFreeformNeuron nextNeuron = connection.Source;
                if (!visited.Contains(nextNeuron))
                {
                    queue.Add(nextNeuron);
                }
            }
        }
    }
    return (result);
}
/// <summary>
/// Construct a basic freeform connection between two neurons.
/// </summary>
/// <param name="theSource">The source neuron.</param>
/// <param name="theTarget">The target neuron.</param>
public BasicFreeformConnection(IFreeformNeuron theSource,
    IFreeformNeuron theTarget)
{
    Source = theSource;
    Target = theTarget;

    // New connections always begin with a zero weight.
    Weight = 0.0;
}
/// <summary>
/// Process training for pure batch mode (one single batch).
/// Gradients are accumulated over the entire training set and applied
/// once at the end.
/// </summary>
protected void ProcessPureBatch()
{
    var errorCalc = new ErrorCalculation();
    _visited.Clear();

    foreach (IMLDataPair pair in _training)
    {
        var actual = _network.Compute(pair.Input);
        var ideal = pair.Ideal;
        var sig = pair.Significance;

        errorCalc.UpdateError(actual, ideal, sig);

        // Seed each output neuron's delta, then back-propagate gradients.
        for (int outIdx = 0; outIdx < _network.OutputCount; outIdx++)
        {
            var diff = (ideal[outIdx] - actual[outIdx]) * sig;
            IFreeformNeuron outputNeuron = _network.OutputLayer.Neurons[outIdx];
            CalculateOutputDelta(outputNeuron, diff);
            CalculateNeuronGradient(outputNeuron);
        }
    }

    // Set the overall error.
    Error = errorCalc.Calculate();

    // Apply the accumulated gradients for all data.
    Learn();
}
/// <inheritdoc/>
/// <remarks>
/// Writes every connection weight into the supplied array by walking the
/// connection graph backwards from the output layer.
/// NOTE(review): this traversal is identical to EncodedArrayLength's,
/// including the possibility of a neuron being queued (and its weights
/// written) more than once; the two must stay in lockstep so the index
/// never exceeds the reported length -- confirm before changing either.
/// </remarks>
public void EncodeToArray(double[] encoded)
{
    int index = 0;
    var visited = new HashSet<IFreeformNeuron>();

    // first copy outputs to queue; the walk then proceeds backwards
    // toward the inputs
    IList<IFreeformNeuron> queue = _outputLayer.Neurons.ToList();

    while (queue.Count > 0)
    {
        // pop a neuron off the queue
        IFreeformNeuron neuron = queue[0];
        queue.RemoveAt(0);
        visited.Add(neuron);

        // Store each inbound weight, then queue any source neurons not
        // yet visited (bias/input neurons have no InputSummation).
        if (neuron.InputSummation != null)
        {
            foreach (IFreeformConnection connection in neuron
                .InputSummation.List)
            {
                encoded[index++] = connection.Weight;
                IFreeformNeuron nextNeuron = connection.Source;
                if (!visited.Contains(nextNeuron))
                {
                    queue.Add(nextNeuron);
                }
            }
        }
    }
}
/// <summary>
/// Calculate the gradient for a neuron, accumulating inbound connection
/// gradients and back-propagating layer deltas to the source neurons.
/// </summary>
/// <param name="toNeuron">The neuron to calculate for.</param>
private void CalculateNeuronGradient(IFreeformNeuron toNeuron)
{
    // Only calculate if the neuron has inputs. Output deltas were already
    // seeded by CalculateOutputDelta before this method is first called.
    if (toNeuron.InputSummation != null)
    {
        // Accumulate the gradient on each inbound connection and compute
        // the delta for each feeding neuron.
        foreach (IFreeformConnection connection in toNeuron
            .InputSummation.List)
        {
            // gradient = source activation * target delta (delta is held
            // in temp-training slot 0)
            double gradient = connection.Source.Activation
                * toNeuron.GetTempTraining(0);
            connection.AddTempTraining(0, gradient);

            // Next-layer delta: sum the weighted deltas of everything the
            // source neuron feeds.
            IFreeformNeuron fromNeuron = connection.Source;
            double sum = fromNeuron.Outputs.Sum(toConnection =>
                toConnection.Target.GetTempTraining(0) * toConnection.Weight);
            double neuronOutput = fromNeuron.Activation;
            double neuronSum = fromNeuron.Sum;
            // NOTE(review): the derivative uses toNeuron's activation
            // function applied to fromNeuron's sum/output. If layers can
            // use different activation functions this looks suspect --
            // confirm intended behavior.
            double deriv = toNeuron.InputSummation
                .ActivationFunction
                .DerivativeFunction(neuronSum, neuronOutput);

            // Flat-spot fix: nudge the sigmoid derivative away from zero
            // so training does not stall where the derivative vanishes.
            if (FixFlatSopt && (toNeuron.InputSummation
                .ActivationFunction is ActivationSigmoid))
            {
                deriv += FlatSpotConst;
            }
            double layerDelta = sum * deriv;
            fromNeuron.SetTempTraining(0, layerDelta);
        }

        // Recurse toward the input layer.
        // NOTE(review): there is no visited set, so a neuron feeding
        // multiple targets is revisited once per inbound path -- verify
        // the repeated gradient accumulation is intended.
        foreach (IFreeformConnection connection in toNeuron
            .InputSummation.List)
        {
            IFreeformNeuron fromNeuron = connection.Source;
            CalculateNeuronGradient(fromNeuron);
        }
    }
}
/// <summary>
/// Calculate the output delta for a neuron, given its difference.
/// Only used for output neurons. The result is stored in temp-training
/// slot 0.
/// </summary>
/// <param name="neuron">The neuron.</param>
/// <param name="diff">The difference.</param>
private void CalculateOutputDelta(IFreeformNeuron neuron, double diff)
{
    IInputSummation summation = neuron.InputSummation;

    double deriv = summation.ActivationFunction.DerivativeFunction(
        summation.Sum, neuron.Activation);

    // Flat-spot fix: keep the sigmoid derivative away from zero so
    // training does not stall at saturated outputs.
    if (FixFlatSopt && summation.ActivationFunction is ActivationSigmoid)
    {
        deriv += FlatSpotConst;
    }

    neuron.SetTempTraining(0, deriv * diff);
}
/// <summary>
/// Connect two layers, optionally adding a bias neuron to the source.
/// </summary>
/// <param name="source">The source layer.</param>
/// <param name="target">The target layer.</param>
/// <param name="theActivationFunction">The activation function to use.</param>
/// <param name="biasActivation">The bias activation to use.</param>
/// <param name="isRecurrent">True, if this is a recurrent connection.
/// NOTE(review): this flag is never referenced in the method body --
/// confirm whether recurrent wiring is handled elsewhere or is simply
/// not implemented here.</param>
public void ConnectLayers(IFreeformLayer source,
    IFreeformLayer target,
    IActivationFunction theActivationFunction,
    double biasActivation,
    bool isRecurrent)
{
    // create bias, if requested: any activation above the framework's
    // double-equality tolerance counts as a request
    if (biasActivation > EncogFramework.DefaultDoubleEqual)
    {
        // does the source already have a bias?
        if (source.HasBias)
        {
            throw new FreeformNetworkError(
                "The source layer already has a bias neuron, you cannot create a second.");
        }
        IFreeformNeuron biasNeuron = _neuronFactory
            .FactorRegular(null);
        biasNeuron.Activation = biasActivation;
        biasNeuron.IsBias = true;
        source.Add(biasNeuron);
    }

    // create connections
    foreach (IFreeformNeuron targetNeuron in target.Neurons)
    {
        // create the summation for the target, but do not replace an
        // existing one (the neuron may already be connected from
        // another layer)
        IInputSummation summation = targetNeuron.InputSummation;
        if (summation == null)
        {
            summation = _summationFactory.Factor(theActivationFunction);
            targetNeuron.InputSummation = summation;
        }

        // connect the source neurons (including any bias neuron added
        // above) to the target neuron
        foreach (IFreeformNeuron sourceNeuron in source.Neurons)
        {
            IFreeformConnection connection = _connectionFactory
                .Factor(sourceNeuron, targetNeuron);
            sourceNeuron.AddOutput(connection);
            targetNeuron.AddInput(connection);
        }
    }
}
/// <summary>
/// Process training in batches: gradients are applied every BatchSize
/// pairs, plus once more for any partial final batch.
/// </summary>
protected void ProcessBatches()
{
    var errorCalc = new ErrorCalculation();
    int pairsSinceLearn = 0;
    _visited.Clear();

    foreach (IMLDataPair pair in _training)
    {
        var actual = _network.Compute(pair.Input);
        var ideal = pair.Ideal;
        var sig = pair.Significance;

        errorCalc.UpdateError(actual, ideal, sig);

        // Seed each output neuron's delta, then back-propagate gradients.
        for (int i = 0; i < _network.OutputCount; i++)
        {
            double diff = (ideal[i] - actual[i]) * sig;
            IFreeformNeuron neuron = _network.OutputLayer.Neurons[i];
            CalculateOutputDelta(neuron, diff);
            CalculateNeuronGradient(neuron);
        }

        // Apply accumulated gradients at the end of each full batch.
        pairsSinceLearn++;
        if (pairsSinceLearn >= BatchSize)
        {
            pairsSinceLearn = 0;
            Learn();
        }
    }

    // Flush any remaining partial batch.
    if (pairsSinceLearn > 0)
    {
        Learn();
    }

    // Set the overall error.
    Error = errorCalc.Calculate();
}
/// <summary>
/// Create a context connection, such as those used by Jordan/Elmann.
/// </summary>
/// <param name="source">The source layer.</param>
/// <param name="target">The target layer.</param>
/// <returns>The newly created context layer.</returns>
public IFreeformLayer CreateContext(IFreeformLayer source, IFreeformLayer target)
{
    const double biasActivation = 0.0;

    // The source layer must already be connected somewhere, otherwise its
    // activation function cannot be determined below.
    if (source.Neurons[0].Outputs.Count < 1)
    {
        throw new FreeformNetworkError(
            "A layer cannot have a context layer connected if there are no other outbound connections from the source layer. Please connect the source layer somewhere else first.");
    }

    IActivationFunction activationFunction = source.Neurons[0].InputSummation
        .ActivationFunction;

    // Build the context layer: bias neurons are mirrored directly, regular
    // neurons each get a context neuron bound to them.
    IFreeformLayer contextLayer = _layerFactory.Factor();
    for (int idx = 0; idx < source.Count; idx++)
    {
        IFreeformNeuron sourceNeuron = source.Neurons[idx];
        if (sourceNeuron.IsBias)
        {
            IFreeformNeuron mirroredBias = _neuronFactory.FactorRegular(null);
            mirroredBias.IsBias = true;
            mirroredBias.Activation = sourceNeuron.Activation;
            contextLayer.Add(mirroredBias);
        }
        else
        {
            contextLayer.Add(_neuronFactory.FactorContext(sourceNeuron));
        }
    }

    // Wire the context layer into the target layer.
    ConnectLayers(contextLayer, target, activationFunction, biasActivation,
        false);

    return contextLayer;
}
/// <summary>
/// Perform the specified neuron task on the parent neuron and, recursively,
/// on every unvisited neuron that feeds it.
/// </summary>
/// <param name="visited">The visited list.</param>
/// <param name="parentNeuron">The neuron to start with.</param>
/// <param name="task">The task to perform.</param>
private void PerformNeuronTask(HashSet <IFreeformNeuron> visited,
    IFreeformNeuron parentNeuron, FreeformNeuronTask task)
{
    visited.Add(parentNeuron);
    task(parentNeuron);

    // No input summation means nothing feeds this neuron (input or bias
    // neuron), so the recursion bottoms out here.
    if (parentNeuron.InputSummation == null)
    {
        return;
    }

    foreach (IFreeformConnection connection in parentNeuron
        .InputSummation.List)
    {
        IFreeformNeuron sourceNeuron = connection.Source;

        // Skip neurons we have already processed on another path.
        if (visited.Contains(sourceNeuron))
        {
            continue;
        }
        PerformNeuronTask(visited, sourceNeuron, task);
    }
}
/// <inheritdoc/>
public IMLData Compute(IMLData input)
{
    // Allocate space for one value per output neuron.
    var result = new BasicMLData(_outputLayer.Count);

    // Load the provided values into the input layer.
    for (int idx = 0; idx < input.Count; idx++)
    {
        _inputLayer.SetActivation(idx, input[idx]);
    }

    // Ask each output neuron to calculate; this pulls activations through
    // the network, then record the results.
    for (int idx = 0; idx < _outputLayer.Count; idx++)
    {
        IFreeformNeuron outputNeuron = _outputLayer.Neurons[idx];
        outputNeuron.PerformCalculation();
        result[idx] = outputNeuron.Activation;
    }

    // Refresh recurrent context state after the forward pass.
    UpdateContext();

    return result;
}
/// <summary>
/// Construct the context neuron.
/// </summary>
/// <param name="theContextSource">The context source.</param>
public FreeformContextNeuron(IFreeformNeuron theContextSource)
    : base(null)
{
    // base(null): a context neuron is created without an input summation
    // of its own -- presumably its activation is copied from the context
    // source elsewhere (e.g. during a context update); confirm in the
    // enclosing class.
    ContextSource = theContextSource;
}
/// <inheritdoc/>
public IFreeformNeuron FactorContext(IFreeformNeuron neuron)
{
    // Wrap the source neuron in a context neuron bound to it.
    return new FreeformContextNeuron(neuron);
}
/// <summary>
/// Perform the specified neuron task, depth-first, starting at the parent
/// neuron and walking backwards through unvisited source neurons.
/// </summary>
/// <param name="visited">The visited list.</param>
/// <param name="parentNeuron">The neuron to start with.</param>
/// <param name="task">The task to perform.</param>
private void PerformNeuronTask(HashSet<IFreeformNeuron> visited,
    IFreeformNeuron parentNeuron, FreeformNeuronTask task)
{
    // Run the task on this neuron before descending into its inputs.
    visited.Add(parentNeuron);
    task(parentNeuron);

    if (parentNeuron.InputSummation == null)
    {
        // Input/bias neurons have nothing feeding them; stop here.
        return;
    }

    foreach (IFreeformConnection inbound in parentNeuron.InputSummation.List)
    {
        if (!visited.Contains(inbound.Source))
        {
            PerformNeuronTask(visited, inbound.Source, task);
        }
    }
}
/// <inheritdoc/>
public IFreeformConnection Factor(IFreeformNeuron theSourceNeuron,
    IFreeformNeuron theTargetNeuron)
{
    // New connections start out with a zero weight (set by the
    // BasicFreeformConnection constructor).
    IFreeformConnection connection =
        new BasicFreeformConnection(theSourceNeuron, theTargetNeuron);
    return connection;
}
/// <inheritdoc/>
public void Add(IFreeformNeuron neuron)
{
    // Append the neuron to this layer's collection; no duplicate or null
    // checking is performed here.
    _neurons.Add(neuron);
}
/// <inheritdoc/>
public IFreeformNeuron FactorContext(IFreeformNeuron neuron)
{
    // A context neuron is simply bound to the neuron whose output it
    // will echo.
    return new FreeformContextNeuron(neuron);
}
/// <summary>
/// Calculate the gradient for a neuron, accumulating inbound connection
/// gradients and back-propagating layer deltas to the source neurons.
/// </summary>
/// <param name="toNeuron">The neuron to calculate for.</param>
private void CalculateNeuronGradient(IFreeformNeuron toNeuron)
{
    // Only calculate if the neuron has inputs. Output deltas were already
    // seeded by CalculateOutputDelta before this method is first called.
    if (toNeuron.InputSummation != null)
    {
        // Accumulate the gradient on each inbound connection and compute
        // the delta for each feeding neuron.
        foreach (IFreeformConnection connection in toNeuron
            .InputSummation.List)
        {
            // gradient = source activation * target delta (delta is held
            // in temp-training slot 0)
            double gradient = connection.Source.Activation
                * toNeuron.GetTempTraining(0);
            connection.AddTempTraining(0, gradient);

            // Next-layer delta: sum the weighted deltas of everything the
            // source neuron feeds.
            IFreeformNeuron fromNeuron = connection.Source;
            double sum = fromNeuron.Outputs.Sum(toConnection =>
                toConnection.Target.GetTempTraining(0)*toConnection.Weight);
            double neuronOutput = fromNeuron.Activation;
            double neuronSum = fromNeuron.Sum;
            // NOTE(review): the derivative uses toNeuron's activation
            // function applied to fromNeuron's sum/output. If layers can
            // use different activation functions this looks suspect --
            // confirm intended behavior.
            double deriv = toNeuron.InputSummation
                .ActivationFunction
                .DerivativeFunction(neuronSum, neuronOutput);

            // Flat-spot fix: nudge the sigmoid derivative away from zero
            // so training does not stall where the derivative vanishes.
            if (FixFlatSopt && (toNeuron.InputSummation
                .ActivationFunction is ActivationSigmoid))
            {
                deriv += FlatSpotConst;
            }
            double layerDelta = sum * deriv;
            fromNeuron.SetTempTraining(0, layerDelta);
        }

        // Recurse toward the input layer.
        // NOTE(review): there is no visited set, so a neuron feeding
        // multiple targets is revisited once per inbound path -- verify
        // the repeated gradient accumulation is intended.
        foreach (IFreeformConnection connection in toNeuron
            .InputSummation.List)
        {
            IFreeformNeuron fromNeuron = connection.Source;
            CalculateNeuronGradient(fromNeuron);
        }
    }
}
/// <inheritdoc/>
public IFreeformConnection Factor(IFreeformNeuron theSourceNeuron,
    IFreeformNeuron theTargetNeuron)
{
    // Build a plain connection; its weight starts at zero (set by the
    // BasicFreeformConnection constructor).
    IFreeformConnection result =
        new BasicFreeformConnection(theSourceNeuron, theTargetNeuron);
    return result;
}
/// <summary>
/// Create a freeform network from a basic network.
/// </summary>
/// <param name="network">The basic network to use.</param>
/// <exception cref="FreeformNetworkError">
/// If the basic network has fewer than two layers.
/// </exception>
public FreeformNetwork(BasicNetwork network)
{
    if (network.LayerCount < 2)
    {
        throw new FreeformNetworkError(
            "The BasicNetwork must have at least two layers to be converted.");
    }

    // handle each layer
    IFreeformLayer previousLayer = null;
    for (int currentLayerIndex = 0; currentLayerIndex < network
        .LayerCount; currentLayerIndex++)
    {
        // create the layer
        IFreeformLayer currentLayer = _layerFactory.Factor();

        // Is this the input layer? The first layer created becomes it.
        if (_inputLayer == null)
        {
            _inputLayer = currentLayer;
        }

        // Add the neurons for this layer
        for (int i = 0; i < network.GetLayerNeuronCount(currentLayerIndex); i++)
        {
            // obtain the summation object; the first layer (no previous
            // layer) gets none because nothing feeds it
            IInputSummation summation = null;
            if (previousLayer != null)
            {
                summation = _summationFactory.Factor(network
                    .GetActivation(currentLayerIndex));
            }

            // add the new neuron
            currentLayer.Add(_neuronFactory.FactorRegular(summation));
        }

        // Fully connect this layer to previous, copying weights from the
        // BasicNetwork's weight matrix
        if (previousLayer != null)
        {
            ConnectLayersFromBasic(network, currentLayerIndex - 1,
                previousLayer, currentLayer);
        }

        // Add the bias neuron.
        // The bias is added after connections so it has no inputs --
        // this ordering is deliberate; do not move it above the connect.
        if (network.IsLayerBiased(currentLayerIndex))
        {
            IFreeformNeuron biasNeuron = _neuronFactory
                .FactorRegular(null);
            biasNeuron.IsBias = true;
            biasNeuron.Activation = network
                .GetLayerBiasActivation(currentLayerIndex);
            currentLayer.Add(biasNeuron);
        }

        // update previous layer
        previousLayer = currentLayer;
    }

    // finally, set the output layer (the last layer created).
    _outputLayer = previousLayer;
}