/// <summary>
/// Compute the output for a given input to the neural network. This method
/// accepts an optional output holder; the holder allows propagation training
/// to track the output from each layer. If you do not need the holder, pass
/// null or use the other Compute method.
/// </summary>
/// <param name="input">The input provided to the neural network.</param>
/// <param name="useHolder">An optional holder that lets propagation training
/// inspect the output of each layer.</param>
/// <returns>The results from the output neurons.</returns>
public virtual INeuralData Compute(INeuralData input, NeuralOutputHolder useHolder)
{
    NeuralOutputHolder holder;

    ILayer inputLayer = this.network.GetLayer(BasicNetwork.TAG_INPUT);

#if logging
    if (FeedforwardLogic.logger.IsDebugEnabled)
    {
        FeedforwardLogic.logger.Debug("Pattern " + input.ToString()
            + " presented to neural network");
    }
#endif

    // Fast path: no holder requested and a flat network is available.
    if (useHolder == null && this.network.Structure.Flat != null)
    {
        this.network.Structure.UpdateFlatNetwork();
        INeuralData result = new BasicNeuralData(
            this.network.Structure.Flat.OutputCount);
        this.network.Structure.Flat.Compute(input.Data, result.Data);
        return result;
    }

    if (useHolder == null)
    {
        holder = new NeuralOutputHolder();
    }
    else
    {
        holder = useHolder;
    }

    Compute(holder, inputLayer, input, null);
    return holder.Output;
}
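/// <summary>
/// Usage sketch, not part of the library source: demonstrates both ways of
/// calling the Compute overload above. Only the Compute signature, the
/// NeuralOutputHolder type, and the network field come from the code shown;
/// the input values are illustrative.
/// </summary>
private INeuralData ComputeUsageExample()
{
    INeuralData input = new BasicNeuralData(new double[] { 0.0, 1.0 });

    // Pass null when only the final output is needed; the flat network
    // is used when available.
    INeuralData output = this.network.Compute(input, null);

    // Supply a holder so training code can read per-synapse results afterwards.
    NeuralOutputHolder holder = new NeuralOutputHolder();
    this.network.Compute(input, holder);

    return holder.Output;
}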
/// <summary>
/// Not used. Compute cannot be called directly on this network logic;
/// call the run method on the logic class instead.
/// </summary>
/// <param name="input">Not used.</param>
/// <param name="useHolder">Not used.</param>
/// <returns>Not used; this method always throws.</returns>
public override INeuralData Compute(INeuralData input, NeuralOutputHolder useHolder)
{
    String str = "Compute on BasicNetwork cannot be used, rather call"
        + " the run method on the logic class.";

#if logging
    if (logger.IsErrorEnabled)
    {
        logger.Error(str);
    }
#endif

    throw new NeuralNetworkError(str);
}
/// <summary>
/// Internal computation method for a single layer, called recursively as
/// the neural network processes.
/// </summary>
/// <param name="holder">The output holder.</param>
/// <param name="layer">The layer to process.</param>
/// <param name="input">The input to this layer.</param>
/// <param name="source">The source synapse.</param>
private void Compute(NeuralOutputHolder holder, ILayer layer,
    INeuralData input, ISynapse source)
{
    try
    {
#if logging
        if (FeedforwardLogic.logger.IsDebugEnabled)
        {
            FeedforwardLogic.logger.Debug("Processing layer: "
                + layer.ToString() + ", input=" + input.ToString());
        }
#endif

        // Typically used to process any recurrent layers that feed into this
        // layer.
        PreprocessLayer(layer, input, source);

        foreach (ISynapse synapse in layer.Next)
        {
            if (!holder.Result.ContainsKey(synapse))
            {
#if logging
                if (FeedforwardLogic.logger.IsDebugEnabled)
                {
                    FeedforwardLogic.logger.Debug("Processing synapse: "
                        + synapse.ToString());
                }
#endif
                INeuralData pattern = synapse.Compute(input);
                pattern = synapse.ToLayer.Compute(pattern);
                synapse.ToLayer.Process(pattern);

                // Record the input that was fed into this synapse so training
                // can retrieve it later.
                holder.Result[synapse] = input;

                Compute(holder, synapse.ToLayer, pattern, synapse);

                ILayer outputLayer = this.network.GetLayer(BasicNetwork.TAG_OUTPUT);

                // Is this the output from the entire network?
                if (synapse.ToLayer == outputLayer)
                {
                    holder.Output = pattern;
                }
            }
        }
    }
    catch (IndexOutOfRangeException ex)
    {
        throw new NeuralNetworkError("Size mismatch on input of size "
            + input.Count + " and layer: " + layer.ToString(), ex);
    }
}
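/// <summary>
/// Illustrative sketch, not part of the library source: after the recursive
/// Compute above finishes, the holder maps each synapse to the input it
/// received. The foreach below assumes holder.Result is an IDictionary, which
/// the ContainsKey/indexer usage above suggests.
/// </summary>
private void DumpHolderExample(NeuralOutputHolder holder)
{
    foreach (KeyValuePair<ISynapse, INeuralData> entry in holder.Result)
    {
        Console.WriteLine(entry.Key + " received input " + entry.Value);
    }
    Console.WriteLine("Final network output: " + holder.Output);
}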
/// <summary>
/// Internal recursive computation for a single layer (NVector-based port).
/// </summary>
private void _compute(NeuralOutputHolder holder, NeuralLayer layer,
    NVector input, Synapse source)
{
    PreProcessLayer(layer, input, source);

    foreach (var synapse in layer.OutputSynapses)
    {
        if (!holder.Results.ContainsKey(synapse))
        {
            var nextLayer = synapse.OutputLayer;
            var pattern = synapse.Compute(input);
            pattern = synapse.OutputLayer.Compute(pattern);
            synapse.OutputLayer.Process(pattern);

            // Record the input that was fed into this synapse.
            holder.Results[synapse] = input;

            _compute(holder, synapse.OutputLayer, pattern, synapse);

            if (nextLayer == Network.OutputLayer)
            {
                holder.Output = pattern;
            }
        }
    }
}
/// <summary>
/// Compute the output for the ART1 network.
/// </summary>
/// <param name="input">The input to the network; must be BiPolarNeuralData.</param>
/// <param name="useHolder">The NeuralOutputHolder to use.</param>
/// <returns>The output from the network.</returns>
public override INeuralData Compute(INeuralData input, NeuralOutputHolder useHolder)
{
    if (!(input is BiPolarNeuralData))
    {
        String str = "Input to ART1 logic network must be BiPolarNeuralData.";
#if logging
        if (logger.IsErrorEnabled)
        {
            logger.Error(str);
        }
#endif
        throw new NeuralNetworkError(str);
    }

    BiPolarNeuralData output = new BiPolarNeuralData(this.layerF1.NeuronCount);
    Compute((BiPolarNeuralData)input, output);
    return output;
}
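/// <summary>
/// Usage sketch, not part of the library source: the ART1 Compute above
/// rejects anything that is not BiPolarNeuralData. The bool[] constructor
/// shown here is an assumption for illustration.
/// </summary>
private INeuralData Art1UsageExample()
{
    // Assumed constructor: BiPolarNeuralData built from a bool[] pattern.
    BiPolarNeuralData pattern = new BiPolarNeuralData(
        new bool[] { true, false, true, false });

    // Any non-bipolar input would cause the Compute above to throw.
    return Compute(pattern, null);
}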
/// <summary>
/// Compute the output for the given input, starting at the input layer.
/// </summary>
/// <param name="input">The input to the network.</param>
/// <returns>The output from the network.</returns>
public override NVector Compute(NVector input)
{
    var holder = new NeuralOutputHolder();
    _compute(holder, Network.InputLayer, input, null);
    return holder.Output;
}
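/// <summary>
/// Usage sketch, not part of the source: the NVector-based Compute above
/// wraps the recursive _compute with a fresh holder. The NVector constructor
/// shown here is an assumption.
/// </summary>
private NVector NVectorUsageExample()
{
    var input = new NVector(new double[] { 0.2, 0.8 });
    return Compute(input);
}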
/// <summary>
/// Calculate the derivatives for this training set element, filling in one
/// row of the Jacobian.
/// </summary>
/// <param name="pair">The training set element.</param>
/// <returns>The error for this training pair.</returns>
private double CalculateDerivatives(INeuralDataPair pair)
{
    double e = 0.0;   // error for this training pair (ideal - actual)
    double sum = 0.0; // accumulator reused inside the weight loops

    IActivationFunction function = this.network.GetLayer(
        BasicNetwork.TAG_INPUT).ActivationFunction;

    NeuralOutputHolder holder = new NeuralOutputHolder();
    this.network.Compute(pair.Input, holder);

    IList<ISynapse> synapses = this.network.Structure.Synapses;

    int synapseNumber = 0;
    ISynapse synapse = synapses[synapseNumber++];

    double output = holder.Output[0];
    e = pair.Ideal[0] - output;

    this.jacobian[this.jacobianRow][this.jacobianCol++] = CalcDerivative(
        function, output);

    for (int i = 0; i < synapse.FromNeuronCount; i++)
    {
        double lastOutput = holder.Result[synapse][i];

        this.jacobian[this.jacobianRow][this.jacobianCol++] = CalcDerivative(
            function, output) * lastOutput;
    }

    ISynapse lastSynapse;

    while (synapseNumber < synapses.Count)
    {
        lastSynapse = synapse;
        synapse = synapses[synapseNumber++];
        INeuralData outputData = holder.Result[lastSynapse];

        int biasCol = this.jacobianCol;
        this.jacobianCol += synapse.ToLayer.NeuronCount;

        // for each neuron this synapse feeds into
        for (int neuron = 0; neuron < synapse.ToNeuronCount; neuron++)
        {
            output = outputData[neuron];

            // for each incoming weight of that neuron
            for (int i = 0; i < synapse.FromNeuronCount; i++)
            {
                sum = 0.0;

                // for each neuron in the next layer
                for (int j = 0; j < lastSynapse.ToNeuronCount; j++)
                {
                    // for each weight of the next neuron
                    for (int k = 0; k < lastSynapse.FromNeuronCount; k++)
                    {
                        sum += lastSynapse.WeightMatrix[k, j] * output;
                    }
                    sum += lastSynapse.ToLayer.BiasWeights[j];
                }

                double w = lastSynapse.WeightMatrix[neuron, 0];
                double val = CalcDerivative(function, output)
                    * CalcDerivative2(function, sum) * w;

                this.jacobian[this.jacobianRow][this.jacobianCol++] = val
                    * holder.Result[synapse][i];
                this.jacobian[this.jacobianRow][biasCol + neuron] = val;
            }
        }
    }

    // return the error for this training pair
    return e;
}
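/// <summary>
/// Hedged sketch, not part of the source: one plausible way a surrounding
/// Levenberg-Marquardt trainer could call CalculateDerivatives above, filling
/// one Jacobian row per training pair. The trainingSet parameter and the
/// enumeration over it are assumptions; jacobianRow and jacobianCol come from
/// the method above.
/// </summary>
private double FillJacobianExample(INeuralDataSet trainingSet)
{
    double sumOfSquaredErrors = 0.0;
    this.jacobianRow = 0;

    foreach (INeuralDataPair pair in trainingSet)
    {
        this.jacobianCol = 0;
        double error = CalculateDerivatives(pair);
        sumOfSquaredErrors += error * error;
        this.jacobianRow++;
    }

    return sumOfSquaredErrors;
}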
/// <summary>
/// Compute the output for the given input.
/// </summary>
/// <param name="input">The input to the SVM.</param>
/// <param name="useHolder">The output holder to use; may be null.</param>
/// <returns>The results from the SVM.</returns>
public override INeuralData Compute(INeuralData input, NeuralOutputHolder useHolder)
{
    INeuralData result = Compute(input);

    // The holder is optional for this network type; only record the output
    // when a holder was actually supplied.
    if (useHolder != null)
    {
        useHolder.Output = result;
    }

    return result;
}
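/// <summary>
/// Usage sketch, not part of the source: the SVM Compute above simply
/// delegates to Compute(INeuralData), so the holder only ever carries the
/// final output, never per-layer results.
/// </summary>
private INeuralData SvmUsageExample(INeuralData input)
{
    NeuralOutputHolder holder = new NeuralOutputHolder();
    INeuralData result = Compute(input, holder);

    // holder.Output and result refer to the same data.
    return result;
}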
/// <summary>
/// Compute the output for the BasicNetwork class.
/// </summary>
/// <param name="input">The input to the network.</param>
/// <param name="useHolder">The NeuralOutputHolder to use.</param>
/// <returns>The output from the network.</returns>
public abstract INeuralData Compute(INeuralData input, NeuralOutputHolder useHolder);