/// <summary>
/// Return a clone of this neural network. Including structure, weights and
/// threshold values.
/// </summary>
/// <returns>A cloned copy of the neural network.</returns>
public Object Clone()
{
    FeedforwardNetwork result = CloneStructure();
    Double[] copy = MatrixCODEC.NetworkToArray(this);
    MatrixCODEC.ArrayToNetwork(copy, result);
    return result;
}
/// <summary>
/// Return a clone of the structure of this neural network.
/// </summary>
/// <returns>A cloned copy of the structure of the neural network.</returns>
public FeedforwardNetwork CloneStructure()
{
    FeedforwardNetwork result = new FeedforwardNetwork();

    foreach (FeedforwardLayer layer in this.layers)
    {
        FeedforwardLayer clonedLayer = new FeedforwardLayer(layer.NeuronCount);
        result.AddLayer(clonedLayer);
    }

    return result;
}
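// Usage sketch (illustrative, not part of the original source): the difference
// between a full clone and a structure-only clone. The parameter name
// "trained" is hypothetical.
public static void CloneExample(FeedforwardNetwork trained)
{
    // full clone: same layer sizes plus the same weight and threshold values
    FeedforwardNetwork copy = (FeedforwardNetwork)trained.Clone();

    // structure-only clone: same layer sizes, but none of the trained weights;
    // call Reset() to give it fresh random weights before training it
    FeedforwardNetwork shape = trained.CloneStructure();
    shape.Reset();
}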
/// <summary>
/// Convert from an array. Use an array to populate the memory of the neural
/// network.
/// </summary>
/// <param name="array">An array that holds the memory of the neural network.</param>
/// <param name="network">The neural network to populate from the array.</param>
public static void ArrayToNetwork(Double[] array, FeedforwardNetwork network)
{
    // copy data from the array back into the network
    int index = 0;

    foreach (FeedforwardLayer layer in network.Layers)
    {
        // restore the weight matrix (if it exists)
        if (layer.Next != null)
        {
            index = layer.LayerMatrix.FromPackedArray(array, index);
        }
    }
}
/// <summary>
/// Convert to an array. This is used with some training algorithms that
/// require that the "memory" of the neuron (the weight and threshold values)
/// be expressed as a linear array.
/// </summary>
/// <param name="network">A neural network.</param>
/// <returns>The memory of the neural network as an array.</returns>
public static double[] NetworkToArray(FeedforwardNetwork network)
{
    int size = 0;

    // first determine size
    foreach (FeedforwardLayer layer in network.Layers)
    {
        // count the size of the weight matrix
        if (layer.HasMatrix())
        {
            size += layer.MatrixSize;
        }
    }

    // allocate an array to hold the values
    Double[] result = new Double[size];

    // copy data to array
    int index = 0;

    foreach (FeedforwardLayer layer in network.Layers)
    {
        // now the weight matrix (if it exists)
        if (layer.Next != null)
        {
            Double[] matrix = layer.LayerMatrix.ToPackedArray();
            for (int i = 0; i < matrix.Length; i++)
            {
                result[index++] = matrix[i];
            }
        }
    }

    return result;
}
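// Usage sketch (illustrative, not part of the original source): round-trip a
// network's weights through the packed array form, as a trainer that works on
// a flat double[] would.
public static void CodecRoundTrip(FeedforwardNetwork network)
{
    // flatten every weight and threshold value into one array
    double[] packed = MatrixCODEC.NetworkToArray(network);

    // ... a training algorithm could adjust the values in "packed" here ...

    // write the (possibly modified) values back into the network
    MatrixCODEC.ArrayToNetwork(packed, network);
}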
/// <summary>
/// Compare the two neural networks. For them to be equal they must be of the
/// same structure, and have the same matrix values.
/// </summary>
/// <param name="other">The other neural network.</param>
/// <returns>True if the two networks are equal.</returns>
public bool Equals(FeedforwardNetwork other)
{
    // networks with a different number of layers cannot be equal
    if (this.Layers.Count != other.Layers.Count)
    {
        return false;
    }

    int i = 0;

    foreach (FeedforwardLayer layer in this.Layers)
    {
        FeedforwardLayer otherLayer = other.Layers[i++];

        if (layer.NeuronCount != otherLayer.NeuronCount)
        {
            return false;
        }

        // make sure they either both have or do not have a weight matrix
        if ((layer.LayerMatrix == null) && (otherLayer.LayerMatrix != null))
        {
            return false;
        }

        if ((layer.LayerMatrix != null) && (otherLayer.LayerMatrix == null))
        {
            return false;
        }

        // if they both have a matrix, then compare the matrices
        if ((layer.LayerMatrix != null) && (otherLayer.LayerMatrix != null))
        {
            if (!layer.LayerMatrix.Equals(otherLayer.LayerMatrix))
            {
                return false;
            }
        }
    }

    return true;
}
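// Usage sketch (illustrative, not part of the original source): Equals()
// requires matching layer sizes and matching matrix values, so a full clone
// compares equal while a re-randomized copy of the same structure normally
// does not.
public static void EqualsExample(FeedforwardNetwork network)
{
    FeedforwardNetwork copy = (FeedforwardNetwork)network.Clone();
    bool sameWeights = network.Equals(copy);          // expected: true

    FeedforwardNetwork rewired = network.CloneStructure();
    rewired.Reset();                                  // new random weights
    bool sameStructure = network.Equals(rewired);     // almost certainly false
}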
/// <summary>
/// Method that is called to start the incremental prune process.
/// </summary>
public void StartIncremental()
{
    this.hiddenNeuronCount = 1;
    this.cycles = 0;
    this.done = false;

    this.currentNetwork = new FeedforwardNetwork();
    this.currentNetwork.AddLayer(new FeedforwardLayer(this.train[0].Length));
    this.currentNetwork.AddLayer(new FeedforwardLayer(this.hiddenNeuronCount));
    this.currentNetwork.AddLayer(new FeedforwardLayer(this.ideal[0].Length));
    this.currentNetwork.Reset();

    this.backprop = new Backpropagation(this.currentNetwork, this.train,
        this.ideal, this.rate, this.momentum);
}
/// <summary>
/// Internal method that is called at the end of each incremental cycle.
/// </summary>
protected void Increment()
{
    bool doit = false;

    if (this.markErrorRate == 0)
    {
        // first cycle: record the current error as the mark point
        this.markErrorRate = this.error;
        this.sinceMark = 0;
    }
    else
    {
        this.sinceMark++;

        // every 10,000 cycles, check how much the error has improved since
        // the last mark; less than 0.01 improvement means this hidden-layer
        // size has stalled, so grow the hidden layer
        if (this.sinceMark > 10000)
        {
            if ((this.markErrorRate - this.error) < 0.01)
            {
                doit = true;
            }
            this.markErrorRate = this.error;
            this.sinceMark = 0;
        }
    }

    // the target error has been reached, so we are done
    if (this.error < this.maxError)
    {
        this.done = true;
    }

    if (doit)
    {
        // rebuild the network with one more hidden neuron and restart training
        this.cycles = 0;
        this.hiddenNeuronCount++;

        this.currentNetwork = new FeedforwardNetwork();
        this.currentNetwork.AddLayer(new FeedforwardLayer(this.train[0].Length));
        this.currentNetwork.AddLayer(new FeedforwardLayer(this.hiddenNeuronCount));
        this.currentNetwork.AddLayer(new FeedforwardLayer(this.ideal[0].Length));
        this.currentNetwork.Reset();

        this.backprop = new Backpropagation(this.currentNetwork, this.train,
            this.ideal, this.rate, this.momentum);
    }
}
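// Driver sketch for the incremental prune (illustrative; the Done,
// PruneIncremental and CurrentNetwork members used below are assumptions, not
// taken from the code shown here). StartIncremental() builds the smallest
// candidate network, each cycle runs one backpropagation iteration, and
// Increment() either grows the hidden layer or flags completion.
//
//     prune.StartIncremental();
//     while (!prune.Done)                 // assumed accessor for "done"
//     {
//         prune.PruneIncremental();       // assumed per-cycle method
//     }
//     FeedforwardNetwork best = prune.CurrentNetwork;  // assumed accessor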
/// <summary>
/// Internal method that will loop through all hidden neurons and prune them
/// if pruning the neuron does not cause too great of an increase in error.
/// </summary>
/// <returns>True if a prune was made, false otherwise.</returns>
protected bool FindNeuron()
{
    for (int i = 0; i < this.HiddenCount; i++)
    {
        // try the network with hidden neuron i removed
        FeedforwardNetwork trial = this.ClipHiddenNeuron(i);
        double e2 = DetermineError(trial);

        // keep the smaller network if it is still within the error budget
        if (e2 < this.maxError)
        {
            this.currentNetwork = trial;
            return true;
        }
    }

    return false;
}
/// <summary>
/// Internal method to determine the error for a neural network.
/// </summary>
/// <param name="network">The neural network that we are seeking an error rate for.</param>
/// <returns>The error for the specified neural network.</returns>
protected double DetermineError(FeedforwardNetwork network)
{
    return network.CalculateError(this.train, this.ideal);
}
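// Sketch of a selective prune pass built on the two methods above
// (illustrative; FindNeuron() and DetermineError() are protected, so this
// would live inside Prune or a subclass, and the method name is hypothetical).
// Each successful FindNeuron() call removes one hidden neuron, so looping
// until it fails prunes as many neurons as the maxError budget allows.
protected int PruneAllPossible()
{
    int removed = 0;
    while (FindNeuron())
    {
        removed++;
    }
    return removed;
}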
/// <summary>
/// Constructor that is designed to set up a selective prune.
/// </summary>
/// <param name="network">The neural network that we wish to prune.</param>
/// <param name="train">The training set input data.</param>
/// <param name="ideal">The ideal outputs for the training set input data.</param>
/// <param name="maxError">The maximum allowed error rate.</param>
public Prune(FeedforwardNetwork network, double[][] train,
    double[][] ideal, double maxError)
{
    this.currentNetwork = network;
    this.train = train;
    this.ideal = ideal;
    this.maxError = maxError;
}
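// Usage sketch (illustrative, not part of the original source): set up a
// selective prune for a network that has already been trained. The 0.05 error
// ceiling and the variable names are hypothetical.
//
//     Prune prune = new Prune(trainedNetwork, trainInput, trainIdeal, 0.05);
//     // then run a selective prune pass, for example a loop around
//     // FindNeuron() as sketched above, and read back the reduced network.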